code
stringlengths 13
1.2M
| order_type
stringclasses 1
value | original_example
dict | step_ids
listlengths 1
5
|
---|---|---|---|
from models.readingtip import ReadingTip
from database import db
class ReadingTipRepository:
    """Data-access layer for ReadingTip rows, backed by the shared
    SQLAlchemy session (``db.session``)."""

    def get_tips(self, user, tag="all"):
        """Return all of *user*'s tips; with a *tag* name, only tips
        carrying that tag."""
        if tag == "all":
            return ReadingTip.query.filter_by(user=user).all()
        return ReadingTip.query.filter_by(user=user).filter(ReadingTip.tags.any(name=tag)).all()

    def update_tip(self, tip_id, title, link, tags):
        """Overwrite title, link and tag list of an existing tip."""
        # Debug print of the tag list removed here.
        tip = self.get_tip(tip_id)
        tip.title = title
        tip.link = link
        tip.tags = tags
        db.session.commit()

    def create_tip(self, tip):
        """Persist a new tip and return it."""
        db.session.add(tip)
        db.session.commit()
        return tip

    def get_tip(self, tip_id):
        """Fetch a single tip by primary key (None if not found)."""
        return ReadingTip.query.get(tip_id)

    def delete_tip(self, tip):
        """Remove *tip* from the database."""
        db.session.delete(tip)
        db.session.commit()

    def contains_title(self, user, title):
        """True if *user* already has a tip with exactly this title."""
        return ReadingTip.query.filter_by(user=user, title=title).count() > 0

    def read_tip(self, tip, date):
        """Mark *tip* as read on *date*."""
        ReadingTip.query.filter_by(id=tip.id).update({"read": date})
        db.session.commit()


readingtip_repository = ReadingTipRepository()
|
normal
|
{
"blob_id": "d82b68d5c83ae538d7a8b5ae5547b43ac4e8a3d4",
"index": 6910,
"step-1": "<mask token>\n\n\nclass ReadingTipRepository:\n <mask token>\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n <mask token>\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n <mask token>\n <mask token>\n <mask token>\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ReadingTipRepository:\n <mask token>\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n <mask token>\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ReadingTipRepository:\n\n def __init__(self):\n pass\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n\n def get_tip(self, tip_id):\n return ReadingTip.query.get(tip_id)\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\n<mask token>\n",
"step-4": "from models.readingtip import ReadingTip\nfrom database import db\n\n\nclass ReadingTipRepository:\n\n def __init__(self):\n pass\n\n def get_tips(self, user, tag='all'):\n if tag == 'all':\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.\n tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n\n def get_tip(self, tip_id):\n return ReadingTip.query.get(tip_id)\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({'read': date})\n db.session.commit()\n\n\nreadingtip_repository = ReadingTipRepository()\n",
"step-5": "from models.readingtip import ReadingTip\nfrom database import db\n\nclass ReadingTipRepository:\n def __init__(self):\n pass\n\n def get_tips(self, user, tag=\"all\"):\n if tag == \"all\":\n return ReadingTip.query.filter_by(user=user).all()\n else:\n return ReadingTip.query.filter_by(user=user).filter(ReadingTip.tags.any(name=tag)).all()\n\n def update_tip(self, tip_id, title, link, tags):\n tip = self.get_tip(tip_id)\n print(tags)\n tip.title = title\n tip.link = link\n tip.tags = tags\n db.session.commit()\n\n def create_tip(self, tip):\n db.session.add(tip)\n db.session.commit()\n return tip\n\n def get_tip(self, tip_id):\n return ReadingTip.query.get(tip_id)\n\n def delete_tip(self, tip):\n db.session.delete(tip)\n db.session.commit()\n\n def contains_title(self, user, title):\n amount = ReadingTip.query.filter_by(user=user, title=title).count()\n return amount > 0\n\n def read_tip(self, tip, date):\n ReadingTip.query.filter_by(id=tip.id).update({\"read\":date})\n db.session.commit()\n\nreadingtip_repository = ReadingTipRepository()\n",
"step-ids": [
4,
7,
9,
11,
12
]
}
|
[
4,
7,
9,
11,
12
] |
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import render, HttpResponseRedirect, Http404
from django.contrib.auth import authenticate, login, logout
from accounts.forms import RegistrationForm, LoginForm, StudentDetailsForm, companyDetailsForm, SocietyDetailsForm
from accounts.models import MyUser, studentData, CompanyData, SoietyData
from accounts.helper_functions import password_check, email_check
# Create your views here.
def login_page(request):
    """Log a user in, or show the login form.

    An already-authenticated visitor is bounced to the front page. On a
    valid POST the credentials are checked: a user with completed student
    details goes to /home, otherwise they are redirected to the
    completion page for their account type. On auth failure (or GET) the
    login form is rendered again.
    """
    if request.user.is_authenticated():
        return HttpResponseRedirect("/")
    else:
        form = LoginForm(request.POST or None)
        if form.is_valid():
            username = form.cleaned_data['email']
            password = form.cleaned_data['password']
            # SECURITY: the old debug print of the plaintext username and
            # password was removed — never log credentials.
            user = authenticate(username=username, password=password)
            if user is not None:
                try:
                    user_details = studentData.objects.get(id=user.id)
                    login(request, user)
                    return HttpResponseRedirect('/home')
                except ObjectDoesNotExist:
                    # No student details yet: send the user off to finish
                    # registration for their account type.
                    account = MyUser.objects.get(id=user.id)
                    account_type = account.get_account_tyoe()  # sic: model method name is misspelled
                    return HttpResponseRedirect("complete_registration/" + account_type + "/" + str(user.id))
        context = {
            "form": form
        }
        return render(request, "generalPages/loginpage.html", context)
def register_page(request):
    """Render the static registration / account-type selection page.

    The earlier form-driven implementation was disabled (commented out)
    and has been removed; registration is now handled by the
    per-account-type views below.
    """
    return render(request, "generalPages/register.html")
def student_reg(request):
    """Render the student registration-completion template.

    The old inline user-creation flow (create MyUser, then redirect to
    /complete_registration/student/<id>) was disabled and its
    commented-out remains have been removed.
    """
    return render(request, "student/CompleteStudentRegistration.html")
def company_reg(request):
    """Render the company registration-completion template.

    The old inline user-creation flow (create MyUser, then redirect to
    /complete_registration/company/<id>) was disabled and its
    commented-out remains have been removed.
    """
    return render(request, "company/completeCompanyregistration.html")
def society_reg(request):
    """Render the society registration-completion template.

    The old inline user-creation flow (create MyUser, then redirect to
    /complete_registration/society/<id>) was disabled and its
    commented-out remains have been removed.
    """
    return render(request, "society/completeSocietyRegistration.html")
def complete_student_registration(request):
    """Stub: acknowledge the posted student details and redirect home.

    The full flow (validate StudentDetailsForm, create the studentData
    row, log the user in) is currently disabled; the debug print of
    request.POST and the dead commented-out implementation were removed.
    """
    return HttpResponseRedirect("/")
def complete_company_registration(request, id):
    """Finish registration for a company account.

    ``id`` is the MyUser primary key carried in the redirect URL. If the
    company details already exist the user is logged straight in;
    otherwise a valid companyDetailsForm POST creates the CompanyData
    row and logs the user in. Unknown ids go back to /register; any
    other unexpected failure falls through to /404.
    """
    if request.user.is_authenticated():
        return HttpResponseRedirect("/")
    else:
        try:
            user = MyUser.objects.get(id=id)
        except ObjectDoesNotExist:
            return HttpResponseRedirect("/register")
        except Exception:  # was a bare except; same /login fallback kept
            return HttpResponseRedirect("/login")
        try:
            user_details = CompanyData.objects.get(id=id)
            login(request, user)
            return HttpResponseRedirect('/company_home')
        except ObjectDoesNotExist:
            if user.user_type == 'company':
                form = companyDetailsForm(request.POST or None)
                if form.is_valid():
                    company_name = form.cleaned_data["company_name"]
                    website = form.cleaned_data["company_website"]
                    city = form.cleaned_data["HQ_city"]
                    industry = form.cleaned_data["industry"]
                    CompanyData.objects.create(id=user, Company_name=company_name, company_website=website,
                                               HQ_city=city, description=None, industry=industry)
                    login(request, user)
                    return HttpResponseRedirect("/company_home")
                # Invalid (or absent) form data: render the form again.
                context = {
                    "form": companyDetailsForm(),
                }
                return render(request, "company/completeCompanyregistration.html", context)
            else:
                return HttpResponseRedirect('/login')
        except Exception:  # was a bare except; generic error page kept
            return HttpResponseRedirect("/404")
def complete_society_registration(request, id):
    """Finish registration for a society account.

    Mirrors complete_company_registration: look the MyUser up by ``id``,
    log in directly when the SoietyData row already exists, otherwise
    create it from a valid SocietyDetailsForm POST. Debug prints were
    removed.
    """
    if request.user.is_authenticated():
        return HttpResponseRedirect("/")
    else:
        try:
            user = MyUser.objects.get(id=id)
        except ObjectDoesNotExist:
            return HttpResponseRedirect("/register")
        except Exception:  # was a bare except; same /login fallback kept
            return HttpResponseRedirect("/login")
        try:
            user_details = SoietyData.objects.get(id=id)
            login(request, user)
            return HttpResponseRedirect('/home')
        except ObjectDoesNotExist:
            if user.user_type == 'society':
                form = SocietyDetailsForm(request.POST or None)
                if form.is_valid():
                    name = form.cleaned_data['society_name']
                    university = form.cleaned_data['society_university']
                    fb = form.cleaned_data['society_FB']
                    website = form.cleaned_data['society_website']
                    SoietyData.objects.create(id=user, society_name=name, society_university=university,
                                              society_facebook=fb, society_website=website)
                    login(request, user)
                    return HttpResponseRedirect("/society_home")
                # Invalid (or absent) form data: render the form again.
                context = {
                    "form": SocietyDetailsForm(),
                }
                return render(request, "society/completeSocietyRegistration.html", context)
            else:
                return HttpResponseRedirect('/login')
        except Exception:  # was a bare except; marker route kept as-is
            return HttpResponseRedirect("/thisisaknownerror")
def logout_call(request):
    """Log the current user out and redirect to the landing page."""
    logout(request)
    return HttpResponseRedirect('/')
|
normal
|
{
"blob_id": "7f21fcc1265be8b3263971a4e76470616459f433",
"index": 6061,
"step-1": "from django.core.exceptions import ObjectDoesNotExist\nfrom django.shortcuts import render, HttpResponseRedirect, Http404\nfrom django.contrib.auth import authenticate, login, logout\n\nfrom accounts.forms import RegistrationForm, LoginForm, StudentDetailsForm, companyDetailsForm, SocietyDetailsForm\nfrom accounts.models import MyUser, studentData, CompanyData, SoietyData\nfrom accounts.helper_functions import password_check, email_check\n\n# Create your views here.\n\ndef login_page(request):\n if request.user.is_authenticated():\n return HttpResponseRedirect(\"/\")\n else:\n\n form = LoginForm(request.POST or None)\n next_url = request.GET.get('next')\n\n if form.is_valid():\n username = form.cleaned_data['email']\n password = form.cleaned_data['password']\n print username, password\n\n user = authenticate(username=username, password=password)\n\n if user is not None:\n\n\n try:\n user_details = studentData.objects.get(id=user.id)\n login(request, user)\n return HttpResponseRedirect('/home')\n except ObjectDoesNotExist:\n account = MyUser.objects.get(id=user.id)\n account_type = account.get_account_tyoe()\n return HttpResponseRedirect(\"complete_registration/\" + account_type +\"/\"+str(user.id))\n context = {\n \"form\": form\n }\n return render(request, \"generalPages/loginpage.html\", context)\n\n\ndef register_page(request):\n\n\n # if request.user.is_authenticated():\n # return HttpResponseRedirect(\"/\")\n # else:\n # form = RegistrationForm(request.POST or None)\n # context = {\n # \"form\": RegistrationForm(),\n # \"action_value_society\": \"register/society\",\n # \"action_value_student\": \"register/student\",\n # \"action_value_company\": \"register/company\",\n # \"submit_btn_value\": \"Register\"\n #\n # }\n # return render(request, \"generalPages/register.html\", context)\n\n return render(request, \"generalPages/register.html\")\n\n\ndef student_reg(request):\n # if request.user.is_authenticated():\n # return 
HttpResponseRedirect(\"/\")\n # else:\n # form = RegistrationForm(request.POST or None)\n # print form\n #\n # if form.is_valid():\n # email = form.cleaned_data[\"email\"]\n # password = form.cleaned_data[\"password2\"]\n #\n # print email + password\n #\n # user = MyUser.objects.create_user(email=email, password=password, userType=\"student\")\n # #todo: send out confirmation email\n #\n #\n # # get the ID so i can pass it in the URL to the complete registration page\n # user_id = user.id\n # return HttpResponseRedirect(\"/complete_registration/student/\" + str(user_id))\n #\n # else:\n # #todo: change this that it raises username already in use error\n # print \"form is invalid\"\n # # todo: add a parameter that tells them, the username or password was incorrect\n # return HttpResponseRedirect(\"/register\")\n return render(request, \"student/CompleteStudentRegistration.html\")\n\n\n\ndef company_reg(request):\n # if request.user.is_authenticated():\n # return HttpResponseRedirect(\"/\")\n # else:\n # form = RegistrationForm(request.POST or None)\n # print form\n #\n # if form.is_valid():\n # email = form.cleaned_data[\"email\"]\n # password = form.cleaned_data[\"password2\"]\n #\n # print email + password\n #\n # user = MyUser.objects.create_user(email=email, password=password, userType=\"company\")\n # # todo: send out confirmation email\n #\n # # get the ID so i can pass it in the URL to the complete registration page\n # user_id = user.id\n # return HttpResponseRedirect(\"/complete_registration/company/\" + str(user_id))\n #\n # else:\n # print \"form is invalid\"\n # # todo: add a parameter that tells them, the username or password was incorrect\n # return HttpResponseRedirect(\"/register\")\n return render(request, \"company/completeCompanyregistration.html\")\n\n\ndef society_reg(request):\n # if request.user.is_authenticated():\n # return HttpResponseRedirect(\"/\")\n # else:\n # form = RegistrationForm(request.POST or None)\n # print form\n #\n # if 
form.is_valid():\n # email = form.cleaned_data[\"email\"]\n # password = form.cleaned_data[\"password2\"]\n #\n # print email + password\n #\n # user = MyUser.objects.create_user(email=email, password=password, userType=\"society\")\n # # todo: send out confirmation email\n #\n # # get the ID so i can pass it in the URL to the complete registration page\n # user_id = user.id\n # return HttpResponseRedirect(\"/complete_registration/society/\" + str(user_id))\n #\n # else:\n # print \"form is invalid\"\n # # todo: add a parameter that tells them, the username or password was incorrect\n # return HttpResponseRedirect(\"/register\")\n return render(request, \"society/completeSocietyRegistration.html\")\n\n\ndef complete_student_registration(request):\n\n print request.POST\n\n return HttpResponseRedirect(\"/\")\n\n\n # # check if the id is the one that matchest to their email:\n #\n #\n # # print \"in their\"\n # # print request\n # #\n # # return HttpResponseRedirect(\"/\")\n # if request.user.is_authenticated():\n # return HttpResponseRedirect(\"/\")\n # else:\n # try:\n # user = MyUser.objects.get(id=id)\n #\n # except ObjectDoesNotExist:\n # return HttpResponseRedirect(\"/register\")\n # except:\n # return HttpResponseRedirect(\"/login\")\n #\n # try:\n # user_details = studentData.objects.get(id=id)\n # login(request, user)\n # return HttpResponseRedirect('/home')\n # except ObjectDoesNotExist:\n #\n # if user.user_type == 'student':\n # form = StudentDetailsForm(request.POST or None)\n #\n # if form.is_valid():\n # f_name = form.cleaned_data[\"first_name\"]\n # s_name= form.cleaned_data[\"surname\"]\n # studyCunt = form.cleaned_data[\"countryOfStudy\"]\n # course= form.cleaned_data['course']\n # university = form.cleaned_data['university']\n #\n # studentData.objects.create(id=user, first_name=f_name, surname=s_name,\n # countryOfStudy=studyCunt, course=course, university=university)\n # login(request, user)\n # return HttpResponseRedirect(\"/home\")\n # # 
else:\n # # print \"form is invalid\"\n # context = {\n # \"form\": StudentDetailsForm(),\n #\n # }\n # return render(request, \"student/CompleteStudentRegistration.html\", context)\n #\n # pass\n # else:\n # return HttpResponseRedirect('/login')\n # except:\n # return HttpResponseRedirect(\"/404\")\n\n\n\ndef complete_company_registration(request, id):\n # check if the id is the one that matchest to their email:\n\n\n # print \"in their\"\n # print request\n #\n # return HttpResponseRedirect(\"/\")\n if request.user.is_authenticated():\n return HttpResponseRedirect(\"/\")\n else:\n try:\n user = MyUser.objects.get(id=id)\n\n except ObjectDoesNotExist:\n return HttpResponseRedirect(\"/register\")\n except:\n return HttpResponseRedirect(\"/login\")\n\n try:\n user_details = CompanyData.objects.get(id=id)\n login(request, user)\n return HttpResponseRedirect('/company_home')\n except ObjectDoesNotExist:\n\n if user.user_type == 'company':\n\n form = companyDetailsForm(request.POST or None)\n\n if form.is_valid():\n print \"there\"\n company_name = form.cleaned_data[\"company_name\"]\n website = form.cleaned_data[\"company_website\"]\n city = form.cleaned_data[\"HQ_city\"]\n industry = form.cleaned_data[\"industry\"]\n\n CompanyData.objects.create(id=user, Company_name=company_name, company_website=website,\n HQ_city=city, description=None, industry=industry)\n login(request, user)\n return HttpResponseRedirect(\"/company_home\")\n # else:\n # print \"form is invalid\"\n context = {\n \"form\": companyDetailsForm(),\n\n }\n return render(request, \"company/completeCompanyregistration.html\", context)\n\n pass\n else:\n return HttpResponseRedirect('/login')\n except:\n return HttpResponseRedirect(\"/404\")\n\n\ndef complete_society_registration(request, id):\n print \"hey\"\n if request.user.is_authenticated():\n return HttpResponseRedirect(\"/\")\n else:\n print \"ho\"\n try:\n user = MyUser.objects.get(id=id)\n\n except ObjectDoesNotExist:\n return 
HttpResponseRedirect(\"/register\")\n except:\n return HttpResponseRedirect(\"/login\")\n\n try:\n user_details = SoietyData.objects.get(id=id)\n login(request, user)\n return HttpResponseRedirect('/home')\n except ObjectDoesNotExist:\n print \"lets \"\n if user.user_type == 'society':\n form = SocietyDetailsForm(request.POST or None)\n\n if form.is_valid():\n name = form.cleaned_data['society_name']\n university = form.cleaned_data['society_university']\n fb = form.cleaned_data['society_FB']\n website = form.cleaned_data['society_website']\n\n SoietyData.objects.create(id=user, society_name=name, society_university=university,\n society_facebook=fb, society_website=website)\n login(request, user)\n return HttpResponseRedirect(\"/society_home\")\n # else:\n # print \"form is invalid\"\n context = {\n \"form\": SocietyDetailsForm(),\n\n }\n print \"go\"\n return render(request, \"society/completeSocietyRegistration.html\", context)\n else:\n return HttpResponseRedirect('/login')\n except:\n return HttpResponseRedirect(\"/thisisaknownerror\")\n\n\n\n\ndef logout_call(request):\n logout(request)\n return HttpResponseRedirect('/')\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
import sqlalchemy as sa
import ujson
from aiohttp import web, WSMsgType
from .db import TLE
from .log import logger
from .utils import parse_sa_filter, parse_sa_order, check_sa_column, get_sa_column
async def query(request):
    """Run a filtered, ordered SELECT against the TLE table.

    Query string parameters:
      filters -- required; JSON object mapping column filter specs.
      order   -- ORDER BY spec, either JSON or a plain column string.
      only    -- comma-separated column names restricting the SELECT list.

    Returns the matching rows as a list of dicts.
    """
    if 'filters' not in request.query:
        raise web.HTTPBadRequest(reason='Query parameter `filters` is required')
    conditions = []
    try:
        for key, value in ujson.loads(request.query.get('filters', '{}')).items():
            conditions.extend(parse_sa_filter(TLE, key, value))
    except ValueError:
        raise web.HTTPBadRequest(reason='Query parameter `filters` must contains valid JSON')
    raw_order = request.query.get('order', '{}')
    if not raw_order.startswith('{'):
        ordering = raw_order
    else:
        try:
            ordering = ujson.loads(raw_order)
        except ValueError:
            raise web.HTTPBadRequest(reason='Query parameter `order` must contains valid JSON')
    ordering = parse_sa_order(TLE, ordering)
    # Restrict the SELECT list to the recognized columns named in `only`.
    columns = [
        get_sa_column(TLE, name)
        for name in request.query.get('only', '').split(',')
        if check_sa_column(TLE, name)
    ]
    statement = sa.select(columns or [TLE]).where(sa.and_(*conditions)).order_by(*ordering)
    async with request.app['pg'].acquire() as conn:
        result = await conn.execute(statement)
        return [dict(row) async for row in result]
async def index(request):
    """Serve a tiny self-contained debug page.

    The page opens a websocket to /subscribe on the same host and appends
    each received message (channel name + raw payload) to a <dl> list.
    """
    html = '''
    <html>
        <head>
            <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
            <script>
                var source = new WebSocket('ws://' + window.location.host + '/subscribe');
                function eventListener(event) {
                    var message = JSON.parse(event.data);
                    $('.messages').append([
                        $('<dt>').text(message.channel),
                        $('<dd>').text(event.data),
                    ]);
                }
                source.onmessage = eventListener;
            </script>
        </head>
        <body>
            <dl class="messages"></dl>
        </body>
    </html>
    '''
    return web.Response(text=html, content_type='text/html')
async def subscribe(request):
    """Websocket endpoint: register the client in app['channels'].

    The connection stays open reading JSON messages; a
    {"command": "close"} message closes it. The socket is deregistered
    exactly once in the finally block — the original removed it a second
    time when ``ws.closed`` was true, which raises (KeyError on a set,
    ValueError on a list) right after a normal close.
    """
    ws = web.WebSocketResponse()
    await ws.prepare(request)
    request.app['channels'].add(ws)
    logger.debug('Someone joined.')
    try:
        while True:
            msg = await ws.receive_json()
            if msg.get('command') == 'close':
                await ws.close()
    except Exception as exc:
        logger.exception(exc)
    finally:
        # Deregister exactly once; a duplicate remove() would raise.
        request.app['channels'].remove(ws)
    logger.debug('websocket connection closed')
    return ws
|
normal
|
{
"blob_id": "c414e5d3934f741540fb5721a529b48f95e17016",
"index": 5982,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nasync def query(request):\n filters = []\n if 'filters' not in request.query:\n raise web.HTTPBadRequest(reason='Query parameter `filters` is required'\n )\n try:\n _filters = ujson.loads(request.query.get('filters', '{}'))\n for k, v in _filters.items():\n filters.extend(parse_sa_filter(TLE, k, v))\n except ValueError:\n raise web.HTTPBadRequest(reason=\n 'Query parameter `filters` must contains valid JSON')\n _order = request.query.get('order', '{}')\n if _order.startswith('{'):\n try:\n order = ujson.loads(_order)\n except ValueError:\n raise web.HTTPBadRequest(reason=\n 'Query parameter `order` must contains valid JSON')\n else:\n order = _order\n order = parse_sa_order(TLE, order)\n only = [get_sa_column(TLE, key) for key in request.query.get('only', ''\n ).split(',') if check_sa_column(TLE, key)]\n async with request.app['pg'].acquire() as conn:\n rp = await conn.execute(sa.select(only or [TLE]).where(sa.and_(*\n filters)).order_by(*order))\n return [dict(r) async for r in rp]\n\n\nasync def index(request):\n html = \"\"\"\n <html>\n <head>\n <script src=\"//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js\"></script>\n <script>\n var source = new WebSocket('ws://' + window.location.host + '/subscribe');\n function eventListener(event) {\n var message = JSON.parse(event.data);\n $('.messages').append([\n $('<dt>').text(message.channel),\n $('<dd>').text(event.data),\n ]);\n }\n source.onmessage = eventListener;\n </script>\n </head>\n <body>\n <dl class=\"messages\"></dl>\n </body>\n </html>\n \"\"\"\n return web.Response(text=html, content_type='text/html')\n\n\nasync def subscribe(request):\n ws = web.WebSocketResponse()\n await ws.prepare(request)\n request.app['channels'].add(ws)\n logger.debug('Someone joined.')\n try:\n while True:\n msg = await ws.receive_json()\n if msg.get('command') == 'close':\n await ws.close()\n except Exception as exc:\n logger.exception(exc)\n finally:\n request.app['channels'].remove(ws)\n 
if ws.closed:\n request.app['channels'].remove(ws)\n logger.debug('websocket connection closed')\n return ws\n",
"step-3": "import sqlalchemy as sa\nimport ujson\nfrom aiohttp import web, WSMsgType\nfrom .db import TLE\nfrom .log import logger\nfrom .utils import parse_sa_filter, parse_sa_order, check_sa_column, get_sa_column\n\n\nasync def query(request):\n filters = []\n if 'filters' not in request.query:\n raise web.HTTPBadRequest(reason='Query parameter `filters` is required'\n )\n try:\n _filters = ujson.loads(request.query.get('filters', '{}'))\n for k, v in _filters.items():\n filters.extend(parse_sa_filter(TLE, k, v))\n except ValueError:\n raise web.HTTPBadRequest(reason=\n 'Query parameter `filters` must contains valid JSON')\n _order = request.query.get('order', '{}')\n if _order.startswith('{'):\n try:\n order = ujson.loads(_order)\n except ValueError:\n raise web.HTTPBadRequest(reason=\n 'Query parameter `order` must contains valid JSON')\n else:\n order = _order\n order = parse_sa_order(TLE, order)\n only = [get_sa_column(TLE, key) for key in request.query.get('only', ''\n ).split(',') if check_sa_column(TLE, key)]\n async with request.app['pg'].acquire() as conn:\n rp = await conn.execute(sa.select(only or [TLE]).where(sa.and_(*\n filters)).order_by(*order))\n return [dict(r) async for r in rp]\n\n\nasync def index(request):\n html = \"\"\"\n <html>\n <head>\n <script src=\"//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js\"></script>\n <script>\n var source = new WebSocket('ws://' + window.location.host + '/subscribe');\n function eventListener(event) {\n var message = JSON.parse(event.data);\n $('.messages').append([\n $('<dt>').text(message.channel),\n $('<dd>').text(event.data),\n ]);\n }\n source.onmessage = eventListener;\n </script>\n </head>\n <body>\n <dl class=\"messages\"></dl>\n </body>\n </html>\n \"\"\"\n return web.Response(text=html, content_type='text/html')\n\n\nasync def subscribe(request):\n ws = web.WebSocketResponse()\n await ws.prepare(request)\n request.app['channels'].add(ws)\n logger.debug('Someone joined.')\n try:\n while 
True:\n msg = await ws.receive_json()\n if msg.get('command') == 'close':\n await ws.close()\n except Exception as exc:\n logger.exception(exc)\n finally:\n request.app['channels'].remove(ws)\n if ws.closed:\n request.app['channels'].remove(ws)\n logger.debug('websocket connection closed')\n return ws\n",
"step-4": "# -*- coding: utf-8 -*-\n\nimport sqlalchemy as sa\nimport ujson\nfrom aiohttp import web, WSMsgType\n\nfrom .db import TLE\nfrom .log import logger\nfrom .utils import parse_sa_filter, parse_sa_order, check_sa_column, get_sa_column\n\n\nasync def query(request):\n filters = []\n if 'filters' not in request.query:\n raise web.HTTPBadRequest(reason='Query parameter `filters` is required')\n\n try:\n _filters = ujson.loads(request.query.get('filters', '{}'))\n for k, v in _filters.items():\n filters.extend(parse_sa_filter(TLE, k, v))\n except ValueError:\n raise web.HTTPBadRequest(reason='Query parameter `filters` must contains valid JSON')\n\n _order = request.query.get('order', '{}')\n if _order.startswith('{'):\n try:\n order = ujson.loads(_order)\n except ValueError:\n raise web.HTTPBadRequest(reason='Query parameter `order` must contains valid JSON')\n else:\n order = _order\n\n order = parse_sa_order(TLE, order)\n only = [get_sa_column(TLE, key) for key in request.query.get('only', '').split(',') if check_sa_column(TLE, key)]\n\n async with request.app['pg'].acquire() as conn:\n rp = await conn.execute(sa.select(only or [TLE]).where(sa.and_(*filters)).order_by(*order))\n return [dict(r) async for r in rp]\n\n\nasync def index(request):\n html = '''\n <html>\n <head>\n <script src=\"//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js\"></script>\n <script>\n var source = new WebSocket('ws://' + window.location.host + '/subscribe');\n function eventListener(event) {\n var message = JSON.parse(event.data);\n $('.messages').append([\n $('<dt>').text(message.channel),\n $('<dd>').text(event.data),\n ]);\n }\n source.onmessage = eventListener;\n </script>\n </head>\n <body>\n <dl class=\"messages\"></dl>\n </body>\n </html>\n '''\n return web.Response(text=html, content_type='text/html')\n\n\nasync def subscribe(request):\n ws = web.WebSocketResponse()\n await ws.prepare(request)\n request.app['channels'].add(ws)\n logger.debug('Someone joined.')\n 
try:\n while True:\n msg = await ws.receive_json()\n if msg.get('command') == 'close':\n await ws.close()\n except Exception as exc:\n logger.exception(exc)\n finally:\n request.app['channels'].remove(ws)\n\n if ws.closed:\n request.app['channels'].remove(ws)\n\n logger.debug('websocket connection closed')\n return ws\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Size in bytes of the data payload carried by a DATA segment.
DEFAULT_SIZE = 512

class DataEncoding:
    """Encode/decode file-transfer segments.

    Wire format (all integers big-endian, unsigned):
      4-byte segment number (ack) | 1-byte type | 2-byte payload length | payload
    Types: 1 = START (carries the file name), 2 = DATA (full payload),
    3 = END (final payload, zero-padded to DEFAULT_SIZE).
    """

    @staticmethod
    def segment_decode(segment):
        """Parse a raw segment into its header fields and payload.

        Returns a dict with keys 'ack', 'tip' (type code), 'len' and 'data'.
        """
        arr = bytearray(segment)
        ack = int.from_bytes(arr[0:4], byteorder='big', signed=False)
        tip = int.from_bytes(arr[4:5], byteorder='big', signed=False)
        length = int.from_bytes(arr[5:7], byteorder='big', signed=False)
        # BUGFIX: the original filtered bytes with `arr[i] != b'\x00'`, which
        # compares an int against a bytes object and is therefore always True,
        # so the filter never removed anything.  The length field already
        # bounds the useful bytes, so a plain slice reproduces the actual
        # original behaviour without the misleading dead comparison.
        data = bytearray(arr[7:7 + length])
        return {'ack': ack, 'tip': tip, 'len': length, 'data': data}

    # Segment layout: (segment_number, segment_type, segment_len), segment_data.

    @staticmethod
    def encode_start(transmitter, nume_fisier):
        """Build the first segment of a transfer, carrying the file name."""
        transmitter.ack = transmitter.ack + 1  # first ack sent is 1
        segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)
        segment_type = b'\x01'
        segment_len = len(nume_fisier).to_bytes(2, byteorder='big', signed=False)
        segment = segment_number + segment_type + segment_len
        for ch in nume_fisier:
            segment += ord(ch).to_bytes(1, byteorder='big', signed=False)
        return segment

    @staticmethod
    def encode_data(transmitter, segment_data):
        """Build a full-size DATA segment; segment_data must be DEFAULT_SIZE bytes."""
        transmitter.ack = transmitter.ack + 1
        segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)
        segment_type = b'\x02'
        segment_len = DEFAULT_SIZE.to_bytes(2, byteorder='big', signed=False)
        return segment_number + segment_type + segment_len + segment_data

    @staticmethod
    def encode_end(transmitter, segment_data):
        """Build the final segment; payload is zero-padded to DEFAULT_SIZE.

        The length field holds the number of useful (unpadded) bytes.
        """
        transmitter.ack = transmitter.ack + 1
        segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)
        segment_type = b'\x03'
        segment_data_len = len(segment_data)
        segment_data = segment_data + b'\x00' * (DEFAULT_SIZE - segment_data_len)
        segment_len = segment_data_len.to_bytes(2, byteorder='big', signed=False)
        return segment_number + segment_type + segment_len + segment_data

    @staticmethod
    def encode_error(transmitter, segment_data):
        """Fallback used by encode() for unknown segment types; does nothing."""
        pass

    @staticmethod
    def encode(transmitter, tip, data):
        """Dispatch to the encoder matching *tip* ('START', 'DATA' or 'END')."""
        segment_type = {
            'START': DataEncoding.encode_start,
            'DATA': DataEncoding.encode_data,
            'END': DataEncoding.encode_end,
        }
        return segment_type.get(tip, DataEncoding.encode_error)(transmitter, data)

    @staticmethod
    def bytes_from_file(transmitter, chunk_size=DEFAULT_SIZE):
        """Yield the transmitter's file contents in chunks of *chunk_size* bytes."""
        with open(transmitter.filepath, "rb") as f:
            while True:
                chunk = f.read(chunk_size)
                if chunk:
                    yield chunk
                else:
                    break

    @staticmethod
    def encode_bytes(transmitter):
        """Yield encoded DATA segments for the file, ending with an END segment."""
        # BUGFIX: the original passed transmitter.filepath here, but
        # bytes_from_file expects the transmitter object itself (it reads
        # .filepath internally), which raised AttributeError at runtime.
        # NOTE(review): a file whose size is an exact multiple of DEFAULT_SIZE
        # never produces an END segment -- confirm this is intended.
        for b in DataEncoding.bytes_from_file(transmitter):
            if len(b) == DEFAULT_SIZE:
                yield DataEncoding.encode(transmitter, 'DATA', b)
            else:
                yield DataEncoding.encode(transmitter, 'END', b)
|
normal
|
{
"blob_id": "47c5375816ab35e8225e5f3695f7ee2ab5336076",
"index": 4312,
"step-1": "<mask token>\n\n\nclass DataEncoding:\n\n @staticmethod\n def segment_decode(segment):\n arr = bytearray(segment)\n ack_binary = bytearray([arr[i] for i in range(4)])\n tip_binary = bytearray([arr[4]])\n len_binary = bytearray([arr[i] for i in (5, 6)])\n ack = int.from_bytes(ack_binary, byteorder='big', signed=False)\n tip = int.from_bytes(tip_binary, byteorder='big', signed=False)\n length = int.from_bytes(len_binary, byteorder='big', signed=False)\n data = bytearray([arr[i] for i in range(7, 7 + length) if arr[i] !=\n b'\\x00'])\n return {'ack': ack, 'tip': tip, 'len': length, 'data': data}\n <mask token>\n\n @staticmethod\n def encode_data(transmitter, segment_data):\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x02'\n segment_len = DEFAULT_SIZE.to_bytes(2, byteorder='big', signed=False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n\n @staticmethod\n def encode_end(transmitter, segment_data):\n global end_transmission\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x03'\n segment_data_len = len(segment_data)\n segment_data = segment_data + b'\\x00' * (DEFAULT_SIZE -\n segment_data_len)\n segment_len = segment_data_len.to_bytes(2, byteorder='big', signed=\n False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n <mask token>\n\n @staticmethod\n def encode(transmitter, tip, data):\n segment_type = {'START': DataEncoding.encode_start, 'DATA':\n DataEncoding.encode_data, 'END': DataEncoding.encode_end}\n return segment_type.get(tip, DataEncoding.encode_error)(transmitter,\n data)\n\n @staticmethod\n def bytes_from_file(transmitter, chunk_size=DEFAULT_SIZE):\n with open(transmitter.filepath, 'rb') as f:\n while True:\n chunk = f.read(chunk_size)\n if chunk:\n yield chunk\n else:\n break\n 
<mask token>\n",
"step-2": "<mask token>\n\n\nclass DataEncoding:\n\n @staticmethod\n def segment_decode(segment):\n arr = bytearray(segment)\n ack_binary = bytearray([arr[i] for i in range(4)])\n tip_binary = bytearray([arr[4]])\n len_binary = bytearray([arr[i] for i in (5, 6)])\n ack = int.from_bytes(ack_binary, byteorder='big', signed=False)\n tip = int.from_bytes(tip_binary, byteorder='big', signed=False)\n length = int.from_bytes(len_binary, byteorder='big', signed=False)\n data = bytearray([arr[i] for i in range(7, 7 + length) if arr[i] !=\n b'\\x00'])\n return {'ack': ack, 'tip': tip, 'len': length, 'data': data}\n\n @staticmethod\n def encode_start(transmitter, nume_fisier):\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x01'\n lungime_nume = len(nume_fisier)\n segment_len = lungime_nume.to_bytes(2, byteorder='big', signed=False)\n segment = segment_number + segment_type + segment_len\n for ch in nume_fisier:\n segment += ord(ch).to_bytes(1, byteorder='big', signed=False)\n return segment\n\n @staticmethod\n def encode_data(transmitter, segment_data):\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x02'\n segment_len = DEFAULT_SIZE.to_bytes(2, byteorder='big', signed=False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n\n @staticmethod\n def encode_end(transmitter, segment_data):\n global end_transmission\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x03'\n segment_data_len = len(segment_data)\n segment_data = segment_data + b'\\x00' * (DEFAULT_SIZE -\n segment_data_len)\n segment_len = segment_data_len.to_bytes(2, byteorder='big', signed=\n False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n <mask token>\n\n 
@staticmethod\n def encode(transmitter, tip, data):\n segment_type = {'START': DataEncoding.encode_start, 'DATA':\n DataEncoding.encode_data, 'END': DataEncoding.encode_end}\n return segment_type.get(tip, DataEncoding.encode_error)(transmitter,\n data)\n\n @staticmethod\n def bytes_from_file(transmitter, chunk_size=DEFAULT_SIZE):\n with open(transmitter.filepath, 'rb') as f:\n while True:\n chunk = f.read(chunk_size)\n if chunk:\n yield chunk\n else:\n break\n\n @staticmethod\n def encode_bytes(transmitter):\n for b in DataEncoding.bytes_from_file(transmitter.filepath):\n if len(b) == DEFAULT_SIZE:\n yield DataEncoding.encode(transmitter, 'DATA', b)\n else:\n yield DataEncoding.encode(transmitter, 'END', b)\n",
"step-3": "<mask token>\n\n\nclass DataEncoding:\n\n @staticmethod\n def segment_decode(segment):\n arr = bytearray(segment)\n ack_binary = bytearray([arr[i] for i in range(4)])\n tip_binary = bytearray([arr[4]])\n len_binary = bytearray([arr[i] for i in (5, 6)])\n ack = int.from_bytes(ack_binary, byteorder='big', signed=False)\n tip = int.from_bytes(tip_binary, byteorder='big', signed=False)\n length = int.from_bytes(len_binary, byteorder='big', signed=False)\n data = bytearray([arr[i] for i in range(7, 7 + length) if arr[i] !=\n b'\\x00'])\n return {'ack': ack, 'tip': tip, 'len': length, 'data': data}\n\n @staticmethod\n def encode_start(transmitter, nume_fisier):\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x01'\n lungime_nume = len(nume_fisier)\n segment_len = lungime_nume.to_bytes(2, byteorder='big', signed=False)\n segment = segment_number + segment_type + segment_len\n for ch in nume_fisier:\n segment += ord(ch).to_bytes(1, byteorder='big', signed=False)\n return segment\n\n @staticmethod\n def encode_data(transmitter, segment_data):\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x02'\n segment_len = DEFAULT_SIZE.to_bytes(2, byteorder='big', signed=False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n\n @staticmethod\n def encode_end(transmitter, segment_data):\n global end_transmission\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x03'\n segment_data_len = len(segment_data)\n segment_data = segment_data + b'\\x00' * (DEFAULT_SIZE -\n segment_data_len)\n segment_len = segment_data_len.to_bytes(2, byteorder='big', signed=\n False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n\n @staticmethod\n 
def encode_error(transmitter, segment_data):\n pass\n\n @staticmethod\n def encode(transmitter, tip, data):\n segment_type = {'START': DataEncoding.encode_start, 'DATA':\n DataEncoding.encode_data, 'END': DataEncoding.encode_end}\n return segment_type.get(tip, DataEncoding.encode_error)(transmitter,\n data)\n\n @staticmethod\n def bytes_from_file(transmitter, chunk_size=DEFAULT_SIZE):\n with open(transmitter.filepath, 'rb') as f:\n while True:\n chunk = f.read(chunk_size)\n if chunk:\n yield chunk\n else:\n break\n\n @staticmethod\n def encode_bytes(transmitter):\n for b in DataEncoding.bytes_from_file(transmitter.filepath):\n if len(b) == DEFAULT_SIZE:\n yield DataEncoding.encode(transmitter, 'DATA', b)\n else:\n yield DataEncoding.encode(transmitter, 'END', b)\n",
"step-4": "DEFAULT_SIZE = 512\n\n\nclass DataEncoding:\n\n @staticmethod\n def segment_decode(segment):\n arr = bytearray(segment)\n ack_binary = bytearray([arr[i] for i in range(4)])\n tip_binary = bytearray([arr[4]])\n len_binary = bytearray([arr[i] for i in (5, 6)])\n ack = int.from_bytes(ack_binary, byteorder='big', signed=False)\n tip = int.from_bytes(tip_binary, byteorder='big', signed=False)\n length = int.from_bytes(len_binary, byteorder='big', signed=False)\n data = bytearray([arr[i] for i in range(7, 7 + length) if arr[i] !=\n b'\\x00'])\n return {'ack': ack, 'tip': tip, 'len': length, 'data': data}\n\n @staticmethod\n def encode_start(transmitter, nume_fisier):\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x01'\n lungime_nume = len(nume_fisier)\n segment_len = lungime_nume.to_bytes(2, byteorder='big', signed=False)\n segment = segment_number + segment_type + segment_len\n for ch in nume_fisier:\n segment += ord(ch).to_bytes(1, byteorder='big', signed=False)\n return segment\n\n @staticmethod\n def encode_data(transmitter, segment_data):\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x02'\n segment_len = DEFAULT_SIZE.to_bytes(2, byteorder='big', signed=False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n\n @staticmethod\n def encode_end(transmitter, segment_data):\n global end_transmission\n transmitter.ack = transmitter.ack + 1\n segment_number = transmitter.ack.to_bytes(4, byteorder='big',\n signed=False)\n segment_type = b'\\x03'\n segment_data_len = len(segment_data)\n segment_data = segment_data + b'\\x00' * (DEFAULT_SIZE -\n segment_data_len)\n segment_len = segment_data_len.to_bytes(2, byteorder='big', signed=\n False)\n segment = segment_number + segment_type + segment_len + segment_data\n return segment\n\n 
@staticmethod\n def encode_error(transmitter, segment_data):\n pass\n\n @staticmethod\n def encode(transmitter, tip, data):\n segment_type = {'START': DataEncoding.encode_start, 'DATA':\n DataEncoding.encode_data, 'END': DataEncoding.encode_end}\n return segment_type.get(tip, DataEncoding.encode_error)(transmitter,\n data)\n\n @staticmethod\n def bytes_from_file(transmitter, chunk_size=DEFAULT_SIZE):\n with open(transmitter.filepath, 'rb') as f:\n while True:\n chunk = f.read(chunk_size)\n if chunk:\n yield chunk\n else:\n break\n\n @staticmethod\n def encode_bytes(transmitter):\n for b in DataEncoding.bytes_from_file(transmitter.filepath):\n if len(b) == DEFAULT_SIZE:\n yield DataEncoding.encode(transmitter, 'DATA', b)\n else:\n yield DataEncoding.encode(transmitter, 'END', b)\n",
"step-5": "DEFAULT_SIZE = 512\r\n\r\nclass DataEncoding:\r\n @staticmethod\r\n def segment_decode(segment):\r\n arr = bytearray(segment)\r\n ack_binary = bytearray([arr[i] for i in range(4)])\r\n tip_binary = bytearray([arr[4]])\r\n len_binary = bytearray([arr[i] for i in (5,6)])\r\n\r\n ack = int.from_bytes(ack_binary, byteorder='big', signed=False)\r\n tip = int.from_bytes(tip_binary, byteorder='big', signed=False)\r\n length = int.from_bytes(len_binary, byteorder='big', signed=False)\r\n data = bytearray([arr[i] for i in range(7, 7 + length) if arr[i] != b'\\x00'])\r\n\r\n return {'ack': ack, 'tip': tip, 'len': length, 'data': data}\r\n\r\n\r\n # codificare: (segment_number, segment_type, segment_len), segment_data\r\n # creeaza primul pachet, cel care contine numele\r\n @staticmethod\r\n def encode_start(transmitter,nume_fisier):\r\n transmitter.ack = transmitter.ack + 1 # primul ack trimis este 1\r\n segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)\r\n\r\n segment_type = b'\\x01'\r\n lungime_nume = len(nume_fisier)\r\n segment_len = lungime_nume.to_bytes(2, byteorder='big', signed=False)\r\n segment = segment_number + segment_type + segment_len\r\n\r\n for ch in nume_fisier:\r\n segment += (ord(ch).to_bytes(1, byteorder='big', signed=False))\r\n\r\n return segment\r\n\r\n\r\n # creeaza pachetele care contine bitii din fisier\r\n @staticmethod\r\n def encode_data(transmitter,segment_data):\r\n transmitter.ack = transmitter.ack + 1 # primul ack trimis este 1\r\n segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)\r\n\r\n segment_type = b'\\x02'\r\n segment_len = DEFAULT_SIZE.to_bytes(2, byteorder='big', signed=False)\r\n segment = segment_number + segment_type + segment_len + segment_data\r\n\r\n return segment\r\n\r\n # in campul de segment_code, al doilea octet va fi lungimea caracterelor utile\r\n @staticmethod\r\n def encode_end(transmitter,segment_data):\r\n global end_transmission\r\n transmitter.ack = 
transmitter.ack + 1 # primul ack trimis este 1\r\n segment_number = transmitter.ack.to_bytes(4, byteorder='big', signed=False)\r\n\r\n segment_type = b'\\x03'\r\n segment_data_len = len(segment_data)\r\n segment_data = segment_data + b'\\x00'*(DEFAULT_SIZE - segment_data_len)\r\n segment_len = segment_data_len.to_bytes(2, byteorder='big', signed=False)\r\n segment = segment_number + segment_type + segment_len + segment_data\r\n\r\n return segment\r\n\r\n @staticmethod\r\n def encode_error(transmitter,segment_data):\r\n pass\r\n\r\n\r\n @staticmethod\r\n def encode(transmitter,tip, data):\r\n segment_type = {\r\n 'START': DataEncoding.encode_start,\r\n 'DATA' : DataEncoding.encode_data,\r\n 'END' : DataEncoding.encode_end\r\n }\r\n return segment_type.get(tip, DataEncoding.encode_error)(transmitter,data)\r\n\r\n\r\n #citirea fisier ca pachete de octeti\r\n @staticmethod\r\n def bytes_from_file(transmitter, chunk_size=DEFAULT_SIZE):\r\n with open(transmitter.filepath, \"rb\") as f:\r\n while True:\r\n chunk = f.read(chunk_size)\r\n if chunk:\r\n yield chunk\r\n else:\r\n break\r\n\r\n #codificarea pachetelor de octeti\r\n @staticmethod\r\n def encode_bytes(transmitter):\r\n for b in DataEncoding.bytes_from_file(transmitter.filepath):\r\n if len(b) == DEFAULT_SIZE:\r\n yield DataEncoding.encode(transmitter,'DATA', b)\r\n else:\r\n yield DataEncoding.encode(transmitter,'END', b)\r\n\r\n\r\n",
"step-ids": [
6,
8,
9,
10,
11
]
}
|
[
6,
8,
9,
10,
11
] |
# Binary signal levels of the line: space = 0, mark = 1.
SPACE = 0
MARK = 1

def frame_to_bit_chunks(frame_values, baud_rate=45.45, start_bit=SPACE, stop_bit=MARK):
    """Convert per-frame signal strengths into chunks of data bits.

    Chains the four decoding stages: threshold to 0/1, run-length encode,
    slice into single bits, then extract framed data bits.
    """
    pipeline = frame_to_binary_values(frame_values)
    pipeline = binary_values_to_bit_duration(pipeline)
    pipeline = bit_duration_to_bit_values(pipeline, baud_rate)
    return bit_values_to_bit_chunks(pipeline, start_bit, stop_bit)

def frame_to_binary_values(frame_values, threshold=1.0):
    """Decide 0/1 for each frame from the mark/space signal strengths.

    A *threshold* above 1.0 adds hysteresis: the level only flips once the
    other tone becomes *threshold* times stronger; otherwise the previous
    decision is kept.
    """
    level = SPACE  # previous decision, kept while neither tone dominates
    for mark_value, space_value, time in frame_values:
        if mark_value > space_value * threshold:
            level = MARK
        if space_value > mark_value * threshold:
            level = SPACE
        yield (level, time)
def binary_values_to_bit_duration(binary_values):
    """Run-length encode the 0/1 stream: yield (value, how long it lasted)."""
    prev_value = SPACE   # value of the run currently being measured
    prev_time = 0        # time at which that run started
    value = SPACE        # most recently seen value
    time = 0             # most recently seen timestamp
    for value, time in binary_values:
        # A level change closes the previous run: emit its value and length.
        if value != prev_value:
            yield (prev_value, time - prev_time)
            prev_value = value
            prev_time = time
    # The final run is never closed by a change inside the loop, so emit it here.
    yield (value, time - prev_time)
def bit_duration_to_bit_values(bit_duration_values, baud_rate=45.45, minimum_bit_width=0.25):
    """Split long runs into single-bit values and drop runs that are too short.

    Yields (bit_value, width) where width is the emitted slice measured in
    bits (1.0 for a full bit, less for a trailing partial one).  Leftover
    time shorter than *minimum_bit_width* bits is carried into the next run.
    """
    full_bit = 1 / baud_rate                  # seconds per bit
    threshold = full_bit * minimum_bit_width  # shortest slice worth emitting
    pending = 0                               # accumulated time not yet emitted
    for bit_value, run_duration in bit_duration_values:
        pending += run_duration
        while pending > threshold:
            # Emit at most one full bit per pass until the residue is small.
            emitted = min(full_bit, pending)
            yield (bit_value, emitted / full_bit)
            pending -= emitted
def bit_values_to_bit_chunks(bit_values, start_bit=SPACE, stop_bit=MARK, lsb_on_left=True):
    """Extract the data bits from the per-bit value stream.

    bit_index|role of the bit
    ---------|----------
    0        |start bit
    1        |data bit
    2        |data bit
    3        |data bit
    4        |data bit
    5        |data bit
    6        |stop bit

    Only bits whose bit_index is in the range 1-5 are emitted, joined into
    one '0'/'1' string per frame.
    """
    # Previous bit value; treat it as a start bit initially.
    previous_bit_value = start_bit
    # Position within the current frame.
    # None until the first stop->start transition fixes the framing.
    bit_index = None
    # Accumulates the data bits of the current frame.
    chunk = []

    for current_bit_value, _ in bit_values:
        if bit_index is None:
            # Initial state: the frame timing is not known yet.
            if previous_bit_value == stop_bit and current_bit_value == start_bit:
                # First stop-bit -> start-bit transition detected;
                # this determines the frame timing.
                bit_index = 0
        else:
            # Frame timing is known.
            # Advance to the next bit of the frame.
            bit_index += 1
            if bit_index <= 5:
                # Bits 1..5 are data bits, so collect them.
                # This `if` supports both data-bit orders (12345 or 54321).
                if lsb_on_left:
                    # list.append adds at the end
                    chunk.append(current_bit_value)
                else:
                    # list.insert(0) adds at the front
                    chunk.insert(0, current_bit_value)
            else:
                # The data bits are over.
                if bit_index == 6:
                    # A stop bit is expected here; without checking it too
                    # strictly, emit the collected data bits.
                    yield ''.join(str(bit) for bit in chunk)
                    # Clear the accumulator for the next frame.
                    chunk.clear()
                if previous_bit_value == stop_bit and current_bit_value == start_bit:
                    # A start bit arrived, so reset the frame position.
                    bit_index = 0
        previous_bit_value = current_bit_value
|
normal
|
{
"blob_id": "ff67ef77958e78335dc1dc2c7e08bf42998387c6",
"index": 2374,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef binary_values_to_bit_duration(binary_values):\n \"\"\"連続する0/1の長さを測る\"\"\"\n previous_binary_value = SPACE\n previous_time = 0\n current_binary_value = SPACE\n current_time = 0\n for binary_value, time in binary_values:\n current_binary_value = binary_value\n current_time = time\n if current_binary_value != previous_binary_value:\n yield previous_binary_value, current_time - previous_time\n previous_binary_value = current_binary_value\n previous_time = current_time\n yield current_binary_value, current_time - previous_time\n\n\ndef bit_duration_to_bit_values(bit_duration_values, baud_rate=45.45,\n minimum_bit_width=0.25):\n \"\"\"短すぎる値を無視したり長い値を1bitごとに分割したりする\"\"\"\n bit_duration = 1 / baud_rate\n minimum_duration = bit_duration * minimum_bit_width\n duration = 0\n for bit_value, original_duration in bit_duration_values:\n duration += original_duration\n while duration > minimum_duration:\n handle_duration = min(bit_duration, duration)\n width = handle_duration / bit_duration\n yield bit_value, width\n duration -= handle_duration\n\n\ndef bit_values_to_bit_chunks(bit_values, start_bit=SPACE, stop_bit=MARK,\n lsb_on_left=True):\n \"\"\"1bit ごとの値からデータビットを抽出する\n\n bit_index|ビットの役割\n ---------|----------\n 0 |スタートビット\n 1 |データビット\n 2 |データビット\n 3 |データビット\n 4 |データビット\n 5 |データビット\n 6 |ストップビット\n\n bit_index が 1-5の範囲のみを出力する\n \"\"\"\n previous_bit_value = start_bit\n bit_index = None\n chunk = []\n for current_bit_value, _ in bit_values:\n if bit_index is None:\n if (previous_bit_value == stop_bit and current_bit_value ==\n start_bit):\n bit_index = 0\n else:\n bit_index += 1\n if bit_index <= 5:\n if lsb_on_left:\n chunk.append(current_bit_value)\n else:\n chunk.insert(0, current_bit_value)\n else:\n if bit_index == 6:\n yield ''.join(str(bit) for bit in chunk)\n chunk.clear()\n if (previous_bit_value == stop_bit and current_bit_value ==\n start_bit):\n bit_index = 0\n previous_bit_value = current_bit_value\n",
"step-3": "<mask token>\n\n\ndef frame_to_bit_chunks(frame_values, baud_rate=45.45, start_bit=SPACE,\n stop_bit=MARK):\n \"\"\"フレームごとの信号強度からデータビットのまとまりに変換する\"\"\"\n binary_values = frame_to_binary_values(frame_values)\n bit_duration_values = binary_values_to_bit_duration(binary_values)\n bit_values = bit_duration_to_bit_values(bit_duration_values, baud_rate)\n bit_chunks = bit_values_to_bit_chunks(bit_values, start_bit, stop_bit)\n return bit_chunks\n\n\n<mask token>\n\n\ndef binary_values_to_bit_duration(binary_values):\n \"\"\"連続する0/1の長さを測る\"\"\"\n previous_binary_value = SPACE\n previous_time = 0\n current_binary_value = SPACE\n current_time = 0\n for binary_value, time in binary_values:\n current_binary_value = binary_value\n current_time = time\n if current_binary_value != previous_binary_value:\n yield previous_binary_value, current_time - previous_time\n previous_binary_value = current_binary_value\n previous_time = current_time\n yield current_binary_value, current_time - previous_time\n\n\ndef bit_duration_to_bit_values(bit_duration_values, baud_rate=45.45,\n minimum_bit_width=0.25):\n \"\"\"短すぎる値を無視したり長い値を1bitごとに分割したりする\"\"\"\n bit_duration = 1 / baud_rate\n minimum_duration = bit_duration * minimum_bit_width\n duration = 0\n for bit_value, original_duration in bit_duration_values:\n duration += original_duration\n while duration > minimum_duration:\n handle_duration = min(bit_duration, duration)\n width = handle_duration / bit_duration\n yield bit_value, width\n duration -= handle_duration\n\n\ndef bit_values_to_bit_chunks(bit_values, start_bit=SPACE, stop_bit=MARK,\n lsb_on_left=True):\n \"\"\"1bit ごとの値からデータビットを抽出する\n\n bit_index|ビットの役割\n ---------|----------\n 0 |スタートビット\n 1 |データビット\n 2 |データビット\n 3 |データビット\n 4 |データビット\n 5 |データビット\n 6 |ストップビット\n\n bit_index が 1-5の範囲のみを出力する\n \"\"\"\n previous_bit_value = start_bit\n bit_index = None\n chunk = []\n for current_bit_value, _ in bit_values:\n if bit_index is None:\n if (previous_bit_value == stop_bit and 
current_bit_value ==\n start_bit):\n bit_index = 0\n else:\n bit_index += 1\n if bit_index <= 5:\n if lsb_on_left:\n chunk.append(current_bit_value)\n else:\n chunk.insert(0, current_bit_value)\n else:\n if bit_index == 6:\n yield ''.join(str(bit) for bit in chunk)\n chunk.clear()\n if (previous_bit_value == stop_bit and current_bit_value ==\n start_bit):\n bit_index = 0\n previous_bit_value = current_bit_value\n",
"step-4": "<mask token>\n\n\ndef frame_to_bit_chunks(frame_values, baud_rate=45.45, start_bit=SPACE,\n stop_bit=MARK):\n \"\"\"フレームごとの信号強度からデータビットのまとまりに変換する\"\"\"\n binary_values = frame_to_binary_values(frame_values)\n bit_duration_values = binary_values_to_bit_duration(binary_values)\n bit_values = bit_duration_to_bit_values(bit_duration_values, baud_rate)\n bit_chunks = bit_values_to_bit_chunks(bit_values, start_bit, stop_bit)\n return bit_chunks\n\n\ndef frame_to_binary_values(frame_values, threshold=1.0):\n \"\"\"フレームごとの信号強度から0/1を判定する\"\"\"\n current_binary_value = SPACE\n for mark_value, space_value, time in frame_values:\n if mark_value > space_value * threshold:\n current_binary_value = MARK\n if space_value > mark_value * threshold:\n current_binary_value = SPACE\n yield current_binary_value, time\n\n\ndef binary_values_to_bit_duration(binary_values):\n \"\"\"連続する0/1の長さを測る\"\"\"\n previous_binary_value = SPACE\n previous_time = 0\n current_binary_value = SPACE\n current_time = 0\n for binary_value, time in binary_values:\n current_binary_value = binary_value\n current_time = time\n if current_binary_value != previous_binary_value:\n yield previous_binary_value, current_time - previous_time\n previous_binary_value = current_binary_value\n previous_time = current_time\n yield current_binary_value, current_time - previous_time\n\n\ndef bit_duration_to_bit_values(bit_duration_values, baud_rate=45.45,\n minimum_bit_width=0.25):\n \"\"\"短すぎる値を無視したり長い値を1bitごとに分割したりする\"\"\"\n bit_duration = 1 / baud_rate\n minimum_duration = bit_duration * minimum_bit_width\n duration = 0\n for bit_value, original_duration in bit_duration_values:\n duration += original_duration\n while duration > minimum_duration:\n handle_duration = min(bit_duration, duration)\n width = handle_duration / bit_duration\n yield bit_value, width\n duration -= handle_duration\n\n\ndef bit_values_to_bit_chunks(bit_values, start_bit=SPACE, stop_bit=MARK,\n lsb_on_left=True):\n \"\"\"1bit 
ごとの値からデータビットを抽出する\n\n bit_index|ビットの役割\n ---------|----------\n 0 |スタートビット\n 1 |データビット\n 2 |データビット\n 3 |データビット\n 4 |データビット\n 5 |データビット\n 6 |ストップビット\n\n bit_index が 1-5の範囲のみを出力する\n \"\"\"\n previous_bit_value = start_bit\n bit_index = None\n chunk = []\n for current_bit_value, _ in bit_values:\n if bit_index is None:\n if (previous_bit_value == stop_bit and current_bit_value ==\n start_bit):\n bit_index = 0\n else:\n bit_index += 1\n if bit_index <= 5:\n if lsb_on_left:\n chunk.append(current_bit_value)\n else:\n chunk.insert(0, current_bit_value)\n else:\n if bit_index == 6:\n yield ''.join(str(bit) for bit in chunk)\n chunk.clear()\n if (previous_bit_value == stop_bit and current_bit_value ==\n start_bit):\n bit_index = 0\n previous_bit_value = current_bit_value\n",
"step-5": "\nSPACE = 0\nMARK = 1\n\ndef frame_to_bit_chunks(frame_values, baud_rate=45.45, start_bit=SPACE, stop_bit=MARK):\n \"\"\"フレームごとの信号強度からデータビットのまとまりに変換する\"\"\"\n\n binary_values = frame_to_binary_values(frame_values)\n bit_duration_values = binary_values_to_bit_duration(binary_values)\n bit_values = bit_duration_to_bit_values(bit_duration_values, baud_rate)\n bit_chunks = bit_values_to_bit_chunks(bit_values, start_bit, stop_bit)\n\n return bit_chunks\n\n\ndef frame_to_binary_values(frame_values, threshold=1.0):\n \"\"\"フレームごとの信号強度から0/1を判定する\"\"\"\n\n # ヒステリシスを持たせるときの前の状態\n current_binary_value = SPACE\n for mark_value, space_value, time in frame_values:\n # mark の強度が space の強度の threshold 倍を越えていれば mark と判断する\n if mark_value > space_value * threshold:\n current_binary_value = MARK\n # space の強度が mark の強度の threshold 倍を越えていれば space と判断する\n if space_value > mark_value * threshold:\n current_binary_value = SPACE\n yield (current_binary_value, time)\n\n\ndef binary_values_to_bit_duration(binary_values):\n \"\"\"連続する0/1の長さを測る\"\"\"\n\n # 前の値\n previous_binary_value = SPACE\n # 前の値に変化した経過時間\n previous_time = 0\n # 今の値\n current_binary_value = SPACE\n # 今の値に変化した経過時間\n current_time = 0\n for binary_value, time in binary_values:\n # 今の値を代入する\n current_binary_value = binary_value\n current_time = time\n # 前と値が変わっていれば、前の値とその長さを出力する\n if current_binary_value != previous_binary_value:\n yield (previous_binary_value, current_time - previous_time)\n # 今の値を前の値に代入する\n previous_binary_value = current_binary_value\n previous_time = current_time\n\n # ループ内では最後の値は出力されないので、ここで出力する\n yield (current_binary_value, current_time - previous_time)\n\n\ndef bit_duration_to_bit_values(bit_duration_values, baud_rate=45.45, minimum_bit_width=0.25):\n \"\"\"短すぎる値を無視したり長い値を1bitごとに分割したりする\"\"\"\n\n # 1bit あたりの時間(秒)\n bit_duration = 1 / baud_rate\n # 基準(minimum_bit_width) bit あたりの時間(秒)\n minimum_duration = bit_duration * minimum_bit_width\n # 最後に出力してからの経過時間\n duration = 0\n for bit_value, 
original_duration in bit_duration_values:\n # 次の値を読んで、経過時間を足す\n duration += original_duration\n while duration > minimum_duration:\n # 今の値の経過時間が基準を超えている間繰り返す\n handle_duration = min(bit_duration, duration)\n width = handle_duration / bit_duration\n yield (bit_value, width)\n # 出力した分だけ経過時間を減らす\n duration -= handle_duration\n\n\ndef bit_values_to_bit_chunks(bit_values, start_bit=SPACE, stop_bit=MARK, lsb_on_left=True):\n \"\"\"1bit ごとの値からデータビットを抽出する\n\n bit_index|ビットの役割\n ---------|----------\n 0 |スタートビット\n 1 |データビット\n 2 |データビット\n 3 |データビット\n 4 |データビット\n 5 |データビット\n 6 |ストップビット\n\n bit_index が 1-5の範囲のみを出力する\n \"\"\"\n # 前のデータ とりあえずスタートビットとしておく\n previous_bit_value = start_bit\n # データビットの何番目を処理しているかを数えておく\n # はじめはどのタイミングか分からないので None にしておく\n bit_index = None\n # データビットを貯める\n chunk = []\n\n for current_bit_value, _ in bit_values:\n if bit_index is None:\n # 初期状態、まだデータのタイミングが分かっていない\n if previous_bit_value == stop_bit and current_bit_value == start_bit:\n # 1つ目のストップビット→スタートビットの遷移を検出\n # タイミングが決まる\n bit_index = 0\n else:\n # データのタイミングが分かっている\n # 次のビットを読む\n bit_index += 1\n if bit_index <= 5:\n # 5個目まではデータビットなので読む\n # この if はデータビットの順番が 12345 か 54321 のどちらにも対応するためのもの\n if lsb_on_left:\n # list への append は最後に追加する\n chunk.append(current_bit_value)\n else:\n # list への insert(0) は最初に追加する\n chunk.insert(0, current_bit_value)\n else:\n # データビットが終わった\n if bit_index == 6:\n # ストップビットが来るはず あんまり気にしないで貯めたデータを出力する\n yield ''.join(str(bit) for bit in chunk)\n # データを空にしておく\n chunk.clear()\n if previous_bit_value == stop_bit and current_bit_value == start_bit:\n # スタートビットが来たので状態をリセットする\n bit_index = 0\n previous_bit_value = current_bit_value\n\n",
"step-ids": [
0,
3,
4,
5,
7
]
}
|
[
0,
3,
4,
5,
7
] |
from datetime import datetime
from unittest import mock
import pytest
from freezegun import freeze_time
from datahub.ingestion.api.common import PipelineContext
from src.datahub.ingestion.source.aws.s3_util import make_s3_urn
# Fixed wall-clock time used with freeze_time so time-dependent test output is deterministic.
FROZEN_TIME = "2020-04-14 07:00:00"
@pytest.mark.integration
def test_athena_config_query_location_old_plus_new_value_not_allowed():
    """Setting both the deprecated s3_staging_dir and the new
    query_result_location at once must be rejected."""
    from datahub.ingestion.source.sql.athena import AthenaConfig

    raw_config = {
        "aws_region": "us-west-1",
        "s3_staging_dir": "s3://sample-staging-dir/",
        "query_result_location": "s3://query_result_location",
        "work_group": "test-workgroup",
    }
    with pytest.raises(ValueError):
        AthenaConfig.parse_obj(raw_config)
@pytest.mark.integration
def test_athena_config_staging_dir_is_set_as_query_result():
    """The deprecated s3_staging_dir value must be migrated into
    query_result_location when the config is parsed."""
    from datahub.ingestion.source.sql.athena import AthenaConfig

    legacy_raw = {
        "aws_region": "us-west-1",
        "s3_staging_dir": "s3://sample-staging-dir/",
        "work_group": "test-workgroup",
    }
    migrated_raw = {
        "aws_region": "us-west-1",
        "query_result_location": "s3://sample-staging-dir/",
        "work_group": "test-workgroup",
    }
    config = AthenaConfig.parse_obj(legacy_raw)
    expected_config = AthenaConfig.parse_obj(migrated_raw)
    # Compare serialized forms so every field is covered.
    assert config.json() == expected_config.json()
@pytest.mark.integration
def test_athena_uri():
from datahub.ingestion.source.sql.athena import AthenaConfig
config = AthenaConfig.parse_obj(
{
"aws_region": "us-west-1",
"query_result_location": "s3://query-result-location/",
"work_group": "test-workgroup",
}
)
assert (
config.get_sql_alchemy_url()
== "awsathena+rest://@athena.us-west-1.amazonaws.com:443/?s3_staging_dir=s3%3A%2F%2Fquery-result-location%2F&work_group=test-workgroup&catalog_name=awsdatacatalog&duration_seconds=3600"
)
@pytest.mark.integration
@freeze_time(FROZEN_TIME)
def test_athena_get_table_properties():
from pyathena.model import AthenaTableMetadata
from datahub.ingestion.source.sql.athena import AthenaConfig, AthenaSource
config = AthenaConfig.parse_obj(
{
"aws_region": "us-west-1",
"s3_staging_dir": "s3://sample-staging-dir/",
"work_group": "test-workgroup",
}
)
schema: str = "test_schema"
table: str = "test_table"
table_metadata = {
"TableMetadata": {
"Name": "test",
"TableType": "testType",
"CreateTime": datetime.now(),
"LastAccessTime": datetime.now(),
"PartitionKeys": [
{"Name": "testKey", "Type": "string", "Comment": "testComment"}
],
"Parameters": {
"comment": "testComment",
"location": "s3://testLocation",
"inputformat": "testInputFormat",
"outputformat": "testOutputFormat",
"serde.serialization.lib": "testSerde",
},
},
}
mock_cursor = mock.MagicMock()
mock_inspector = mock.MagicMock()
mock_inspector.engine.raw_connection().cursor.return_value = mock_cursor
mock_cursor._get_table_metadata.return_value = AthenaTableMetadata(
response=table_metadata
)
ctx = PipelineContext(run_id="test")
source = AthenaSource(config=config, ctx=ctx)
description, custom_properties, location = source.get_table_properties(
inspector=mock_inspector, table=table, schema=schema
)
assert custom_properties == {
"comment": "testComment",
"create_time": "2020-04-14 07:00:00",
"inputformat": "testInputFormat",
"last_access_time": "2020-04-14 07:00:00",
"location": "s3://testLocation",
"outputformat": "testOutputFormat",
"partition_keys": '[{"name": "testKey", "type": "string", "comment": "testComment"}]',
"serde.serialization.lib": "testSerde",
"table_type": "testType",
}
assert location == make_s3_urn("s3://testLocation", "PROD")
|
normal
|
{
"blob_id": "1304b6373edeca394070b8a3d144608cf07172e3",
"index": 9448,
"step-1": "<mask token>\n\n\[email protected]\ndef test_athena_config_query_location_old_plus_new_value_not_allowed():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n with pytest.raises(ValueError):\n AthenaConfig.parse_obj({'aws_region': 'us-west-1', 's3_staging_dir':\n 's3://sample-staging-dir/', 'query_result_location':\n 's3://query_result_location', 'work_group': 'test-workgroup'})\n\n\[email protected]\ndef test_athena_config_staging_dir_is_set_as_query_result():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n expected_config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 'query_result_location': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n assert config.json() == expected_config.json()\n\n\n<mask token>\n\n\[email protected]\n@freeze_time(FROZEN_TIME)\ndef test_athena_get_table_properties():\n from pyathena.model import AthenaTableMetadata\n from datahub.ingestion.source.sql.athena import AthenaConfig, AthenaSource\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n schema: str = 'test_schema'\n table: str = 'test_table'\n table_metadata = {'TableMetadata': {'Name': 'test', 'TableType':\n 'testType', 'CreateTime': datetime.now(), 'LastAccessTime':\n datetime.now(), 'PartitionKeys': [{'Name': 'testKey', 'Type':\n 'string', 'Comment': 'testComment'}], 'Parameters': {'comment':\n 'testComment', 'location': 's3://testLocation', 'inputformat':\n 'testInputFormat', 'outputformat': 'testOutputFormat',\n 'serde.serialization.lib': 'testSerde'}}}\n mock_cursor = mock.MagicMock()\n mock_inspector = mock.MagicMock()\n mock_inspector.engine.raw_connection().cursor.return_value = mock_cursor\n mock_cursor._get_table_metadata.return_value = AthenaTableMetadata(response\n 
=table_metadata)\n ctx = PipelineContext(run_id='test')\n source = AthenaSource(config=config, ctx=ctx)\n description, custom_properties, location = source.get_table_properties(\n inspector=mock_inspector, table=table, schema=schema)\n assert custom_properties == {'comment': 'testComment', 'create_time':\n '2020-04-14 07:00:00', 'inputformat': 'testInputFormat',\n 'last_access_time': '2020-04-14 07:00:00', 'location':\n 's3://testLocation', 'outputformat': 'testOutputFormat',\n 'partition_keys':\n '[{\"name\": \"testKey\", \"type\": \"string\", \"comment\": \"testComment\"}]',\n 'serde.serialization.lib': 'testSerde', 'table_type': 'testType'}\n assert location == make_s3_urn('s3://testLocation', 'PROD')\n",
"step-2": "<mask token>\n\n\[email protected]\ndef test_athena_config_query_location_old_plus_new_value_not_allowed():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n with pytest.raises(ValueError):\n AthenaConfig.parse_obj({'aws_region': 'us-west-1', 's3_staging_dir':\n 's3://sample-staging-dir/', 'query_result_location':\n 's3://query_result_location', 'work_group': 'test-workgroup'})\n\n\[email protected]\ndef test_athena_config_staging_dir_is_set_as_query_result():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n expected_config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 'query_result_location': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n assert config.json() == expected_config.json()\n\n\[email protected]\ndef test_athena_uri():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 'query_result_location': 's3://query-result-location/',\n 'work_group': 'test-workgroup'})\n assert config.get_sql_alchemy_url(\n ) == 'awsathena+rest://@athena.us-west-1.amazonaws.com:443/?s3_staging_dir=s3%3A%2F%2Fquery-result-location%2F&work_group=test-workgroup&catalog_name=awsdatacatalog&duration_seconds=3600'\n\n\[email protected]\n@freeze_time(FROZEN_TIME)\ndef test_athena_get_table_properties():\n from pyathena.model import AthenaTableMetadata\n from datahub.ingestion.source.sql.athena import AthenaConfig, AthenaSource\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n schema: str = 'test_schema'\n table: str = 'test_table'\n table_metadata = {'TableMetadata': {'Name': 'test', 'TableType':\n 'testType', 'CreateTime': datetime.now(), 'LastAccessTime':\n datetime.now(), 'PartitionKeys': [{'Name': 
'testKey', 'Type':\n 'string', 'Comment': 'testComment'}], 'Parameters': {'comment':\n 'testComment', 'location': 's3://testLocation', 'inputformat':\n 'testInputFormat', 'outputformat': 'testOutputFormat',\n 'serde.serialization.lib': 'testSerde'}}}\n mock_cursor = mock.MagicMock()\n mock_inspector = mock.MagicMock()\n mock_inspector.engine.raw_connection().cursor.return_value = mock_cursor\n mock_cursor._get_table_metadata.return_value = AthenaTableMetadata(response\n =table_metadata)\n ctx = PipelineContext(run_id='test')\n source = AthenaSource(config=config, ctx=ctx)\n description, custom_properties, location = source.get_table_properties(\n inspector=mock_inspector, table=table, schema=schema)\n assert custom_properties == {'comment': 'testComment', 'create_time':\n '2020-04-14 07:00:00', 'inputformat': 'testInputFormat',\n 'last_access_time': '2020-04-14 07:00:00', 'location':\n 's3://testLocation', 'outputformat': 'testOutputFormat',\n 'partition_keys':\n '[{\"name\": \"testKey\", \"type\": \"string\", \"comment\": \"testComment\"}]',\n 'serde.serialization.lib': 'testSerde', 'table_type': 'testType'}\n assert location == make_s3_urn('s3://testLocation', 'PROD')\n",
"step-3": "<mask token>\nFROZEN_TIME = '2020-04-14 07:00:00'\n\n\[email protected]\ndef test_athena_config_query_location_old_plus_new_value_not_allowed():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n with pytest.raises(ValueError):\n AthenaConfig.parse_obj({'aws_region': 'us-west-1', 's3_staging_dir':\n 's3://sample-staging-dir/', 'query_result_location':\n 's3://query_result_location', 'work_group': 'test-workgroup'})\n\n\[email protected]\ndef test_athena_config_staging_dir_is_set_as_query_result():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n expected_config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 'query_result_location': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n assert config.json() == expected_config.json()\n\n\[email protected]\ndef test_athena_uri():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 'query_result_location': 's3://query-result-location/',\n 'work_group': 'test-workgroup'})\n assert config.get_sql_alchemy_url(\n ) == 'awsathena+rest://@athena.us-west-1.amazonaws.com:443/?s3_staging_dir=s3%3A%2F%2Fquery-result-location%2F&work_group=test-workgroup&catalog_name=awsdatacatalog&duration_seconds=3600'\n\n\[email protected]\n@freeze_time(FROZEN_TIME)\ndef test_athena_get_table_properties():\n from pyathena.model import AthenaTableMetadata\n from datahub.ingestion.source.sql.athena import AthenaConfig, AthenaSource\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n schema: str = 'test_schema'\n table: str = 'test_table'\n table_metadata = {'TableMetadata': {'Name': 'test', 'TableType':\n 'testType', 'CreateTime': datetime.now(), 'LastAccessTime':\n 
datetime.now(), 'PartitionKeys': [{'Name': 'testKey', 'Type':\n 'string', 'Comment': 'testComment'}], 'Parameters': {'comment':\n 'testComment', 'location': 's3://testLocation', 'inputformat':\n 'testInputFormat', 'outputformat': 'testOutputFormat',\n 'serde.serialization.lib': 'testSerde'}}}\n mock_cursor = mock.MagicMock()\n mock_inspector = mock.MagicMock()\n mock_inspector.engine.raw_connection().cursor.return_value = mock_cursor\n mock_cursor._get_table_metadata.return_value = AthenaTableMetadata(response\n =table_metadata)\n ctx = PipelineContext(run_id='test')\n source = AthenaSource(config=config, ctx=ctx)\n description, custom_properties, location = source.get_table_properties(\n inspector=mock_inspector, table=table, schema=schema)\n assert custom_properties == {'comment': 'testComment', 'create_time':\n '2020-04-14 07:00:00', 'inputformat': 'testInputFormat',\n 'last_access_time': '2020-04-14 07:00:00', 'location':\n 's3://testLocation', 'outputformat': 'testOutputFormat',\n 'partition_keys':\n '[{\"name\": \"testKey\", \"type\": \"string\", \"comment\": \"testComment\"}]',\n 'serde.serialization.lib': 'testSerde', 'table_type': 'testType'}\n assert location == make_s3_urn('s3://testLocation', 'PROD')\n",
"step-4": "from datetime import datetime\nfrom unittest import mock\nimport pytest\nfrom freezegun import freeze_time\nfrom datahub.ingestion.api.common import PipelineContext\nfrom src.datahub.ingestion.source.aws.s3_util import make_s3_urn\nFROZEN_TIME = '2020-04-14 07:00:00'\n\n\[email protected]\ndef test_athena_config_query_location_old_plus_new_value_not_allowed():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n with pytest.raises(ValueError):\n AthenaConfig.parse_obj({'aws_region': 'us-west-1', 's3_staging_dir':\n 's3://sample-staging-dir/', 'query_result_location':\n 's3://query_result_location', 'work_group': 'test-workgroup'})\n\n\[email protected]\ndef test_athena_config_staging_dir_is_set_as_query_result():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n expected_config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 'query_result_location': 's3://sample-staging-dir/', 'work_group':\n 'test-workgroup'})\n assert config.json() == expected_config.json()\n\n\[email protected]\ndef test_athena_uri():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 'query_result_location': 's3://query-result-location/',\n 'work_group': 'test-workgroup'})\n assert config.get_sql_alchemy_url(\n ) == 'awsathena+rest://@athena.us-west-1.amazonaws.com:443/?s3_staging_dir=s3%3A%2F%2Fquery-result-location%2F&work_group=test-workgroup&catalog_name=awsdatacatalog&duration_seconds=3600'\n\n\[email protected]\n@freeze_time(FROZEN_TIME)\ndef test_athena_get_table_properties():\n from pyathena.model import AthenaTableMetadata\n from datahub.ingestion.source.sql.athena import AthenaConfig, AthenaSource\n config = AthenaConfig.parse_obj({'aws_region': 'us-west-1',\n 's3_staging_dir': 's3://sample-staging-dir/', 'work_group':\n 
'test-workgroup'})\n schema: str = 'test_schema'\n table: str = 'test_table'\n table_metadata = {'TableMetadata': {'Name': 'test', 'TableType':\n 'testType', 'CreateTime': datetime.now(), 'LastAccessTime':\n datetime.now(), 'PartitionKeys': [{'Name': 'testKey', 'Type':\n 'string', 'Comment': 'testComment'}], 'Parameters': {'comment':\n 'testComment', 'location': 's3://testLocation', 'inputformat':\n 'testInputFormat', 'outputformat': 'testOutputFormat',\n 'serde.serialization.lib': 'testSerde'}}}\n mock_cursor = mock.MagicMock()\n mock_inspector = mock.MagicMock()\n mock_inspector.engine.raw_connection().cursor.return_value = mock_cursor\n mock_cursor._get_table_metadata.return_value = AthenaTableMetadata(response\n =table_metadata)\n ctx = PipelineContext(run_id='test')\n source = AthenaSource(config=config, ctx=ctx)\n description, custom_properties, location = source.get_table_properties(\n inspector=mock_inspector, table=table, schema=schema)\n assert custom_properties == {'comment': 'testComment', 'create_time':\n '2020-04-14 07:00:00', 'inputformat': 'testInputFormat',\n 'last_access_time': '2020-04-14 07:00:00', 'location':\n 's3://testLocation', 'outputformat': 'testOutputFormat',\n 'partition_keys':\n '[{\"name\": \"testKey\", \"type\": \"string\", \"comment\": \"testComment\"}]',\n 'serde.serialization.lib': 'testSerde', 'table_type': 'testType'}\n assert location == make_s3_urn('s3://testLocation', 'PROD')\n",
"step-5": "from datetime import datetime\nfrom unittest import mock\n\nimport pytest\nfrom freezegun import freeze_time\n\nfrom datahub.ingestion.api.common import PipelineContext\nfrom src.datahub.ingestion.source.aws.s3_util import make_s3_urn\n\nFROZEN_TIME = \"2020-04-14 07:00:00\"\n\n\[email protected]\ndef test_athena_config_query_location_old_plus_new_value_not_allowed():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n\n with pytest.raises(ValueError):\n AthenaConfig.parse_obj(\n {\n \"aws_region\": \"us-west-1\",\n \"s3_staging_dir\": \"s3://sample-staging-dir/\",\n \"query_result_location\": \"s3://query_result_location\",\n \"work_group\": \"test-workgroup\",\n }\n )\n\n\[email protected]\ndef test_athena_config_staging_dir_is_set_as_query_result():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n\n config = AthenaConfig.parse_obj(\n {\n \"aws_region\": \"us-west-1\",\n \"s3_staging_dir\": \"s3://sample-staging-dir/\",\n \"work_group\": \"test-workgroup\",\n }\n )\n\n expected_config = AthenaConfig.parse_obj(\n {\n \"aws_region\": \"us-west-1\",\n \"query_result_location\": \"s3://sample-staging-dir/\",\n \"work_group\": \"test-workgroup\",\n }\n )\n\n assert config.json() == expected_config.json()\n\n\[email protected]\ndef test_athena_uri():\n from datahub.ingestion.source.sql.athena import AthenaConfig\n\n config = AthenaConfig.parse_obj(\n {\n \"aws_region\": \"us-west-1\",\n \"query_result_location\": \"s3://query-result-location/\",\n \"work_group\": \"test-workgroup\",\n }\n )\n assert (\n config.get_sql_alchemy_url()\n == \"awsathena+rest://@athena.us-west-1.amazonaws.com:443/?s3_staging_dir=s3%3A%2F%2Fquery-result-location%2F&work_group=test-workgroup&catalog_name=awsdatacatalog&duration_seconds=3600\"\n )\n\n\[email protected]\n@freeze_time(FROZEN_TIME)\ndef test_athena_get_table_properties():\n from pyathena.model import AthenaTableMetadata\n\n from datahub.ingestion.source.sql.athena import AthenaConfig, 
AthenaSource\n\n config = AthenaConfig.parse_obj(\n {\n \"aws_region\": \"us-west-1\",\n \"s3_staging_dir\": \"s3://sample-staging-dir/\",\n \"work_group\": \"test-workgroup\",\n }\n )\n schema: str = \"test_schema\"\n table: str = \"test_table\"\n\n table_metadata = {\n \"TableMetadata\": {\n \"Name\": \"test\",\n \"TableType\": \"testType\",\n \"CreateTime\": datetime.now(),\n \"LastAccessTime\": datetime.now(),\n \"PartitionKeys\": [\n {\"Name\": \"testKey\", \"Type\": \"string\", \"Comment\": \"testComment\"}\n ],\n \"Parameters\": {\n \"comment\": \"testComment\",\n \"location\": \"s3://testLocation\",\n \"inputformat\": \"testInputFormat\",\n \"outputformat\": \"testOutputFormat\",\n \"serde.serialization.lib\": \"testSerde\",\n },\n },\n }\n\n mock_cursor = mock.MagicMock()\n mock_inspector = mock.MagicMock()\n mock_inspector.engine.raw_connection().cursor.return_value = mock_cursor\n mock_cursor._get_table_metadata.return_value = AthenaTableMetadata(\n response=table_metadata\n )\n\n ctx = PipelineContext(run_id=\"test\")\n source = AthenaSource(config=config, ctx=ctx)\n description, custom_properties, location = source.get_table_properties(\n inspector=mock_inspector, table=table, schema=schema\n )\n assert custom_properties == {\n \"comment\": \"testComment\",\n \"create_time\": \"2020-04-14 07:00:00\",\n \"inputformat\": \"testInputFormat\",\n \"last_access_time\": \"2020-04-14 07:00:00\",\n \"location\": \"s3://testLocation\",\n \"outputformat\": \"testOutputFormat\",\n \"partition_keys\": '[{\"name\": \"testKey\", \"type\": \"string\", \"comment\": \"testComment\"}]',\n \"serde.serialization.lib\": \"testSerde\",\n \"table_type\": \"testType\",\n }\n\n assert location == make_s3_urn(\"s3://testLocation\", \"PROD\")\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import sys
from ulang.runtime.main import main
main(sys.argv)
|
normal
|
{
"blob_id": "e0c5498d9b18a6a32fcd2725ef4f6a1adaef6c68",
"index": 2098,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nmain(sys.argv)\n",
"step-3": "import sys\nfrom ulang.runtime.main import main\nmain(sys.argv)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""
Package for django_static_template.
"""
|
normal
|
{
"blob_id": "818623621b609d67f8f657be4ade6e3bb86a0bc5",
"index": 4226,
"step-1": "<mask token>\n",
"step-2": "\"\"\"\r\nPackage for django_static_template.\r\n\"\"\"\r\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
#!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from titus.fcn import Fcn
from titus.fcn import LibFcn
from titus.signature import Sig
from titus.signature import Sigs
from titus.datatype import *
from titus.errors import *
from titus.util import callfcn, div
import titus.P as P
from functools import reduce
provides = {}
def provide(fcn):
provides[fcn.name] = fcn
prefix = "la."
def np():
import numpy
return numpy
def rowKeys(x):
return set(x.keys())
def colKeys(x):
if len(x) == 0:
return set()
else:
return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in list(x.values())])
def arraysToMatrix(x):
return np().matrix(x, dtype=np().double)
def arrayToRowVector(x):
return np().matrix(x, dtype=np().double).T
def rowVectorToArray(x):
return x.T.tolist()[0]
def matrixToArrays(x):
return x.tolist()
def mapsToMatrix(x, rows, cols):
return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in rows], dtype=np().double)
def mapToRowVector(x, keys):
return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T
def rowVectorToMap(x, keys):
return dict(list(zip(keys, x.T.tolist()[0])))
def matrixToMaps(x, rows, cols):
return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x.tolist()))
def raggedArray(x):
collens = list(map(len, x))
return max(collens) != min(collens)
def raggedMap(x):
return len(set(len(xi) for xi in list(x.values()))) != 1
class MapApply(LibFcn):
name = prefix + "map"
sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
Sig([{"x": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
errcodeBase = 24000
def __call__(self, state, scope, pos, paramTypes, x, fcn):
if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x]
elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for j, xj in list(xi.items()))) for i, xi in list(x.items()))
provide(MapApply())
class Scale(LibFcn):
name = prefix + "scale"
sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"alpha": P.Double()}], P.Array(P.Double())),
Sig([{"x": P.Array(P.Array(P.Double()))}, {"alpha": P.Double()}], P.Array(P.Array(P.Double()))),
Sig([{"x": P.Map(P.Double())}, {"alpha": P.Double()}], P.Map(P.Double())),
Sig([{"x": P.Map(P.Map(P.Double()))}, {"alpha": P.Double()}], P.Map(P.Map(P.Double())))])
errcodeBase = 24010
def __call__(self, state, scope, pos, paramTypes, x, alpha):
if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
return [[xj * alpha for xj in xi] for xi in x]
elif isinstance(x, (list, tuple)):
return [xi * alpha for xi in x]
elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):
return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items()))) for i, xi in list(x.items()))
else:
return dict((i, xi * alpha) for i, xi in list(x.items()))
provide(Scale())
class ZipMap(LibFcn):
name = prefix + "zipmap"
sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
errcodeBase = 24020
def __call__(self, state, scope, pos, paramTypes, x, y, fcn):
if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
rows = rowKeys(x).union(rowKeys(y))
cols = colKeys(x).union(colKeys(y))
return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {}).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for i in rows)
provide(ZipMap())
class Add(LibFcn):
name = prefix + "add"
sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
errcodeBase = 24030
def __call__(self, state, scope, pos, paramTypes, x, y):
if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
return [[xj + yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
if len(x) != len(y):
raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
return [xi + yi for xi, yi in zip(x, y)]
elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
rows = rowKeys(x).union(rowKeys(y))
cols = colKeys(x).union(colKeys(y))
return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)
else:
rows = rowKeys(x).union(rowKeys(y))
return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)
provide(Add())
class Sub(LibFcn):
name = prefix + "sub"
sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
errcodeBase = 24040
def __call__(self, state, scope, pos, paramTypes, x, y):
if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
return [[xj - yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
if len(x) != len(y):
raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
return [xi - yi for xi, yi in zip(x, y)]
elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
rows = rowKeys(x).union(rowKeys(y))
cols = colKeys(x).union(colKeys(y))
return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)
else:
rows = rowKeys(x).union(rowKeys(y))
return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)
provide(Sub())
class Dot(LibFcn):
name = prefix + "dot"
sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
errcodeBase = 24050
def __call__(self, state, scope, pos, paramTypes, x, y):
if paramTypes[1]["type"] == "array":
if isinstance(paramTypes[1]["items"], dict) and paramTypes[1]["items"]["type"] == "array":
# array matrix-matrix case
bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
any(any(math.isnan(z) or math.isinf(z) for z in row) for row in y)
xmat = arraysToMatrix(x)
ymat = arraysToMatrix(y)
if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
try:
if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
return matrixToArrays(np().dot(xmat, ymat))
except ValueError:
raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
else:
# array matrix-vector case
bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
any(math.isnan(z) or math.isinf(z) for z in y)
xmat = arraysToMatrix(x)
ymat = arrayToRowVector(y)
if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
try:
if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
return rowVectorToArray(np().dot(xmat, ymat))
except ValueError:
raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
elif paramTypes[1]["type"] == "map":
if isinstance(paramTypes[1]["values"], dict) and paramTypes[1]["values"]["type"] == "map":
# map matrix-matrix case
bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(y.values()))
rows = list(rowKeys(x))
inter = list(colKeys(x).union(rowKeys(y)))
cols = list(colKeys(y))
xmat = mapsToMatrix(x, rows, inter)
ymat = mapsToMatrix(y, inter, cols)
if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
return matrixToMaps(np().dot(xmat, ymat), rows, cols)
else:
# map matrix-vector case
bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
any(math.isnan(z) or math.isinf(z) for z in list(y.values()))
rows = list(rowKeys(x))
cols = list(colKeys(x).union(rowKeys(y)))
xmat = mapsToMatrix(x, rows, cols)
ymat = mapToRowVector(y, cols)
if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
return rowVectorToMap(np().dot(xmat, ymat), rows)
provide(Dot())
class Transpose(LibFcn):
    """la.transpose: return the transpose of a matrix given as nested arrays or nested maps."""
    name = prefix + "transpose"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24060
    def __call__(self, state, scope, pos, paramTypes, x):
        """Swap rows and columns; raises on an empty or ragged matrix."""
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            # array-of-arrays case: positional indices are the row/column labels
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            out = []
            for c in range(ncol):
                out.append([x[r][c] for r in range(nrow)])
            return out
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            # map-of-maps case: string keys play the role of indices
            rkeys = rowKeys(x)
            ckeys = colKeys(x)
            if len(rkeys) < 1 or len(ckeys) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return dict((c, dict((r, x[r][c]) for r in rkeys)) for c in ckeys)
provide(Transpose())
class Inverse(LibFcn):
    """la.inverse: invert a matrix expressed as nested arrays or nested maps."""
    name = prefix + "inverse"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24070
    def __call__(self, state, scope, pos, paramTypes, x):
        """Validate shape, then delegate to numpy's matrix inverse (.I)."""
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return matrixToArrays(arraysToMatrix(x).I)
        elif isinstance(x, dict) and all(isinstance(v, dict) for v in list(x.values())):
            rkeys = list(rowKeys(x))
            ckeys = list(colKeys(x))
            if not rkeys or not ckeys:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            inverted = mapsToMatrix(x, rkeys, ckeys).I
            # key order is swapped on output: the inverse of a rows-by-cols
            # labeling is cols-by-rows
            return matrixToMaps(inverted, ckeys, rkeys)
provide(Inverse())
class Trace(LibFcn):
    """la.trace: return the trace (sum of diagonal elements) of a matrix.

    The array form sums x[i][i] up to the smaller dimension; the map form
    sums x[k][k] over keys that appear as both a row and a column label.
    The PFA signature declares a Double return, so every path yields a float.
    """
    name = prefix + "trace"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows == 0:
                return 0.0
            else:
                cols = len(x[0])
                if raggedArray(x):
                    raise PFARuntimeException("ragged columns", self.errcodeBase + 0, self.name, pos)
                # float() guards against sum() returning int 0 when the
                # diagonal is empty (e.g. a matrix with zero columns)
                return float(sum(x[i][i] for i in range(min(rows, cols))))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = rowKeys(x).intersection(colKeys(x))
            # float() keeps the declared Double return type even when the
            # row/column key sets are disjoint (sum of nothing is int 0)
            return float(sum(x[i][i] for i in keys))
provide(Trace())
class Det(LibFcn):
    """la.det: return the determinant of a square matrix (NaN if any entry is non-finite)."""
    name = prefix + "det"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090
    def __call__(self, state, scope, pos, paramTypes, x):
        """Validate shape/finiteness, then delegate to numpy.linalg.det."""
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if nrow != ncol:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            hasBad = any(math.isnan(v) or math.isinf(v) for row in x for v in row)
            if hasBad:
                return float("nan")
            return float(np().linalg.det(arraysToMatrix(x)))
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            labels = list(rowKeys(x).union(colKeys(x)))
            if len(labels) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            hasBad = any(math.isnan(v) or math.isinf(v)
                         for row in list(x.values()) for v in list(row.values()))
            if hasBad:
                return float("nan")
            # the union of row/column labels defines a square matrix;
            # mapsToMatrix fills cells absent from the maps
            return float(np().linalg.det(mapsToMatrix(x, labels, labels)))
provide(Det())
class Symmetric(LibFcn):
    """la.symmetric: true if a matrix equals its own transpose, element-wise within tol."""
    name = prefix + "symmetric"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"tol": P.Double()}], P.Boolean()),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"tol": P.Double()}], P.Boolean())])
    errcodeBase = 24100
    @staticmethod
    def same(x, y, tol):
        """True if x and y agree: both finite and within tol, both NaN, or
        infinities of the same sign; any other combination is False."""
        xFinite = not math.isinf(x) and not math.isnan(x)
        yFinite = not math.isinf(y) and not math.isnan(y)
        if xFinite and yFinite:
            return abs(x - y) < tol
        if math.isnan(x) and math.isnan(y):
            return True
        if math.isinf(x) and math.isinf(y):
            return (x > 0.0) == (y > 0.0)
        return False
    def __call__(self, state, scope, pos, paramTypes, x, tol):
        """Compare every (i, j) entry with its (j, i) mirror using same()."""
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if nrow != ncol:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            for i in range(nrow):
                for j in range(ncol):
                    if not self.same(x[i][j], x[j][i], tol):
                        return False
            return True
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            labels = list(rowKeys(x).union(colKeys(x)))
            if len(labels) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            # cells absent from the maps are treated as 0.0
            for i in labels:
                for j in labels:
                    if not self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol):
                        return False
            return True
provide(Symmetric())
class EigenBasis(LibFcn):
    """la.eigenBasis: compute a basis of scaled eigenvectors for a symmetric matrix.

    Each output row is a sign-normalized eigenvector of the symmetrized input,
    scaled by 1/sqrt(|eigenvalue|); rows are ordered by ascending scale factor
    (i.e. largest |eigenvalue| first).
    """
    name = prefix + "eigenBasis"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24110
    def calculate(self, x, size):
        """Return a size-by-size numpy array of scaled, ordered eigenvectors of x."""
        # symmetrize: a real symmetric matrix has real eigenvalues/eigenvectors,
        # so this removes any floating-point asymmetry before eig()
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        # eigenvectors are only defined up to sign; flip each so its first
        # component is non-negative, making the output deterministic
        evects2 = [evects[:,i] * (-1.0 if evects[0,i] < 0.0 else 1.0) for i in range(size)]
        # div is the project-wide division helper — presumably it maps 1/0 to a
        # non-raising value for zero eigenvalues (TODO confirm against its def)
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                # row i of the output is eigenvector order[i] scaled by its factor
                out[i,j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out
    def __call__(self, state, scope, pos, paramTypes, x):
        """Validate the matrix (non-empty, not ragged, square, finite) and
        dispatch to calculate() for the array or map representation."""
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            # union of row and column labels defines the (square) matrix extent
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            # output rows are labeled "0", "1", ... (eigenbasis order);
            # output columns keep the input's key labels
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)), list(map(str, range(len(keys)))), keys)
provide(EigenBasis())
class Truncate(LibFcn):
    """PFA la.truncate: keep only a leading subset of a matrix's rows.

    Array form: keep the first ``keep`` rows.  Map form: keep the rows
    whose keys appear in the ``keep`` list (inner row maps are returned
    unmodified).
    """
    name = prefix + "truncate"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"keep": P.Int()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"keep": P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120
    def __call__(self, state, scope, pos, paramTypes, x, keep):
        # A negative row count behaves the same as keeping zero rows
        # (avoids Python's negative-slice semantics in x[:keep]).
        if isinstance(keep, int) and keep < 0:
            keep = 0
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            numRows = len(x)
            if numRows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            numCols = len(x[0])
            if numCols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return x[:keep]
        elif isinstance(x, dict) and all(isinstance(x[k], dict) for k in list(x.keys())):
            rowNames = rowKeys(x)
            colNames = colKeys(x)
            if len(rowNames) < 1 or len(colNames) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            return dict((name, x[name]) for name in rowNames if name in keep)
# Register la.truncate in the PFA function library.
provide(Truncate())
|
normal
|
{
"blob_id": "780dc49c3eaef3fb25ca0aac760326b1c3adc633",
"index": 6002,
"step-1": "<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, 
self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n 
raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass 
Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, 
self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n 
name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, 
self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\n<mask token>\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = 
Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) 
!= len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n 
raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = 
any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, 
self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise 
PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise 
PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise 
PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) 
for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef np():\n import numpy\n return numpy\n\n\ndef rowKeys(x):\n return set(x.keys())\n\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in\n list(x.values())])\n\n\n<mask token>\n\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\n\n<mask token>\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\n<mask token>\n\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) 
for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, 
{'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and 
isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 
0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for 
row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), 
Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, 
pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = 
len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols 
< 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, 
self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef np():\n import numpy\n return numpy\n\n\ndef rowKeys(x):\n return set(x.keys())\n\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in\n list(x.values())])\n\n\ndef arraysToMatrix(x):\n return np().matrix(x, dtype=np().double)\n\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\n\n<mask token>\n\n\ndef matrixToArrays(x):\n return x.tolist()\n\n\ndef mapsToMatrix(x, rows, cols):\n return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in\n rows], dtype=np().double)\n\n\ndef mapToRowVector(x, keys):\n return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T\n\n\ndef rowVectorToMap(x, keys):\n return dict(list(zip(keys, x.T.tolist()[0])))\n\n\ndef matrixToMaps(x, rows, cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x\n .tolist()))\n\n\ndef raggedArray(x):\n collens = list(map(len, x))\n return max(collens) != min(collens)\n\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\n\nclass MapApply(LibFcn):\n name = prefix + 'map'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'fcn': P.Fcn([P.\n Double()], P.Double())}], P.Array(P.Array(P.Double()))), Sig([{'x':\n P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.Double()], P.Double())}\n ], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x\n ]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for \n j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass Scale(LibFcn):\n name = prefix + 'scale'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'alpha': 
P.Double()}], P.\n Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))}, {\n 'alpha': P.Double()}], P.Array(P.Array(P.Double()))), Sig([{'x': P.\n Map(P.Double())}, {'alpha': P.Double()}], P.Map(P.Double())), Sig([\n {'x': P.Map(P.Map(P.Double()))}, {'alpha': P.Double()}], P.Map(P.\n Map(P.Double())))])\n errcodeBase = 24010\n\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n return [[(xj * alpha) for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [(xi * alpha) for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items\n ()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in list(x.items()))\n\n\n<mask token>\n\n\nclass ZipMap(LibFcn):\n name = prefix + 'zipmap'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Array(P.Double()))}, {'fcn': P.Fcn([P.Double(), P.Double()], P.\n Double())}], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(\n P.Double()))}, {'y': P.Map(P.Map(P.Double()))}, {'fcn': P.Fcn([P.\n Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip\n (xi, yi)] for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = 
rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {\n }).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for\n i in rows)\n\n\n<mask token>\n\n\nclass Add(LibFcn):\n name = prefix + 'add'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj + yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi + yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Sub(LibFcn):\n name = prefix + 'sub'\n sig = Sigs([Sig([{'x': P.Array(P.Double())}, {'y': 
P.Array(P.Double())}\n ], P.Array(P.Double())), Sig([{'x': P.Array(P.Array(P.Double()))},\n {'y': P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{'x': P.Map(P.Double())}, {'y': P.Map(P.Double())}], P.Map(P.\n Double())), Sig([{'x': P.Map(P.Map(P.Double()))}, {'y': P.Map(P.Map\n (P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x) and isinstance(y, (list, tuple)) and all(\n isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip\n (x, y)):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [[(xj - yj) for xj, yj in zip(xi, yi)] for xi, yi in zip\n (x, y)]\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n return [(xi - yi) for xi, yi in zip(x, y)]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())) and isinstance(y, dict) and all(isinstance(y[i],\n dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}\n ).get(j, 0.0)) for j in cols)) for i in rows)\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\n\n<mask token>\n\n\nclass Dot(LibFcn):\n name = prefix + 'dot'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'y': P.Array(P.\n Double())}], P.Array(P.Double())), Sig([{'x': P.Map(P.Map(P.Double(\n )))}, {'y': P.Map(P.Double())}], P.Map(P.Double())), Sig([{'x': P.\n Array(P.Array(P.Double()))}, {'y': P.Array(P.Array(P.Double()))}],\n P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))},\n {'y': 
P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1]['type'] == 'array':\n if isinstance(paramTypes[1]['items'], dict) and paramTypes[1][\n 'items']['type'] == 'array':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(any(math.isnan(z) or math.isinf(z) for\n z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for\n row in x) or any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n try:\n if bad:\n raise PFARuntimeException('contains non-finite value',\n self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException('misaligned matrices', self.\n errcodeBase + 0, self.name, pos)\n elif paramTypes[1]['type'] == 'map':\n if isinstance(paramTypes[1]['values'], dict) and paramTypes[1][\n 'values']['type'] == 'map':\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(any(\n math.isnan(z) or math.isinf(z) for z in list(row.values\n ())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = 
list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n else:\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(\n row.values())) for row in list(x.values())) or any(math\n .isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0\n ] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 1, self.name, pos)\n if bad:\n raise PFARuntimeException('contains non-finite value', \n self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\n\n<mask token>\n\n\nclass Transpose(LibFcn):\n name = prefix + 'transpose'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24060\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n elif isinstance(x, 
dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\n\n<mask token>\n\n\nclass Inverse(LibFcn):\n name = prefix + 'inverse'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24070\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\n\n<mask token>\n\n\nclass Trace(LibFcn):\n name = prefix + 'trace'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if 
raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\n\n<mask token>\n\n\nclass Det(LibFcn):\n name = prefix + 'det'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Double()), Sig\n ([{'x': P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n return float('nan')\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n return float('nan')\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\n\n<mask token>\n\n\nclass Symmetric(LibFcn):\n name = prefix + 'symmetric'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'tol': P.Double(\n )}], P.Boolean()), Sig([{'x': 
P.Map(P.Map(P.Double()))}, {'tol': P.\n Double()}], P.Boolean())])\n errcodeBase = 24100\n\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and (x > 0.0 and y > 0.0 or x < \n 0.0 and y < 0.0):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y\n ) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(\n cols)) for i in range(rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {})\n .get(i, 0.0), tol) for j in keys) for i in keys)\n\n\n<mask token>\n\n\nclass EigenBasis(LibFcn):\n name = prefix + 'eigenBasis'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}], P.Array(P.Array(\n P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()))}], P.Map(P.Map(P\n .Double())))])\n errcodeBase = 24110\n\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [(evects[:, i] * (-1.0 if evects[0, i] < 0.0 
else 1.0)) for\n i in range(size)]\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i, j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException('non-square matrix', self.\n errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in\n x):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.\n values())) for row in list(x.values())):\n raise PFARuntimeException('non-finite matrix', self.\n errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys),\n len(keys)), list(map(str, range(len(keys)))), keys)\n\n\n<mask token>\n\n\nclass Truncate(LibFcn):\n name = prefix + 'truncate'\n sig = Sigs([Sig([{'x': P.Array(P.Array(P.Double()))}, {'keep': P.Int()}\n ], P.Array(P.Array(P.Double()))), Sig([{'x': P.Map(P.Map(P.Double()\n ))}, {'keep': P.Array(P.String())}], 
P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple\n )) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException('ragged columns', self.\n errcodeBase + 1, self.name, pos)\n return x[:keep]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in\n list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException('too few rows/cols', self.\n errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/env python\n\n# Copyright (C) 2014 Open Data (\"Open Data\" refers to\n# one or more of the following companies: Open Data Partners LLC,\n# Open Data Research LLC, or Open Data Capital LLC.)\n# \n# This file is part of Hadrian.\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\n\nfrom titus.fcn import Fcn\nfrom titus.fcn import LibFcn\nfrom titus.signature import Sig\nfrom titus.signature import Sigs\nfrom titus.datatype import *\nfrom titus.errors import *\nfrom titus.util import callfcn, div\nimport titus.P as P\nfrom functools import reduce\n\nprovides = {}\ndef provide(fcn):\n provides[fcn.name] = fcn\n\nprefix = \"la.\"\n\ndef np():\n import numpy\n return numpy\n\ndef rowKeys(x):\n return set(x.keys())\n\ndef colKeys(x):\n if len(x) == 0:\n return set()\n else:\n return reduce(lambda a, b: a.union(b), [set(xi.keys()) for xi in list(x.values())])\n\ndef arraysToMatrix(x):\n return np().matrix(x, dtype=np().double)\n\ndef arrayToRowVector(x):\n return np().matrix(x, dtype=np().double).T\n\ndef rowVectorToArray(x):\n return x.T.tolist()[0]\n\ndef matrixToArrays(x):\n return x.tolist()\n\ndef mapsToMatrix(x, rows, cols):\n return np().matrix([[x.get(i, {}).get(j, 0.0) for j in cols] for i in rows], dtype=np().double)\n\ndef mapToRowVector(x, keys):\n return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T\n\ndef rowVectorToMap(x, keys):\n return dict(list(zip(keys, x.T.tolist()[0])))\n\ndef matrixToMaps(x, rows, 
cols):\n return dict((row, dict(list(zip(cols, xi)))) for row, xi in zip(rows, x.tolist()))\n\ndef raggedArray(x):\n collens = list(map(len, x))\n return max(collens) != min(collens)\n\ndef raggedMap(x):\n return len(set(len(xi) for xi in list(x.values()))) != 1\n\nclass MapApply(LibFcn):\n name = prefix + \"map\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"fcn\": P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"fcn\": P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24000\n def __call__(self, state, scope, pos, paramTypes, x, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n return [[callfcn(state, scope, fcn, [xj]) for xj in xi] for xi in x]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n return dict((i, dict((j, callfcn(state, scope, fcn, [xj])) for j, xj in list(xi.items()))) for i, xi in list(x.items()))\n\nprovide(MapApply())\n\nclass Scale(LibFcn):\n name = prefix + \"scale\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"alpha\": P.Double()}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"alpha\": P.Double()}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"alpha\": P.Double()}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"alpha\": P.Double()}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24010\n def __call__(self, state, scope, pos, paramTypes, x, alpha):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n return [[xj * alpha for xj in xi] for xi in x]\n elif isinstance(x, (list, tuple)):\n return [xi * alpha for xi in x]\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in x):\n return dict((i, dict((j, xj * alpha) for j, xj in list(xi.items()))) for i, xi in list(x.items()))\n else:\n return dict((i, xi * alpha) for i, xi in 
list(x.items()))\n\nprovide(Scale())\n\nclass ZipMap(LibFcn):\n name = prefix + \"zipmap\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}, {\"fcn\": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}, {\"fcn\": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24020\n def __call__(self, state, scope, pos, paramTypes, x, y, fcn):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {}).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for i in rows)\n\nprovide(ZipMap())\n\nclass Add(LibFcn):\n name = prefix + \"add\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24030\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and 
\\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[xj + yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [xi + yi for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) + y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)\n\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) + y.get(i, 0.0)) for i in rows)\n\nprovide(Add())\n\nclass Sub(LibFcn):\n name = prefix + \"sub\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Double())}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Double())}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24040\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \\\n isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):\n if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [[xj - yj for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]\n\n elif 
isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):\n if len(x) != len(y):\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n return [xi - yi for xi, yi in zip(x, y)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \\\n isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):\n rows = rowKeys(x).union(rowKeys(y))\n cols = colKeys(x).union(colKeys(y))\n return dict((i, dict((j, x.get(i, {}).get(j, 0.0) - y.get(i, {}).get(j, 0.0)) for j in cols)) for i in rows)\n\n else:\n rows = rowKeys(x).union(rowKeys(y))\n return dict((i, x.get(i, 0.0) - y.get(i, 0.0)) for i in rows)\n\nprovide(Sub())\n\nclass Dot(LibFcn):\n name = prefix + \"dot\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Double())}], P.Array(P.Double())),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Double())}], P.Map(P.Double())),\n Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"y\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"y\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24050\n def __call__(self, state, scope, pos, paramTypes, x, y):\n if paramTypes[1][\"type\"] == \"array\":\n if isinstance(paramTypes[1][\"items\"], dict) and paramTypes[1][\"items\"][\"type\"] == \"array\":\n # array matrix-matrix case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \\\n any(any(math.isnan(z) or math.isinf(z) for z in row) for row in y)\n xmat = arraysToMatrix(x)\n ymat = arraysToMatrix(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n try:\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return matrixToArrays(np().dot(xmat, ymat))\n except ValueError:\n 
raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n\n else:\n # array matrix-vector case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \\\n any(math.isnan(z) or math.isinf(z) for z in y)\n xmat = arraysToMatrix(x)\n ymat = arrayToRowVector(y)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n try:\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return rowVectorToArray(np().dot(xmat, ymat))\n except ValueError:\n raise PFARuntimeException(\"misaligned matrices\", self.errcodeBase + 0, self.name, pos)\n\n elif paramTypes[1][\"type\"] == \"map\":\n if isinstance(paramTypes[1][\"values\"], dict) and paramTypes[1][\"values\"][\"type\"] == \"map\":\n # map matrix-matrix case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \\\n any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(y.values()))\n rows = list(rowKeys(x))\n inter = list(colKeys(x).union(rowKeys(y)))\n cols = list(colKeys(y))\n xmat = mapsToMatrix(x, rows, inter)\n ymat = mapsToMatrix(y, inter, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return matrixToMaps(np().dot(xmat, ymat), rows, cols)\n\n else:\n # map matrix-vector case\n bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \\\n any(math.isnan(z) or math.isinf(z) for z in list(y.values()))\n rows = list(rowKeys(x))\n cols = list(colKeys(x).union(rowKeys(y)))\n xmat = mapsToMatrix(x, rows, cols)\n ymat = 
mapToRowVector(y, cols)\n if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 1, self.name, pos)\n if bad: raise PFARuntimeException(\"contains non-finite value\", self.errcodeBase + 2, self.name, pos)\n return rowVectorToMap(np().dot(xmat, ymat), rows)\n\nprovide(Dot())\n \nclass Transpose(LibFcn):\n name = prefix + \"transpose\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24060\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return [[x[r][c] for r in range(rows)] for c in range(cols)]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedMap(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)\n\nprovide(Transpose())\n\nclass Inverse(LibFcn):\n name = prefix + \"inverse\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24070\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi 
in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return matrixToArrays(arraysToMatrix(x).I)\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = list(rowKeys(x))\n cols = list(colKeys(x))\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n xmat = mapsToMatrix(x, rows, cols)\n return matrixToMaps(xmat.I, cols, rows)\n\nprovide(Inverse())\n\nclass Trace(LibFcn):\n name = prefix + \"trace\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Double()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24080\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows == 0:\n return 0.0\n else:\n cols = len(x[0])\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 0, self.name, pos)\n return sum(x[i][i] for i in range(min(rows, cols)))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = rowKeys(x).intersection(colKeys(x))\n return sum(x[i][i] for i in keys)\n\nprovide(Trace())\n\nclass Det(LibFcn):\n name = prefix + \"det\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Double()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Double())])\n errcodeBase = 24090\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols 
= len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):\n return float(\"nan\")\n else:\n return float(np().linalg.det(arraysToMatrix(x)))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):\n return float(\"nan\")\n else:\n return float(np().linalg.det(mapsToMatrix(x, keys, keys)))\n\nprovide(Det())\n\nclass Symmetric(LibFcn):\n name = prefix + \"symmetric\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"tol\": P.Double()}], P.Boolean()),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"tol\": P.Double()}], P.Boolean())])\n errcodeBase = 24100\n @staticmethod\n def same(x, y, tol):\n if math.isinf(x) and math.isinf(y) and ((x > 0.0 and y > 0.0) or (x < 0.0 and y < 0.0)):\n return True\n elif math.isnan(x) and math.isnan(y):\n return True\n elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y) and not math.isnan(y):\n return abs(x - y) < tol\n else:\n return False\n def __call__(self, state, scope, pos, paramTypes, x, tol):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if 
raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n return all(all(self.same(x[i][j], x[j][i], tol) for j in range(cols)) for i in range(rows))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol) for j in keys) for i in keys)\n\nprovide(Symmetric())\n\nclass EigenBasis(LibFcn):\n name = prefix + \"eigenBasis\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])\n\n errcodeBase = 24110\n def calculate(self, x, size):\n symm = (x + x.T) * 0.5\n\n evals, evects = np().linalg.eig(symm)\n evects = np().array(evects)\n evects2 = [evects[:,i] * (-1.0 if evects[0,i] < 0.0 else 1.0) for i in range(size)]\n\n eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]\n order = np().argsort(eigvalm2)\n\n out = np().empty((size, size), dtype=np().double)\n for i in range(size):\n for j in range(size):\n out[i,j] = evects2[order[i]][j] * eigvalm2[order[i]]\n return out\n\n def __call__(self, state, scope, pos, paramTypes, x):\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n if rows != cols:\n raise 
PFARuntimeException(\"non-square matrix\", self.errcodeBase + 2, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):\n raise PFARuntimeException(\"non-finite matrix\", self.errcodeBase + 3, self.name, pos)\n return matrixToArrays(self.calculate(arraysToMatrix(x), rows))\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n keys = list(rowKeys(x).union(colKeys(x)))\n if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):\n raise PFARuntimeException(\"non-finite matrix\", self.errcodeBase + 3, self.name, pos)\n return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)), list(map(str, range(len(keys)))), keys)\n\nprovide(EigenBasis())\n\nclass Truncate(LibFcn):\n name = prefix + \"truncate\"\n sig = Sigs([Sig([{\"x\": P.Array(P.Array(P.Double()))}, {\"keep\": P.Int()}], P.Array(P.Array(P.Double()))),\n Sig([{\"x\": P.Map(P.Map(P.Double()))}, {\"keep\": P.Array(P.String())}], P.Map(P.Map(P.Double())))])\n errcodeBase = 24120\n def __call__(self, state, scope, pos, paramTypes, x, keep):\n if isinstance(keep, int) and keep < 0:\n keep = 0\n\n if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):\n rows = len(x)\n if rows < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n cols = len(x[0])\n if cols < 1:\n raise PFARuntimeException(\"too few rows/cols\", self.errcodeBase + 0, self.name, pos)\n if raggedArray(x):\n raise PFARuntimeException(\"ragged columns\", self.errcodeBase + 1, self.name, pos)\n return x[:keep]\n\n elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):\n rows = rowKeys(x)\n cols = colKeys(x)\n if len(rows) < 1 or len(cols) < 1:\n raise PFARuntimeException(\"too few 
rows/cols\", self.errcodeBase + 0, self.name, pos)\n return dict((k, x[k]) for k in rows if k in keep)\n\nprovide(Truncate())\n",
"step-ids": [
26,
42,
47,
53,
59
]
}
|
[
26,
42,
47,
53,
59
] |
#@@---------------------------@@
# Author: Chamil Jayasundara
# Date: 5/18/17
# Description: Extract sFlow data from sFlow logs
#@@---------------------------@@
import itertools
from collections import defaultdict
"""Flow Sample and Datagram Objects"""
class Container(object):
    """Keyed record: a numeric id plus a field mapping that defaults to 0.

    Reads and writes are delegated to a ``defaultdict(int)``, so looking
    up a field that was never stored yields 0 instead of raising.
    """

    def __init__(self, id):
        self.id = id
        self.content = defaultdict(int)

    def __getitem__(self, field):
        # Missing fields silently materialize as int() == 0.
        return self.content[field]

    def __setitem__(self, field, val):
        self.content[field] = val
class Datagram(Container):
    """One sFlow datagram: parsed header fields plus nested flow samples."""

    # Shared iterator handing out sequential ids.  Calling the next()
    # builtin on it is portable; the original ``itertools.count().next``
    # bound-method form only exists on Python 2 and raises AttributeError
    # on Python 3.
    datagram_counter = itertools.count()

    def __init__(self):
        super(Datagram, self).__init__(next(Datagram.datagram_counter))
        # Maps FlowSample id -> that sample's parsed field dict.
        self['flowSamples'] = {}
class FlowSample(Container):
    """One flow sample within a datagram; fields come from sflowtool lines."""

    # Portable sequential-id source: next(iterator) works on Python 2 and 3,
    # whereas the original ``itertools.count().next`` is Python-2 only.
    flowsample_counter = itertools.count()

    def __init__(self):
        super(FlowSample, self).__init__(next(FlowSample.flowsample_counter))
#############################
"""Data Extraction"""
def process_line_and_store_in_obj(line, obj):
    """Split ``line`` at the first space and store key -> right-stripped value.

    Lines with no space store the whole (unstripped) line as the key with
    an empty-string value, mirroring ``str.partition`` semantics.
    """
    key, _, raw_value = line.partition(" ")
    obj[key] = raw_value.rstrip()
###State Machine Classses
class WithinDatagram(object):
    """State active while parsing lines between startDatagram/endDatagram."""

    def __init__(self, traceObj):
        self.Trace = traceObj
        self.current_datagram = None

    def process(self, line):
        if "startDatagram" in line:
            # Begin accumulating a fresh datagram.
            self.current_datagram = Datagram()
            return
        if "endDatagram" in line:
            # Hand the finished datagram's field dict to the user callback.
            self.Trace.callable(self.current_datagram.content)
            return
        if "startSample" in line:
            # Switch the state machine into flow-sample mode with a new sample.
            self.Trace.currentState = self.Trace.within_flowsample
            self.Trace.within_flowsample.re_init(FlowSample(), self.current_datagram)
            return
        # Any other line is a plain "key value" field of the datagram.
        process_line_and_store_in_obj(line, self.current_datagram)
class WithinFlowsample(object):
    """State active while parsing lines between startSample/endSample."""

    def __init__(self, traceObj):
        self.Trace = traceObj
        self.current_datagram = None
        self.current_flowsample = None

    def re_init(self, flowsampleObj, datagramObj):
        """Point this state at a fresh sample belonging to ``datagramObj``."""
        self.current_datagram = datagramObj
        self.current_flowsample = flowsampleObj

    def process(self, line):
        if "endSample" in line:
            # Attach the finished sample to its datagram, then return
            # control to the datagram-level state.
            sample = self.current_flowsample
            self.current_datagram['flowSamples'][sample.id] = sample.content
            self.Trace.currentState = self.Trace.within_datagram
        else:
            process_line_and_store_in_obj(line, self.current_flowsample)
class Trace(object):
    """Two-state line parser for sflowtool output.

    Each line is fed to whichever state object is active; the states flip
    ``currentState`` between themselves on start/end markers, and every
    completed datagram's content dict is passed to ``callable``.
    """

    def __init__(self, callable=None):
        # Both states keep a back-reference to this Trace so they can
        # switch the active state and reach each other.
        self.callable = callable
        self.within_flowsample = WithinFlowsample(self)
        self.within_datagram = WithinDatagram(self)
        # Parsing always begins at the datagram level.
        self.currentState = self.within_datagram

    def process(self, line):
        """Dispatch one input line to the currently active state."""
        self.currentState.process(line)
|
normal
|
{
"blob_id": "395ff2e7c052b57548151fc71fad971c94ebceea",
"index": 3974,
"step-1": "<mask token>\n\n\nclass WithinDatagram(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n <mask token>\n\n\nclass WithinFlowsample(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n self.current_flowsample = None\n\n def re_init(self, flowsampleObj, datagramObj):\n self.current_datagram = datagramObj\n self.current_flowsample = flowsampleObj\n\n def process(self, line):\n if 'endSample' in line:\n self.current_datagram['flowSamples'][self.current_flowsample.id\n ] = self.current_flowsample.content\n self.Trace.currentState = self.Trace.within_datagram\n else:\n process_line_and_store_in_obj(line, self.current_flowsample)\n\n\nclass Trace(object):\n\n def __init__(self, callable=None):\n self.within_datagram = WithinDatagram(self)\n self.within_flowsample = WithinFlowsample(self)\n self.currentState = self.within_datagram\n self.callable = callable\n\n def process(self, line):\n self.currentState.process(line)\n",
"step-2": "<mask token>\n\n\nclass FlowSample(Container):\n <mask token>\n\n def __init__(self):\n super(FlowSample, self).__init__(FlowSample.flowsample_counter())\n\n\n<mask token>\n\n\nclass WithinDatagram(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n\n def process(self, line):\n if 'startDatagram' in line:\n self.current_datagram = Datagram()\n elif 'endDatagram' in line:\n self.Trace.callable(self.current_datagram.content)\n elif 'startSample' in line:\n self.Trace.currentState = self.Trace.within_flowsample\n self.Trace.within_flowsample.re_init(FlowSample(), self.\n current_datagram)\n else:\n process_line_and_store_in_obj(line, self.current_datagram)\n\n\nclass WithinFlowsample(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n self.current_flowsample = None\n\n def re_init(self, flowsampleObj, datagramObj):\n self.current_datagram = datagramObj\n self.current_flowsample = flowsampleObj\n\n def process(self, line):\n if 'endSample' in line:\n self.current_datagram['flowSamples'][self.current_flowsample.id\n ] = self.current_flowsample.content\n self.Trace.currentState = self.Trace.within_datagram\n else:\n process_line_and_store_in_obj(line, self.current_flowsample)\n\n\nclass Trace(object):\n\n def __init__(self, callable=None):\n self.within_datagram = WithinDatagram(self)\n self.within_flowsample = WithinFlowsample(self)\n self.currentState = self.within_datagram\n self.callable = callable\n\n def process(self, line):\n self.currentState.process(line)\n",
"step-3": "<mask token>\n\n\nclass Datagram(Container):\n <mask token>\n <mask token>\n\n\nclass FlowSample(Container):\n flowsample_counter = itertools.count().next\n\n def __init__(self):\n super(FlowSample, self).__init__(FlowSample.flowsample_counter())\n\n\n<mask token>\n\n\nclass WithinDatagram(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n\n def process(self, line):\n if 'startDatagram' in line:\n self.current_datagram = Datagram()\n elif 'endDatagram' in line:\n self.Trace.callable(self.current_datagram.content)\n elif 'startSample' in line:\n self.Trace.currentState = self.Trace.within_flowsample\n self.Trace.within_flowsample.re_init(FlowSample(), self.\n current_datagram)\n else:\n process_line_and_store_in_obj(line, self.current_datagram)\n\n\nclass WithinFlowsample(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n self.current_flowsample = None\n\n def re_init(self, flowsampleObj, datagramObj):\n self.current_datagram = datagramObj\n self.current_flowsample = flowsampleObj\n\n def process(self, line):\n if 'endSample' in line:\n self.current_datagram['flowSamples'][self.current_flowsample.id\n ] = self.current_flowsample.content\n self.Trace.currentState = self.Trace.within_datagram\n else:\n process_line_and_store_in_obj(line, self.current_flowsample)\n\n\nclass Trace(object):\n\n def __init__(self, callable=None):\n self.within_datagram = WithinDatagram(self)\n self.within_flowsample = WithinFlowsample(self)\n self.currentState = self.within_datagram\n self.callable = callable\n\n def process(self, line):\n self.currentState.process(line)\n",
"step-4": "<mask token>\n\n\nclass Datagram(Container):\n datagram_counter = itertools.count().next\n\n def __init__(self):\n super(Datagram, self).__init__(Datagram.datagram_counter())\n self['flowSamples'] = {}\n\n\nclass FlowSample(Container):\n flowsample_counter = itertools.count().next\n\n def __init__(self):\n super(FlowSample, self).__init__(FlowSample.flowsample_counter())\n\n\n<mask token>\n\n\nclass WithinDatagram(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n\n def process(self, line):\n if 'startDatagram' in line:\n self.current_datagram = Datagram()\n elif 'endDatagram' in line:\n self.Trace.callable(self.current_datagram.content)\n elif 'startSample' in line:\n self.Trace.currentState = self.Trace.within_flowsample\n self.Trace.within_flowsample.re_init(FlowSample(), self.\n current_datagram)\n else:\n process_line_and_store_in_obj(line, self.current_datagram)\n\n\nclass WithinFlowsample(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n self.current_flowsample = None\n\n def re_init(self, flowsampleObj, datagramObj):\n self.current_datagram = datagramObj\n self.current_flowsample = flowsampleObj\n\n def process(self, line):\n if 'endSample' in line:\n self.current_datagram['flowSamples'][self.current_flowsample.id\n ] = self.current_flowsample.content\n self.Trace.currentState = self.Trace.within_datagram\n else:\n process_line_and_store_in_obj(line, self.current_flowsample)\n\n\nclass Trace(object):\n\n def __init__(self, callable=None):\n self.within_datagram = WithinDatagram(self)\n self.within_flowsample = WithinFlowsample(self)\n self.currentState = self.within_datagram\n self.callable = callable\n\n def process(self, line):\n self.currentState.process(line)\n",
"step-5": "#@@---------------------------@@\n# Author: Chamil Jayasundara\n# Date: 5/18/17\n# Description: Extract SFLOW data from slow logs\n#@@---------------------------@@\n\nimport itertools\nfrom collections import defaultdict\n\n\"\"\"Flow Sample and Datagram Objects\"\"\"\n\n\nclass Container(object):\n\n def __init__(self, id):\n self.id = id\n self.content = defaultdict(int)\n\n def __getitem__(self, key):\n return self.content[key]\n\n def __setitem__(self, key, value):\n self.content[key] = value\n\n\nclass Datagram(Container):\n\n datagram_counter = itertools.count().next\n\n def __init__(self):\n super(Datagram, self).__init__(Datagram.datagram_counter())\n self['flowSamples'] = {}\n\n\nclass FlowSample(Container):\n\n flowsample_counter = itertools.count().next\n\n def __init__(self):\n super(FlowSample, self).__init__(FlowSample.flowsample_counter())\n\n\n#############################\n\"\"\"Data Extraction\"\"\"\n\ndef process_line_and_store_in_obj(line, obj):\n partition = line.partition(\" \")\n obj[partition[0]] = partition[2].rstrip()\n\n\n###State Machine Classses\nclass WithinDatagram(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n\n def process(self,line):\n if \"startDatagram\" in line:\n self.current_datagram = Datagram()\n\n elif \"endDatagram\" in line:\n self.Trace.callable(self.current_datagram.content)\n\n elif \"startSample\" in line:\n self.Trace.currentState = self.Trace.within_flowsample\n self.Trace.within_flowsample.re_init(FlowSample(), self.current_datagram)\n\n else:\n process_line_and_store_in_obj(line, self.current_datagram)\n\n\nclass WithinFlowsample(object):\n\n def __init__(self, traceObj):\n self.Trace = traceObj\n self.current_datagram = None\n self.current_flowsample = None\n\n def re_init(self, flowsampleObj, datagramObj):\n self.current_datagram = datagramObj\n self.current_flowsample = flowsampleObj\n\n def process(self,line):\n if \"endSample\" in line:\n 
self.current_datagram['flowSamples'][self.current_flowsample.id] = self.current_flowsample.content\n self.Trace.currentState = self.Trace.within_datagram\n\n else:\n process_line_and_store_in_obj(line, self.current_flowsample)\n\n\nclass Trace(object):\n\n def __init__(self, callable=None):\n self.within_datagram = WithinDatagram(self)\n self.within_flowsample = WithinFlowsample(self)\n self.currentState = self.within_datagram\n self.callable = callable\n\n def process(self, line):\n self.currentState.process(line)\n\n",
"step-ids": [
9,
12,
14,
16,
23
]
}
|
[
9,
12,
14,
16,
23
] |
# coding=utf-8
from lxml import etree
import frontik.handler
class Page(frontik.handler.PageHandler):
    """Test page: renders an <ok/> document through a selectable XSL template,
    optionally failing with HTTP 400 when ?raise=true is passed."""

    def get_page(self):
        template = self.get_argument('template', 'simple.xsl')
        self.set_xsl(template)
        self.doc.put(etree.Element('ok'))
        should_raise = self.get_argument('raise', 'false') == 'true'
        if should_raise:
            raise frontik.handler.HTTPError(400, xml=etree.Element('not-ok'))
|
normal
|
{
"blob_id": "6f331eedcdaceaded142c3ffe9400aaa817613c1",
"index": 5795,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Page(frontik.handler.PageHandler):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Page(frontik.handler.PageHandler):\n\n def get_page(self):\n self.set_xsl(self.get_argument('template', 'simple.xsl'))\n self.doc.put(etree.Element('ok'))\n if self.get_argument('raise', 'false') == 'true':\n raise frontik.handler.HTTPError(400, xml=etree.Element('not-ok'))\n",
"step-4": "from lxml import etree\nimport frontik.handler\n\n\nclass Page(frontik.handler.PageHandler):\n\n def get_page(self):\n self.set_xsl(self.get_argument('template', 'simple.xsl'))\n self.doc.put(etree.Element('ok'))\n if self.get_argument('raise', 'false') == 'true':\n raise frontik.handler.HTTPError(400, xml=etree.Element('not-ok'))\n",
"step-5": "# coding=utf-8\n\nfrom lxml import etree\n\nimport frontik.handler\n\n\nclass Page(frontik.handler.PageHandler):\n def get_page(self):\n self.set_xsl(self.get_argument('template', 'simple.xsl'))\n self.doc.put(etree.Element('ok'))\n\n if self.get_argument('raise', 'false') == 'true':\n raise frontik.handler.HTTPError(400, xml=etree.Element('not-ok'))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import graphics
from graphics import *
class Renderer():
    """Draws a 7x6 Connect-Four style board and pieces on a graphics window.

    Fixes over the original: the vertical grid lines were drawn inside the
    horizontal-line loop (each redrawn 6 times); ``update_pieces`` fetched
    ``self.engine.get_board()`` and discarded it; and slot centers/radii were
    hard-coded (50 / 37.5), which only fit the default 700x600 window — they
    are now derived from the cell size, giving identical geometry at the
    defaults while scaling with custom sizes.
    """

    COLS = 7  # board columns
    ROWS = 6  # board rows

    def __init__(self, engine, width=700, height=600):
        self.width = width
        self.height = height
        self.engine = engine
        self.win = GraphWin("Game Board", width, height)
        self.win.setBackground("blue")

    def update(self):
        """Refresh the underlying window."""
        self.win.update()

    def get_window(self):
        """Return the GraphWin this renderer draws into."""
        return self.win

    def get_width(self):
        """Return the configured window width in pixels."""
        return self.width

    def _cell_size(self):
        # Width/height of one board cell; all slot geometry derives from it.
        return self.width / self.COLS, self.height / self.ROWS

    def _slot_radius(self):
        # 0.375 * cell == the original 37.5px radius at the default 700x600.
        cell_w, cell_h = self._cell_size()
        return 0.375 * min(cell_w, cell_h)

    def draw_board(self):
        """Draw the grid lines and the empty (white) slots."""
        cell_w, cell_h = self._cell_size()
        for i in range(self.ROWS):
            hor_line = Line(Point(0, i * cell_h), Point(self.width, i * cell_h))
            hor_line.setOutline('black')
            hor_line.draw(self.win)
        for j in range(self.COLS):
            ver_line = Line(Point(j * cell_w, 0), Point(j * cell_w, self.height))
            ver_line.setOutline('black')
            ver_line.draw(self.win)
        radius = self._slot_radius()
        for y in range(self.ROWS):
            for x in range(self.COLS):
                slot = Circle(Point(x * cell_w + cell_w / 2,
                                    y * cell_h + cell_h / 2), radius)
                slot.setFill("white")
                slot.draw(self.win)

    def update_pieces(self, x, y, color):
        """Draw a piece at board cell (x, y); 'r' paints red, anything else black."""
        cell_w, cell_h = self._cell_size()
        piece = Circle(Point(x * cell_w + cell_w / 2, y * cell_h + cell_h / 2),
                       self._slot_radius())
        piece.setFill("red" if color == 'r' else "black")
        piece.draw(self.win)

    def end(self):
        """Close the window."""
        self.get_window().close()
class Menu():
    """In-game pause menu with SAVE / LOAD / QUIT buttons.

    The original built each of the three buttons with seven copy-pasted
    statements (its own comment flagged this as needing a change); the
    construction is factored into ``_make_button`` with identical
    coordinates, colors and labels.
    """

    # Design-space edge length the original layout hard-codes everywhere.
    SIZE = 500

    def __init__(self, window):
        self.window = window
        sky_blue = color_rgb(135, 206, 250)
        royal_blue = color_rgb(65, 105, 225)
        s = self.SIZE
        # Backing panel behind the buttons.
        self.menu = Rectangle(Point(.2 * s, .15 * s), Point(.8 * s, .8 * s))
        self.menu.setFill(sky_blue)
        self.menu.setOutline(sky_blue)
        # Three stacked buttons; fractions match the original layout exactly.
        self.save, self.saveTxt = self._make_button(.2, .35, "SAVE", royal_blue)
        self.load, self.loadTxt = self._make_button(.4, .55, "LOAD", royal_blue)
        self.quit, self.quitTxt = self._make_button(.6, .75, "QUIT", royal_blue)

    def _make_button(self, top, bottom, label, color):
        """Build one button rectangle plus its vertically-centered label."""
        s = self.SIZE
        rect = Rectangle(Point(.25 * s, top * s), Point(.75 * s, bottom * s))
        rect.setOutline(color)
        rect.setFill(color)
        txt = Text(Point(.50 * s, (top + bottom) / 2.0 * s), label)
        txt.setSize(30)
        txt.setFace("helvetica")
        txt.setStyle("bold")
        return rect, txt

    def _items(self):
        # Draw/undraw order matters: the panel must go down first so the
        # buttons render on top of it.
        return (self.menu, self.save, self.saveTxt, self.load,
                self.loadTxt, self.quit, self.quitTxt)

    def openMenu(self):
        """Draw every menu element onto the window."""
        for item in self._items():
            item.draw(self.window)

    def closeMenu(self):
        """Remove every menu element from the window."""
        for item in self._items():
            item.undraw()
|
normal
|
{
"blob_id": "85a3682f144f02aa412d45c901f76c65de2e816d",
"index": 5599,
"step-1": "<mask token>\n\n\nclass Renderer:\n <mask token>\n <mask token>\n <mask token>\n\n def get_width(self):\n return self.width\n\n def draw_board(self):\n for i in range(0, 6):\n horLines = Line(Point(0, i * self.height / 6), Point(self.width,\n i * self.height / 6))\n horLines.setOutline('black')\n horLines.draw(self.win)\n for j in range(0, 7):\n verLines = Line(Point(j * self.width / 7, 0), Point(j * self.\n width / 7, self.height))\n verLines.setOutline('black')\n verLines.draw(self.win)\n for y in range(0, 6):\n for x in range(0, 7):\n slot = Circle(Point(x * self.width / 7 + 50, y * self.\n height / 6 + 50), 37.5)\n slot.setFill('white')\n slot.draw(self.win)\n <mask token>\n\n def end(self):\n self.get_window().close()\n\n\nclass Menu:\n\n def __init__(self, window):\n self.window = window\n skyBlue = color_rgb(135, 206, 250)\n royalBlue = color_rgb(65, 105, 225)\n self.menu = Rectangle(Point(0.2 * 500, 0.15 * 500), Point(0.8 * 500,\n 0.8 * 500))\n self.menu.setFill(skyBlue)\n self.menu.setOutline(skyBlue)\n self.save = Rectangle(Point(0.25 * 500, 0.2 * 500), Point(0.75 * \n 500, 0.35 * 500))\n self.save.setOutline(royalBlue)\n self.save.setFill(royalBlue)\n self.saveTxt = Text(Point(0.5 * 500, 0.275 * 500), 'SAVE')\n self.saveTxt.setSize(30)\n self.saveTxt.setFace('helvetica')\n self.saveTxt.setStyle('bold')\n self.load = Rectangle(Point(0.25 * 500, 0.4 * 500), Point(0.75 * \n 500, 0.55 * 500))\n self.load.setOutline(royalBlue)\n self.load.setFill(royalBlue)\n self.loadTxt = Text(Point(0.5 * 500, 0.475 * 500), 'LOAD')\n self.loadTxt.setSize(30)\n self.loadTxt.setFace('helvetica')\n self.loadTxt.setStyle('bold')\n self.quit = Rectangle(Point(0.25 * 500, 0.6 * 500), Point(0.75 * \n 500, 0.75 * 500))\n self.quit.setOutline(royalBlue)\n self.quit.setFill(royalBlue)\n self.quitTxt = Text(Point(0.5 * 500, 0.675 * 500), 'QUIT')\n self.quitTxt.setSize(30)\n self.quitTxt.setFace('helvetica')\n self.quitTxt.setStyle('bold')\n\n def openMenu(self):\n 
self.menu.draw(self.window)\n self.save.draw(self.window)\n self.saveTxt.draw(self.window)\n self.load.draw(self.window)\n self.loadTxt.draw(self.window)\n self.quit.draw(self.window)\n self.quitTxt.draw(self.window)\n\n def closeMenu(self):\n self.menu.undraw()\n self.save.undraw()\n self.saveTxt.undraw()\n self.load.undraw()\n self.loadTxt.undraw()\n self.quit.undraw()\n self.quitTxt.undraw()\n",
"step-2": "<mask token>\n\n\nclass Renderer:\n <mask token>\n <mask token>\n <mask token>\n\n def get_width(self):\n return self.width\n\n def draw_board(self):\n for i in range(0, 6):\n horLines = Line(Point(0, i * self.height / 6), Point(self.width,\n i * self.height / 6))\n horLines.setOutline('black')\n horLines.draw(self.win)\n for j in range(0, 7):\n verLines = Line(Point(j * self.width / 7, 0), Point(j * self.\n width / 7, self.height))\n verLines.setOutline('black')\n verLines.draw(self.win)\n for y in range(0, 6):\n for x in range(0, 7):\n slot = Circle(Point(x * self.width / 7 + 50, y * self.\n height / 6 + 50), 37.5)\n slot.setFill('white')\n slot.draw(self.win)\n\n def update_pieces(self, x, y, color):\n board = self.engine.get_board()\n pointY = y * self.height / 6\n pointX = x * self.width / 7\n piece = Circle(Point(pointX + 50, pointY + 50), 37.5)\n if color == 'r':\n piece.setFill('red')\n else:\n piece.setFill('black')\n piece.draw(self.win)\n\n def end(self):\n self.get_window().close()\n\n\nclass Menu:\n\n def __init__(self, window):\n self.window = window\n skyBlue = color_rgb(135, 206, 250)\n royalBlue = color_rgb(65, 105, 225)\n self.menu = Rectangle(Point(0.2 * 500, 0.15 * 500), Point(0.8 * 500,\n 0.8 * 500))\n self.menu.setFill(skyBlue)\n self.menu.setOutline(skyBlue)\n self.save = Rectangle(Point(0.25 * 500, 0.2 * 500), Point(0.75 * \n 500, 0.35 * 500))\n self.save.setOutline(royalBlue)\n self.save.setFill(royalBlue)\n self.saveTxt = Text(Point(0.5 * 500, 0.275 * 500), 'SAVE')\n self.saveTxt.setSize(30)\n self.saveTxt.setFace('helvetica')\n self.saveTxt.setStyle('bold')\n self.load = Rectangle(Point(0.25 * 500, 0.4 * 500), Point(0.75 * \n 500, 0.55 * 500))\n self.load.setOutline(royalBlue)\n self.load.setFill(royalBlue)\n self.loadTxt = Text(Point(0.5 * 500, 0.475 * 500), 'LOAD')\n self.loadTxt.setSize(30)\n self.loadTxt.setFace('helvetica')\n self.loadTxt.setStyle('bold')\n self.quit = Rectangle(Point(0.25 * 500, 0.6 * 500), Point(0.75 * 
\n 500, 0.75 * 500))\n self.quit.setOutline(royalBlue)\n self.quit.setFill(royalBlue)\n self.quitTxt = Text(Point(0.5 * 500, 0.675 * 500), 'QUIT')\n self.quitTxt.setSize(30)\n self.quitTxt.setFace('helvetica')\n self.quitTxt.setStyle('bold')\n\n def openMenu(self):\n self.menu.draw(self.window)\n self.save.draw(self.window)\n self.saveTxt.draw(self.window)\n self.load.draw(self.window)\n self.loadTxt.draw(self.window)\n self.quit.draw(self.window)\n self.quitTxt.draw(self.window)\n\n def closeMenu(self):\n self.menu.undraw()\n self.save.undraw()\n self.saveTxt.undraw()\n self.load.undraw()\n self.loadTxt.undraw()\n self.quit.undraw()\n self.quitTxt.undraw()\n",
"step-3": "<mask token>\n\n\nclass Renderer:\n\n def __init__(self, engine, width=700, height=600):\n self.width = width\n self.height = height\n self.engine = engine\n self.win = GraphWin('Game Board', width, height)\n self.win.setBackground('blue')\n <mask token>\n <mask token>\n\n def get_width(self):\n return self.width\n\n def draw_board(self):\n for i in range(0, 6):\n horLines = Line(Point(0, i * self.height / 6), Point(self.width,\n i * self.height / 6))\n horLines.setOutline('black')\n horLines.draw(self.win)\n for j in range(0, 7):\n verLines = Line(Point(j * self.width / 7, 0), Point(j * self.\n width / 7, self.height))\n verLines.setOutline('black')\n verLines.draw(self.win)\n for y in range(0, 6):\n for x in range(0, 7):\n slot = Circle(Point(x * self.width / 7 + 50, y * self.\n height / 6 + 50), 37.5)\n slot.setFill('white')\n slot.draw(self.win)\n\n def update_pieces(self, x, y, color):\n board = self.engine.get_board()\n pointY = y * self.height / 6\n pointX = x * self.width / 7\n piece = Circle(Point(pointX + 50, pointY + 50), 37.5)\n if color == 'r':\n piece.setFill('red')\n else:\n piece.setFill('black')\n piece.draw(self.win)\n\n def end(self):\n self.get_window().close()\n\n\nclass Menu:\n\n def __init__(self, window):\n self.window = window\n skyBlue = color_rgb(135, 206, 250)\n royalBlue = color_rgb(65, 105, 225)\n self.menu = Rectangle(Point(0.2 * 500, 0.15 * 500), Point(0.8 * 500,\n 0.8 * 500))\n self.menu.setFill(skyBlue)\n self.menu.setOutline(skyBlue)\n self.save = Rectangle(Point(0.25 * 500, 0.2 * 500), Point(0.75 * \n 500, 0.35 * 500))\n self.save.setOutline(royalBlue)\n self.save.setFill(royalBlue)\n self.saveTxt = Text(Point(0.5 * 500, 0.275 * 500), 'SAVE')\n self.saveTxt.setSize(30)\n self.saveTxt.setFace('helvetica')\n self.saveTxt.setStyle('bold')\n self.load = Rectangle(Point(0.25 * 500, 0.4 * 500), Point(0.75 * \n 500, 0.55 * 500))\n self.load.setOutline(royalBlue)\n self.load.setFill(royalBlue)\n self.loadTxt = Text(Point(0.5 * 
500, 0.475 * 500), 'LOAD')\n self.loadTxt.setSize(30)\n self.loadTxt.setFace('helvetica')\n self.loadTxt.setStyle('bold')\n self.quit = Rectangle(Point(0.25 * 500, 0.6 * 500), Point(0.75 * \n 500, 0.75 * 500))\n self.quit.setOutline(royalBlue)\n self.quit.setFill(royalBlue)\n self.quitTxt = Text(Point(0.5 * 500, 0.675 * 500), 'QUIT')\n self.quitTxt.setSize(30)\n self.quitTxt.setFace('helvetica')\n self.quitTxt.setStyle('bold')\n\n def openMenu(self):\n self.menu.draw(self.window)\n self.save.draw(self.window)\n self.saveTxt.draw(self.window)\n self.load.draw(self.window)\n self.loadTxt.draw(self.window)\n self.quit.draw(self.window)\n self.quitTxt.draw(self.window)\n\n def closeMenu(self):\n self.menu.undraw()\n self.save.undraw()\n self.saveTxt.undraw()\n self.load.undraw()\n self.loadTxt.undraw()\n self.quit.undraw()\n self.quitTxt.undraw()\n",
"step-4": "<mask token>\n\n\nclass Renderer:\n\n def __init__(self, engine, width=700, height=600):\n self.width = width\n self.height = height\n self.engine = engine\n self.win = GraphWin('Game Board', width, height)\n self.win.setBackground('blue')\n\n def update(self):\n self.win.update()\n\n def get_window(self):\n return self.win\n\n def get_width(self):\n return self.width\n\n def draw_board(self):\n for i in range(0, 6):\n horLines = Line(Point(0, i * self.height / 6), Point(self.width,\n i * self.height / 6))\n horLines.setOutline('black')\n horLines.draw(self.win)\n for j in range(0, 7):\n verLines = Line(Point(j * self.width / 7, 0), Point(j * self.\n width / 7, self.height))\n verLines.setOutline('black')\n verLines.draw(self.win)\n for y in range(0, 6):\n for x in range(0, 7):\n slot = Circle(Point(x * self.width / 7 + 50, y * self.\n height / 6 + 50), 37.5)\n slot.setFill('white')\n slot.draw(self.win)\n\n def update_pieces(self, x, y, color):\n board = self.engine.get_board()\n pointY = y * self.height / 6\n pointX = x * self.width / 7\n piece = Circle(Point(pointX + 50, pointY + 50), 37.5)\n if color == 'r':\n piece.setFill('red')\n else:\n piece.setFill('black')\n piece.draw(self.win)\n\n def end(self):\n self.get_window().close()\n\n\nclass Menu:\n\n def __init__(self, window):\n self.window = window\n skyBlue = color_rgb(135, 206, 250)\n royalBlue = color_rgb(65, 105, 225)\n self.menu = Rectangle(Point(0.2 * 500, 0.15 * 500), Point(0.8 * 500,\n 0.8 * 500))\n self.menu.setFill(skyBlue)\n self.menu.setOutline(skyBlue)\n self.save = Rectangle(Point(0.25 * 500, 0.2 * 500), Point(0.75 * \n 500, 0.35 * 500))\n self.save.setOutline(royalBlue)\n self.save.setFill(royalBlue)\n self.saveTxt = Text(Point(0.5 * 500, 0.275 * 500), 'SAVE')\n self.saveTxt.setSize(30)\n self.saveTxt.setFace('helvetica')\n self.saveTxt.setStyle('bold')\n self.load = Rectangle(Point(0.25 * 500, 0.4 * 500), Point(0.75 * \n 500, 0.55 * 500))\n self.load.setOutline(royalBlue)\n 
self.load.setFill(royalBlue)\n self.loadTxt = Text(Point(0.5 * 500, 0.475 * 500), 'LOAD')\n self.loadTxt.setSize(30)\n self.loadTxt.setFace('helvetica')\n self.loadTxt.setStyle('bold')\n self.quit = Rectangle(Point(0.25 * 500, 0.6 * 500), Point(0.75 * \n 500, 0.75 * 500))\n self.quit.setOutline(royalBlue)\n self.quit.setFill(royalBlue)\n self.quitTxt = Text(Point(0.5 * 500, 0.675 * 500), 'QUIT')\n self.quitTxt.setSize(30)\n self.quitTxt.setFace('helvetica')\n self.quitTxt.setStyle('bold')\n\n def openMenu(self):\n self.menu.draw(self.window)\n self.save.draw(self.window)\n self.saveTxt.draw(self.window)\n self.load.draw(self.window)\n self.loadTxt.draw(self.window)\n self.quit.draw(self.window)\n self.quitTxt.draw(self.window)\n\n def closeMenu(self):\n self.menu.undraw()\n self.save.undraw()\n self.saveTxt.undraw()\n self.load.undraw()\n self.loadTxt.undraw()\n self.quit.undraw()\n self.quitTxt.undraw()\n",
"step-5": "import graphics \nfrom graphics import *\n\nclass Renderer():\n def __init__(self, engine, width=700, height=600):\n self.width = width\n self.height = height\n self.engine = engine\n self.win = GraphWin(\"Game Board\", width, height)\n self.win.setBackground(\"blue\")\n\n def update(self):\n self.win.update()\n\n\n def get_window(self):\n return(self.win)\n\n\n def get_width(self):\n return self.width\n\n\n def draw_board(self):\n for i in range(0, 6): #Determines size of terrain\n horLines = Line(Point(0, i*self.height/6),Point(self.width, i*self.height/6))\n horLines.setOutline('black')\n horLines.draw(self.win)\n\n for j in range(0, 7):\n verLines = Line(Point(j*self.width/7, 0),Point(j*self.width/7, self.height))\n verLines.setOutline('black')\n verLines.draw(self.win)\n\n for y in range(0,6):\n for x in range(0,7):\n slot = Circle(Point(x*self.width/7+50,y*self.height/6+50),37.5)\n slot.setFill(\"white\")\n slot.draw(self.win)\n\n def update_pieces(self,x,y,color):\n board = self.engine.get_board()\n pointY = y*self.height/6\n pointX = x*self.width/7\n piece = Circle(Point(pointX+50,pointY+50),37.5)\n if color == 'r':\n piece.setFill(\"red\")\n else:\n piece.setFill(\"black\")\n piece.draw(self.win)\n\n\n def end(self):\n self.get_window().close()\n\nclass Menu(): #CHANGE TO SELF. 
WIDTH AND HIEGHT\n def __init__(self,window):\n self.window = window\n\n skyBlue = color_rgb(135,206,250)\n royalBlue = color_rgb(65,105,225)\n\n self.menu = Rectangle(Point(.2*500,.15*500),Point(.8*500,.8*500))\n self.menu.setFill(skyBlue)\n self.menu.setOutline(skyBlue)\n\n self.save = Rectangle(Point(.25*500,.2*500),Point(.75*500,.35*500))\n self.save.setOutline(royalBlue)\n self.save.setFill(royalBlue)\n\n self.saveTxt = Text(Point(.50*500,.275*500), \"SAVE\")\n self.saveTxt.setSize(30)\n self.saveTxt.setFace(\"helvetica\")\n self.saveTxt.setStyle(\"bold\")\n\n self.load = Rectangle(Point(.25*500,.4*500),Point(.75*500,.55*500))\n self.load.setOutline(royalBlue)\n self.load.setFill(royalBlue)\n\n self.loadTxt = Text(Point(.50*500,.475*500), \"LOAD\")\n self.loadTxt.setSize(30)\n self.loadTxt.setFace(\"helvetica\")\n self.loadTxt.setStyle(\"bold\")\n\n self.quit = Rectangle(Point(.25*500,.6*500),Point(.75*500,.75*500))\n self.quit.setOutline(royalBlue)\n self.quit.setFill(royalBlue)\n\n self.quitTxt = Text(Point(.50*500,.675*500), \"QUIT\")\n self.quitTxt.setSize(30)\n self.quitTxt.setFace(\"helvetica\")\n self.quitTxt.setStyle(\"bold\")\n\n def openMenu(self):\n self.menu.draw(self.window)\n self.save.draw(self.window)\n self.saveTxt.draw(self.window)\n self.load.draw(self.window)\n self.loadTxt.draw(self.window)\n self.quit.draw(self.window)\n self.quitTxt.draw(self.window)\n\n def closeMenu(self):\n self.menu.undraw()\n self.save.undraw()\n self.saveTxt.undraw()\n self.load.undraw()\n self.loadTxt.undraw()\n self.quit.undraw()\n self.quitTxt.undraw()",
"step-ids": [
8,
9,
10,
12,
14
]
}
|
[
8,
9,
10,
12,
14
] |
# Smoke-test script: identify this gist test file on stdout.
MESSAGE = "gist test file4"
print(MESSAGE)
|
normal
|
{
"blob_id": "ec4725b5b60d10e86b29aab3723917ace5cf52f6",
"index": 8452,
"step-1": "<mask token>\n",
"step-2": "print('gist test file4')\n",
"step-3": "print(\"gist test file4\")",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
class Person:
    """Context of the State pattern: delegates laugh/cry to its Mood."""

    def __init__(self, mood):
        self.mood = mood

    def laugh(self):
        # Whatever the current mood defines as laughing (possibly nothing).
        self.mood.laugh()

    def cry(self):
        # Whatever the current mood defines as crying (possibly nothing).
        self.mood.cry()

    def setMood(self, mood):
        """Swap in a different Mood strategy."""
        self.mood = mood
class Mood:
    """Base mood strategy; concrete moods override laugh and/or cry."""

    def laugh(self):
        """Default laugh: do nothing."""

    def cry(self):
        """Default cry: do nothing."""
class HappyMood(Mood):
    """Mood that laughs out loud; inherits the no-op cry from Mood."""

    def laugh(self):
        # Parenthesized print works on both Python 2 and 3; the original
        # bare print statement is a SyntaxError on Python 3.
        print('Ha ha ha!')
class SadMood(Mood):
    """Mood that cries; inherits the no-op laugh from Mood."""

    def cry(self):
        # Parenthesized print works on both Python 2 and 3; the original
        # bare print statement is a SyntaxError on Python 3.
        print('Sniff sniff')
# Demo: behavior is delegated to the current Mood, then the mood is swapped.
p=Person(HappyMood())
p.laugh()  # HappyMood overrides laugh -> prints
p.cry()    # HappyMood inherits Mood's no-op cry
p.setMood(SadMood())
p.laugh()  # SadMood inherits Mood's no-op laugh
p.cry()    # SadMood overrides cry -> prints
|
normal
|
{
"blob_id": "4deb691545887104b3fb70dd2be52138088ba1e8",
"index": 1751,
"step-1": "class Person:\n\tdef __init__(self,mood):\n\t\tself.mood=mood;\n\n\tdef laugh(self):\n\t\tself.mood.laugh()\n\n\tdef cry(self):\n\t\tself.mood.cry()\n\t\n\tdef setMood(self, mood):\n\t\tself.mood=mood\n\nclass Mood:\n\tdef laugh(self):\n\t\tpass\n\tdef cry(self):\n\t\tpass\n\nclass HappyMood(Mood):\n\tdef laugh(self):\n\t\tprint 'Ha ha ha!'\n\nclass SadMood(Mood):\n\tdef cry(self):\n\t\tprint 'Sniff sniff'\n\np=Person(HappyMood())\np.laugh()\np.cry()\n\np.setMood(SadMood())\np.laugh()\np.cry()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
'''
Module for handling configurable portions of tools
'''
from json import load
# Path of the configuration file loaded when this module is imported.
default_file_loc = 'config.json'
# Module-level configuration mapping; populated by loadConfiguration().
config = None


def loadConfiguration(fileloc):
    """Load JSON configuration from *fileloc* into the module-level config.

    The first successful load replaces ``config`` wholesale; later loads
    are merged on top of it with ``dict.update`` (new keys win).

    JSON is UTF-8 by specification, so the encoding is stated explicitly
    instead of depending on the platform's locale default.
    """
    global config
    with open(fileloc, 'r', encoding='utf-8') as file_:
        conf = load(file_)
    if config is None:
        config = conf
    else:
        config.update(conf)


def get(key):
    """Return the configuration value for *key*.

    Raises KeyError if the key is absent, and TypeError if no
    configuration has been loaded yet (``config`` is still None).
    """
    return config[key]
loadConfiguration(default_file_loc)
|
normal
|
{
"blob_id": "5261ae90a67e2df8dd1c679a8046ee3e0cbc6221",
"index": 3264,
"step-1": "<mask token>\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\nloadConfiguration(default_file_loc)\n",
"step-3": "<mask token>\ndefault_file_loc = 'config.json'\nconfig = None\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\nloadConfiguration(default_file_loc)\n",
"step-4": "<mask token>\nfrom json import load\ndefault_file_loc = 'config.json'\nconfig = None\n\n\ndef loadConfiguration(fileloc):\n \"\"\"Loads configuration from file location\"\"\"\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\n\ndef get(key):\n \"\"\"Gets the configuration value for key \"\"\"\n return config[key]\n\n\nloadConfiguration(default_file_loc)\n",
"step-5": "'''\nModule for handling configurable portions of tools\n'''\n\nfrom json import load\n\ndefault_file_loc = 'config.json'\nconfig = None\n\ndef loadConfiguration(fileloc):\n '''Loads configuration from file location'''\n global config\n with open(fileloc, 'r') as file_:\n conf = load(file_)\n if config is None:\n config = conf\n else:\n config.update(conf)\n\ndef get(key):\n '''Gets the configuration value for key '''\n return config[key]\n\nloadConfiguration(default_file_loc)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from __future__ import division, print_function, absolute_import
"""
The dataset is stored in a CSV file, so we can use the TFLearn load_csv() function to
load the data from the CSV file into a python list.
We specify the 'target_column' argument to indicate that our labels (survived or not)
are located in the first column (id: 0). The function will return a tuple: (data, labels).
"""
import numpy as np
import tflearn
#DownLoad the Titanic dataset
from tflearn.datasets import titanic
titanic.download_dataset('titanic_dataset.csv')
#loadCSVfile,indicate that the first column represent labels
from tflearn.data_utils import load_csv
data, labels = load_csv('titanic_dataset.csv',target_column=0,
categorical_labels=True,n_classes=2)
'''
Preprocessing Data
Data are given 'as is' and need some preprocessing to be ready for use in our deep neural network classifier.
First, we will discard the fields that are not likely to help in our analysis.
For example, we make the assumption that the 'name' field will not be very useful in our task,
since a passenger's name and his or her chance of surviving are probably not correlated.
With such thinking, we can go ahead and discard the 'name' and 'ticket' fields.
Then, we need to convert all our data to numerical values,
because a neural network model can only perform operations over numbers.
However, our dataset contains some non-numerical values, such as 'name' and 'sex'. Because 'name' is discarded,
we just need to handle the 'sex' field. In this simple case, we will just assign '0' to males and '1' to females.
example:
survived pclass name sex age sibsp parch ticket fare
1 1 Aubart, Mme. Leontine Pauline female 24 0 0 PC 17477 69.3000
'''
# Here is the preprocessing function:
#Preprocessing function
def preprocess(passengers,columns_to_delete):
#Sort by descending is and delete column
for column_to_delete in sorted(columns_to_delete,reverse = True):
[passenger.pop(column_to_delete) for passenger in passengers]
# print(type(passengers[0]))
for i in range(len(passengers)):
# Converting 'sex' field to float (id is 1 after removing labels column)
passengers[i][1] = 1. if passengers[i][1] == 'female' else 0.
print(np.array(passengers,dtype=np.float32))
return np.array(passengers,dtype=np.float32)
# Ignore 'name' and 'ticket' columns (id 1 & 6 of data array)
to_ignore = [1,6]
#Preprocess data
data = preprocess(data,to_ignore)
'''
Build a Deep Neural Network
We are building a 3-layer neural network using TFLearn. First, we need to specify the shape of our input data.
In our case, each sample has a total of 6 features, and we will process samples per batch to save memory.
So our data input shape is [None, 6] ('None' stands for an unknown dimension, so we can change the total
number of samples that are processed in a batch).
'''
# Build neural network
net = tflearn.input_data(shape=[None,6])
net = tflearn.fully_connected(net,32)
net = tflearn.fully_connected(net,32)
net = tflearn.fully_connected(net,2,activation='softmax')
net =tflearn.regression(net)
'''
Training
TFLearn provides a model wrapper ('DNN') that automatically performs neural network classifier tasks,
such as training, prediction, save/restore, and more. We will run it for 10 epochs
(i.e., the network will see all data 10 times) with a batch size of 16.
'''
#Define model
model = tflearn.DNN(net)
# Start training (apply gradient descent algorithm)
model.fit(data, labels, n_epoch=10, batch_size=16, show_metric=True)
'''
Try the Model
It's time to try out our model.
For fun, let's take Titanic movie protagonists
(DiCaprio and Winslet) and calculate their chance of surviving (class 1).
'''
# Let's create some data for DiCaprio and Winslet
dicaprio = [3, 'Jack Dawson', 'male', 19, 0, 0, 'N/A', 5.0000]
winslet = [1, 'Rose DeWitt Bukater', 'female', 17, 1, 2, 'N/A', 100.0000]
# Preprocess data
dicaprio, winslet = preprocess([dicaprio, winslet], to_ignore)
# Predict surviving chances (class 1 results)
pred = model.predict([dicaprio, winslet])
print("DiCaprio Surviving Rate:", pred[0][1])
print("Winslet Surviving Rate:", pred[1][1])
|
normal
|
{
"blob_id": "87e9c1d264523d02b287dedb44472fc08b488908",
"index": 9630,
"step-1": "<mask token>\n\n\ndef preprocess(passengers, columns_to_delete):\n for column_to_delete in sorted(columns_to_delete, reverse=True):\n [passenger.pop(column_to_delete) for passenger in passengers]\n for i in range(len(passengers)):\n passengers[i][1] = 1.0 if passengers[i][1] == 'female' else 0.0\n print(np.array(passengers, dtype=np.float32))\n return np.array(passengers, dtype=np.float32)\n\n\n<mask token>\n",
"step-2": "<mask token>\ntitanic.download_dataset('titanic_dataset.csv')\n<mask token>\n\n\ndef preprocess(passengers, columns_to_delete):\n for column_to_delete in sorted(columns_to_delete, reverse=True):\n [passenger.pop(column_to_delete) for passenger in passengers]\n for i in range(len(passengers)):\n passengers[i][1] = 1.0 if passengers[i][1] == 'female' else 0.0\n print(np.array(passengers, dtype=np.float32))\n return np.array(passengers, dtype=np.float32)\n\n\n<mask token>\nmodel.fit(data, labels, n_epoch=10, batch_size=16, show_metric=True)\n<mask token>\nprint('DiCaprio Surviving Rate:', pred[0][1])\nprint('Winslet Surviving Rate:', pred[1][1])\n",
"step-3": "<mask token>\ntitanic.download_dataset('titanic_dataset.csv')\n<mask token>\ndata, labels = load_csv('titanic_dataset.csv', target_column=0,\n categorical_labels=True, n_classes=2)\n<mask token>\n\n\ndef preprocess(passengers, columns_to_delete):\n for column_to_delete in sorted(columns_to_delete, reverse=True):\n [passenger.pop(column_to_delete) for passenger in passengers]\n for i in range(len(passengers)):\n passengers[i][1] = 1.0 if passengers[i][1] == 'female' else 0.0\n print(np.array(passengers, dtype=np.float32))\n return np.array(passengers, dtype=np.float32)\n\n\nto_ignore = [1, 6]\ndata = preprocess(data, to_ignore)\n<mask token>\nnet = tflearn.input_data(shape=[None, 6])\nnet = tflearn.fully_connected(net, 32)\nnet = tflearn.fully_connected(net, 32)\nnet = tflearn.fully_connected(net, 2, activation='softmax')\nnet = tflearn.regression(net)\n<mask token>\nmodel = tflearn.DNN(net)\nmodel.fit(data, labels, n_epoch=10, batch_size=16, show_metric=True)\n<mask token>\ndicaprio = [3, 'Jack Dawson', 'male', 19, 0, 0, 'N/A', 5.0]\nwinslet = [1, 'Rose DeWitt Bukater', 'female', 17, 1, 2, 'N/A', 100.0]\ndicaprio, winslet = preprocess([dicaprio, winslet], to_ignore)\npred = model.predict([dicaprio, winslet])\nprint('DiCaprio Surviving Rate:', pred[0][1])\nprint('Winslet Surviving Rate:', pred[1][1])\n",
"step-4": "from __future__ import division, print_function, absolute_import\n<mask token>\nimport numpy as np\nimport tflearn\nfrom tflearn.datasets import titanic\ntitanic.download_dataset('titanic_dataset.csv')\nfrom tflearn.data_utils import load_csv\ndata, labels = load_csv('titanic_dataset.csv', target_column=0,\n categorical_labels=True, n_classes=2)\n<mask token>\n\n\ndef preprocess(passengers, columns_to_delete):\n for column_to_delete in sorted(columns_to_delete, reverse=True):\n [passenger.pop(column_to_delete) for passenger in passengers]\n for i in range(len(passengers)):\n passengers[i][1] = 1.0 if passengers[i][1] == 'female' else 0.0\n print(np.array(passengers, dtype=np.float32))\n return np.array(passengers, dtype=np.float32)\n\n\nto_ignore = [1, 6]\ndata = preprocess(data, to_ignore)\n<mask token>\nnet = tflearn.input_data(shape=[None, 6])\nnet = tflearn.fully_connected(net, 32)\nnet = tflearn.fully_connected(net, 32)\nnet = tflearn.fully_connected(net, 2, activation='softmax')\nnet = tflearn.regression(net)\n<mask token>\nmodel = tflearn.DNN(net)\nmodel.fit(data, labels, n_epoch=10, batch_size=16, show_metric=True)\n<mask token>\ndicaprio = [3, 'Jack Dawson', 'male', 19, 0, 0, 'N/A', 5.0]\nwinslet = [1, 'Rose DeWitt Bukater', 'female', 17, 1, 2, 'N/A', 100.0]\ndicaprio, winslet = preprocess([dicaprio, winslet], to_ignore)\npred = model.predict([dicaprio, winslet])\nprint('DiCaprio Surviving Rate:', pred[0][1])\nprint('Winslet Surviving Rate:', pred[1][1])\n",
"step-5": "from __future__ import division, print_function, absolute_import\n\"\"\"\nThe dataset is stored in a CSV file, so we can use the TFLearn load_csv() function to\n load the data from the CSV file into a python list.\n We specify the 'target_column' argument to indicate that our labels (survived or not)\n are located in the first column (id: 0). The function will return a tuple: (data, labels).\n\"\"\"\nimport numpy as np\nimport tflearn\n\n#DownLoad the Titanic dataset\nfrom tflearn.datasets import titanic\ntitanic.download_dataset('titanic_dataset.csv')\n\n#loadCSVfile,indicate that the first column represent labels\nfrom tflearn.data_utils import load_csv\ndata, labels = load_csv('titanic_dataset.csv',target_column=0,\n\t\t\t\t\t\tcategorical_labels=True,n_classes=2)\n\n'''\nPreprocessing Data\n\nData are given 'as is' and need some preprocessing to be ready for use in our deep neural network classifier.\nFirst, we will discard the fields that are not likely to help in our analysis.\nFor example, we make the assumption that the 'name' field will not be very useful in our task,\nsince a passenger's name and his or her chance of surviving are probably not correlated.\nWith such thinking, we can go ahead and discard the 'name' and 'ticket' fields.\nThen, we need to convert all our data to numerical values,\nbecause a neural network model can only perform operations over numbers.\nHowever, our dataset contains some non-numerical values, such as 'name' and 'sex'. Because 'name' is discarded,\nwe just need to handle the 'sex' field. In this simple case, we will just assign '0' to males and '1' to females.\n\nexample:\nsurvived\tpclass\tname\t\t\t\t\t\t\tsex\t\tage\t\tsibsp\tparch\tticket\t\tfare\n1\t\t\t1\t\tAubart, Mme. 
Leontine Pauline\tfemale\t24\t\t0\t\t0\t\tPC 17477\t69.3000\n'''\n# Here is the preprocessing function:\n#Preprocessing function\ndef preprocess(passengers,columns_to_delete):\n\t#Sort by descending is and delete column\n\tfor column_to_delete in sorted(columns_to_delete,reverse = True):\n\t\t[passenger.pop(column_to_delete) for passenger in passengers]\n\t# print(type(passengers[0]))\n\tfor i in range(len(passengers)):\n\t\t# Converting 'sex' field to float (id is 1 after removing labels column)\n\t\tpassengers[i][1] = 1. if passengers[i][1] == 'female' else 0.\n\tprint(np.array(passengers,dtype=np.float32))\n\treturn np.array(passengers,dtype=np.float32)\n\n# Ignore 'name' and 'ticket' columns (id 1 & 6 of data array)\nto_ignore = [1,6]\n#Preprocess data\ndata = preprocess(data,to_ignore)\n\n'''\nBuild a Deep Neural Network\n\nWe are building a 3-layer neural network using TFLearn. First, we need to specify the shape of our input data.\nIn our case, each sample has a total of 6 features, and we will process samples per batch to save memory.\nSo our data input shape is [None, 6] ('None' stands for an unknown dimension, so we can change the total\nnumber of samples that are processed in a batch).\n'''\n# Build neural network\nnet = tflearn.input_data(shape=[None,6])\nnet = tflearn.fully_connected(net,32)\nnet = tflearn.fully_connected(net,32)\nnet = tflearn.fully_connected(net,2,activation='softmax')\nnet =tflearn.regression(net)\n\n'''\nTraining\n\nTFLearn provides a model wrapper ('DNN') that automatically performs neural network classifier tasks,\nsuch as training, prediction, save/restore, and more. 
We will run it for 10 epochs\n(i.e., the network will see all data 10 times) with a batch size of 16.\n'''\n\n#Define model\nmodel = tflearn.DNN(net)\n# Start training (apply gradient descent algorithm)\nmodel.fit(data, labels, n_epoch=10, batch_size=16, show_metric=True)\n\n'''\nTry the Model\nIt's time to try out our model.\nFor fun, let's take Titanic movie protagonists\n(DiCaprio and Winslet) and calculate their chance of surviving (class 1).\n'''\n\n# Let's create some data for DiCaprio and Winslet\ndicaprio = [3, 'Jack Dawson', 'male', 19, 0, 0, 'N/A', 5.0000]\nwinslet = [1, 'Rose DeWitt Bukater', 'female', 17, 1, 2, 'N/A', 100.0000]\n# Preprocess data\ndicaprio, winslet = preprocess([dicaprio, winslet], to_ignore)\n# Predict surviving chances (class 1 results)\npred = model.predict([dicaprio, winslet])\nprint(\"DiCaprio Surviving Rate:\", pred[0][1])\nprint(\"Winslet Surviving Rate:\", pred[1][1])\n\n\n\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
'''
Created on 2014-03-25
@author: ZhaoJianning
Modified by WangHairui on 2014-09-12
'''
import unittest
import Stability
import time
import os,sys
import runtests
import re
import android
import datetime
class TestCamera(unittest.TestCase):
def setUp(self):
self.error = ''
self.setup = Stability.SetupDeviceConnections()
self.a = self.setup.initializeTestDevice()
self.id = self.setup.device_id
self.stabdl = Stability.StabDL(self.a)
self.path = "/mnt/sdcard/LepiPhoto"
self.a.input.back(3)
def tearDown(self):
self.a.input.back(3)
def launchCamera(self):
try:
act = "android.intent.action.MAIN"
cat = "android.intent.category.LAUNCHER"
flg = "0x10200000"
cmp = "com.letv.camera/.CameraActivity"
cmd = "am start -a %s -c %s -f %s -n %s" %(act, cat, flg, cmp)
#self.a.device.sh("su")
result = self.a.device.sh(cmd)
print result
if "Exception" in str(result) or "Error" in str(result):
return False
return True
except:
self.error += "launch camera meets exception"
return False
def checkPhoto(self):
photoList = self.a.device.sh("ls %s" %self.path)
print photoList
today = datetime.date.today().strftime('%Y%m%d')
if today in str(photoList):
return True
return False
def pullPhoto(self):
workd = os.path.join(android.log.report_directory(), android.log.logs_directory())
os.system('adb -s %s pull %s %s' %(self.id,self.path,workd))
self.a.device.sh("rm -rf %s/*.jpg" %self.path)
'''
def testCamera(self):
"""测试摄像头驱动工作正常|操作步骤:1. 命令行启动摄像头 2. 拍下照片 Fail项:1. 启动摄像头失败 2. 照片未拍下"""
try:
print "test camera"
self.a.device.sh("rm %s/*" %self.path)
if not self.launchCamera():
self.error += "launch camera failed"
raise Exception
time.sleep(10)
self.a.input.center()
time.sleep(10)
if not self.checkPhoto():
self.error = "failed to capture the photo"
raise Exception
self.pullPhoto()
except Exception, e:
self.a.log.debug("", "\n test camera")
self.fail("Error happened: %s %s" %(self.error, e))
'''
def testCamera(self):
"""测试摄像头驱动工作正常|操作步骤:1. 命令行启动摄像头 2. 拍下照片 Fail项:1. 启动摄像头失败 2. 照片未拍下"""
self.jar = "UiAutomator.jar"
#self.jar = "UiAutomator.jar"
self.case = "com.letv.camera.Camera#testCapture"
try:
self.a.device.sh("rm -rf %s/*" %self.path)
ua = Stability.UiAutomator(self.id, self.jar, self.case)
result, info = ua.runtest()
if result != 'PASS':
self.error = str(info)
raise Exception
if not self.checkPhoto():
self.error = "failed to capture the photo"
raise Exception
self.pullPhoto()
except Exception, e :
self.a.log.debug("", "\n testCamera")
self.fail("Error happened: %s %s" % (self.error, e))
def testOpenExit(self):
"""测试打开关闭摄像头|1. 打开乐拍 2, 退出乐拍"""
self.jar = "UiAutomator.jar"
#self.jar = "UiAutomator.jar"
self.case = "com.letv.camera.Camera#testOpenExit"
try:
#self.a.device.sh("rm -rf %s/*" %self.path)
ua = Stability.UiAutomator(self.id, self.jar, self.case)
result, info = ua.runtest()
if result != 'PASS':
self.error = str(info)
raise Exception
except Exception, e :
self.a.log.debug("", "\n testCamera")
self.fail("Error happened: %s %s" % (self.error, e))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testBrowser']
unittest.main()
|
normal
|
{
"blob_id": "a520a93ed2dcd26b9470ed56e96b65a1b3550176",
"index": 6260,
"step-1": "# -*- coding: utf-8 -*-\r\n'''\r\nCreated on 2014-03-25\r\n\r\n@author: ZhaoJianning\r\nModified by WangHairui on 2014-09-12\r\n'''\r\n\r\nimport unittest\r\nimport Stability\r\nimport time\r\nimport os,sys\r\nimport runtests\r\nimport re\r\nimport android\r\nimport datetime\r\n\r\nclass TestCamera(unittest.TestCase):\r\n \r\n def setUp(self):\r\n self.error = ''\r\n self.setup = Stability.SetupDeviceConnections()\r\n self.a = self.setup.initializeTestDevice()\r\n self.id = self.setup.device_id\r\n self.stabdl = Stability.StabDL(self.a) \r\n self.path = \"/mnt/sdcard/LepiPhoto\"\r\n self.a.input.back(3)\r\n \r\n def tearDown(self):\r\n self.a.input.back(3)\r\n \r\n def launchCamera(self):\r\n try:\r\n act = \"android.intent.action.MAIN\"\r\n cat = \"android.intent.category.LAUNCHER\"\r\n flg = \"0x10200000\"\r\n cmp = \"com.letv.camera/.CameraActivity\"\r\n cmd = \"am start -a %s -c %s -f %s -n %s\" %(act, cat, flg, cmp)\r\n \r\n #self.a.device.sh(\"su\")\r\n result = self.a.device.sh(cmd)\r\n print result\r\n if \"Exception\" in str(result) or \"Error\" in str(result):\r\n return False\r\n return True\r\n except:\r\n self.error += \"launch camera meets exception\"\r\n return False\r\n \r\n def checkPhoto(self):\r\n photoList = self.a.device.sh(\"ls %s\" %self.path)\r\n print photoList\r\n today = datetime.date.today().strftime('%Y%m%d')\r\n if today in str(photoList):\r\n return True\r\n return False\r\n\r\n def pullPhoto(self):\r\n workd = os.path.join(android.log.report_directory(), android.log.logs_directory())\r\n os.system('adb -s %s pull %s %s' %(self.id,self.path,workd))\r\n self.a.device.sh(\"rm -rf %s/*.jpg\" %self.path)\r\n ''' \r\n def testCamera(self):\r\n \"\"\"测试摄像头驱动工作正常|操作步骤:1. 命令行启动摄像头 2. 拍下照片 Fail项:1. 启动摄像头失败 2. 
照片未拍下\"\"\"\r\n try:\r\n print \"test camera\"\r\n self.a.device.sh(\"rm %s/*\" %self.path)\r\n if not self.launchCamera():\r\n self.error += \"launch camera failed\"\r\n raise Exception\r\n time.sleep(10)\r\n self.a.input.center()\r\n time.sleep(10)\r\n if not self.checkPhoto():\r\n self.error = \"failed to capture the photo\"\r\n raise Exception\r\n self.pullPhoto()\r\n except Exception, e:\r\n self.a.log.debug(\"\", \"\\n test camera\")\r\n self.fail(\"Error happened: %s %s\" %(self.error, e))\r\n '''\r\n \r\n def testCamera(self):\r\n \"\"\"测试摄像头驱动工作正常|操作步骤:1. 命令行启动摄像头 2. 拍下照片 Fail项:1. 启动摄像头失败 2. 照片未拍下\"\"\"\r\n \r\n self.jar = \"UiAutomator.jar\"\r\n #self.jar = \"UiAutomator.jar\"\r\n self.case = \"com.letv.camera.Camera#testCapture\"\r\n try:\r\n self.a.device.sh(\"rm -rf %s/*\" %self.path)\r\n ua = Stability.UiAutomator(self.id, self.jar, self.case)\r\n result, info = ua.runtest()\r\n if result != 'PASS':\r\n self.error = str(info)\r\n raise Exception\r\n if not self.checkPhoto():\r\n self.error = \"failed to capture the photo\"\r\n raise Exception\r\n self.pullPhoto()\r\n except Exception, e :\r\n self.a.log.debug(\"\", \"\\n testCamera\")\r\n self.fail(\"Error happened: %s %s\" % (self.error, e))\r\n \r\n def testOpenExit(self):\r\n \"\"\"测试打开关闭摄像头|1. 打开乐拍 2, 退出乐拍\"\"\"\r\n self.jar = \"UiAutomator.jar\"\r\n #self.jar = \"UiAutomator.jar\"\r\n self.case = \"com.letv.camera.Camera#testOpenExit\"\r\n try:\r\n #self.a.device.sh(\"rm -rf %s/*\" %self.path)\r\n ua = Stability.UiAutomator(self.id, self.jar, self.case)\r\n result, info = ua.runtest()\r\n if result != 'PASS':\r\n self.error = str(info)\r\n raise Exception\r\n except Exception, e :\r\n self.a.log.debug(\"\", \"\\n testCamera\")\r\n self.fail(\"Error happened: %s %s\" % (self.error, e))\r\n \r\n \r\nif __name__ == \"__main__\":\r\n #import sys;sys.argv = ['', 'Test.testBrowser']\r\n unittest.main()\r\n \r\n \r\n \r\n\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import tensorflow as tf
import bbox_lib
def hard_negative_loss_mining(c_loss, negative_mask, k):
"""Hard negative mining in classification loss."""
# make sure at least one negative example
k = tf.maximum(k, 1)
# make sure at most all negative.
k = tf.minimum(k, c_loss.shape[-1])
neg_c_loss = c_loss * negative_mask
neg_c_loss = tf.nn.top_k(neg_c_loss, k)[0]
return tf.reduce_sum(neg_c_loss)
def compute_loss(network_output, bboxes, labels, num_classes, c_weight, r_weight,
neg_label_value, ignore_label_value, negative_ratio):
"""Compute loss function."""
with tf.variable_scope("losses"):
batch_size = bboxes.shape[0].value
one_hot_labels = tf.one_hot(labels + 1, num_classes + 1)
negative_mask = tf.cast(tf.equal(labels, neg_label_value), tf.float32)
positive_mask = tf.cast(tf.logical_and(tf.not_equal(labels, ignore_label_value),
tf.not_equal(labels, neg_label_value)), tf.float32)
with tf.variable_scope("classification_loss"):
classification_output = network_output[0]
classification_output = tf.reshape(
classification_output, [batch_size, -1, num_classes + 1])
c_loss = tf.losses.softmax_cross_entropy(
one_hot_labels, classification_output, reduction=tf.losses.Reduction.NONE)
num_positive = tf.cast(tf.reduce_sum(positive_mask), tf.int32)
pos_c_loss = tf.reduce_sum(c_loss * positive_mask)
neg_c_loss = hard_negative_loss_mining(c_loss, negative_mask,
num_positive * negative_ratio)
c_loss = (pos_c_loss + neg_c_loss) / batch_size
with tf.variable_scope("regression_loss"):
regression_output = network_output[1]
regression_output = tf.reshape(
regression_output, [batch_size, -1, 4])
r_loss = tf.losses.huber_loss(regression_output, bboxes, delta=1,
reduction=tf.losses.Reduction.NONE)
r_loss = tf.reduce_sum(
r_loss * positive_mask[..., tf.newaxis]) / batch_size
return c_weight * c_loss + r_weight * r_loss, c_loss, r_loss
def predict(network_output, mask, score_threshold, neg_label_value, anchors,
max_prediction, num_classes):
"""Decode predictions from the neural network."""
classification_output = network_output[0]
batch_size, _, _, output_dim = classification_output.get_shape().as_list()
regression_output = network_output[1]
bbox_list = []
label_list = []
ay, ax, ah, aw = bbox_lib.get_center_coordinates_and_sizes(anchors)
anchor_center_index = tf.cast(tf.transpose(tf.stack([ay, ax])), tf.int32)
for single_classification_output, single_regression_output, single_mask in zip(
classification_output, regression_output, mask):
# num_classes + 1 due to the negative class.
single_classification_output = tf.reshape(
single_classification_output, [-1, num_classes + 1])
single_classification_output = tf.nn.softmax(
single_classification_output, -1)
max_confidence = tf.reduce_max(single_classification_output, -1)
confident_mask = max_confidence > score_threshold
# - 1 due to the negative class.
max_index = tf.argmax(single_classification_output, 1) - 1
non_negative_mask = tf.not_equal(max_index, -1)
in_mask = tf.gather_nd(single_mask, anchor_center_index)
foreground_mask = tf.logical_and(
in_mask, tf.logical_and(confident_mask, non_negative_mask))
valid_labels = tf.boolean_mask(max_index, foreground_mask)
single_regression_output = tf.reshape(single_regression_output, [-1, 4])
predicted_bbox = bbox_lib.decode_box_with_anchor(
single_regression_output, anchors)
valid_boxes = tf.boolean_mask(predicted_bbox, foreground_mask)
valid_confidence_score = tf.boolean_mask(
max_confidence, foreground_mask)
selected_indices = tf.image.non_max_suppression(
valid_boxes, valid_confidence_score, max_prediction)
valid_boxes = tf.gather(valid_boxes, selected_indices)
valid_labels = tf.gather(valid_labels, selected_indices)
bbox_list.append(valid_boxes)
label_list.append(valid_labels)
return bbox_list, label_list
def build_model(num_classes, anchor_num_per_output):
    """Build the detection network on top of a frozen ResNet50 backbone.

    Returns a Keras model with two outputs: per-anchor class logits
    ((num_classes + 1) channels per anchor, the extra one being the
    negative class) and per-anchor box regression (4 channels per anchor).
    """
    backbone = tf.keras.applications.resnet50.ResNet50(
        include_top=False, weights="imagenet")
    # Freeze the pretrained backbone; only the detection heads are trained.
    for backbone_layer in backbone.layers:
        backbone_layer.trainable = False
    # Tap an intermediate feature map and regularize it with dropout.
    features = tf.keras.layers.Dropout(0.5)(
        backbone.get_layer(name='activation_39').output)
    cls_head = tf.keras.layers.Conv2D(
        (num_classes + 1) * anchor_num_per_output, (1, 1))(features)
    reg_head = tf.keras.layers.Conv2D(
        4 * anchor_num_per_output, (1, 1))(features)
    return tf.keras.models.Model(backbone.input, [cls_head, reg_head])
|
normal
|
{
"blob_id": "6e17fef4507c72190a77976e4a8b2f56880f2d6f",
"index": 4895,
"step-1": "<mask token>\n\n\ndef hard_negative_loss_mining(c_loss, negative_mask, k):\n \"\"\"Hard negative mining in classification loss.\"\"\"\n k = tf.maximum(k, 1)\n k = tf.minimum(k, c_loss.shape[-1])\n neg_c_loss = c_loss * negative_mask\n neg_c_loss = tf.nn.top_k(neg_c_loss, k)[0]\n return tf.reduce_sum(neg_c_loss)\n\n\ndef compute_loss(network_output, bboxes, labels, num_classes, c_weight,\n r_weight, neg_label_value, ignore_label_value, negative_ratio):\n \"\"\"Compute loss function.\"\"\"\n with tf.variable_scope('losses'):\n batch_size = bboxes.shape[0].value\n one_hot_labels = tf.one_hot(labels + 1, num_classes + 1)\n negative_mask = tf.cast(tf.equal(labels, neg_label_value), tf.float32)\n positive_mask = tf.cast(tf.logical_and(tf.not_equal(labels,\n ignore_label_value), tf.not_equal(labels, neg_label_value)), tf\n .float32)\n with tf.variable_scope('classification_loss'):\n classification_output = network_output[0]\n classification_output = tf.reshape(classification_output, [\n batch_size, -1, num_classes + 1])\n c_loss = tf.losses.softmax_cross_entropy(one_hot_labels,\n classification_output, reduction=tf.losses.Reduction.NONE)\n num_positive = tf.cast(tf.reduce_sum(positive_mask), tf.int32)\n pos_c_loss = tf.reduce_sum(c_loss * positive_mask)\n neg_c_loss = hard_negative_loss_mining(c_loss, negative_mask, \n num_positive * negative_ratio)\n c_loss = (pos_c_loss + neg_c_loss) / batch_size\n with tf.variable_scope('regression_loss'):\n regression_output = network_output[1]\n regression_output = tf.reshape(regression_output, [batch_size, \n -1, 4])\n r_loss = tf.losses.huber_loss(regression_output, bboxes, delta=\n 1, reduction=tf.losses.Reduction.NONE)\n r_loss = tf.reduce_sum(r_loss * positive_mask[..., tf.newaxis]\n ) / batch_size\n return c_weight * c_loss + r_weight * r_loss, c_loss, r_loss\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef hard_negative_loss_mining(c_loss, negative_mask, k):\n \"\"\"Hard negative mining in classification loss.\"\"\"\n k = tf.maximum(k, 1)\n k = tf.minimum(k, c_loss.shape[-1])\n neg_c_loss = c_loss * negative_mask\n neg_c_loss = tf.nn.top_k(neg_c_loss, k)[0]\n return tf.reduce_sum(neg_c_loss)\n\n\ndef compute_loss(network_output, bboxes, labels, num_classes, c_weight,\n r_weight, neg_label_value, ignore_label_value, negative_ratio):\n \"\"\"Compute loss function.\"\"\"\n with tf.variable_scope('losses'):\n batch_size = bboxes.shape[0].value\n one_hot_labels = tf.one_hot(labels + 1, num_classes + 1)\n negative_mask = tf.cast(tf.equal(labels, neg_label_value), tf.float32)\n positive_mask = tf.cast(tf.logical_and(tf.not_equal(labels,\n ignore_label_value), tf.not_equal(labels, neg_label_value)), tf\n .float32)\n with tf.variable_scope('classification_loss'):\n classification_output = network_output[0]\n classification_output = tf.reshape(classification_output, [\n batch_size, -1, num_classes + 1])\n c_loss = tf.losses.softmax_cross_entropy(one_hot_labels,\n classification_output, reduction=tf.losses.Reduction.NONE)\n num_positive = tf.cast(tf.reduce_sum(positive_mask), tf.int32)\n pos_c_loss = tf.reduce_sum(c_loss * positive_mask)\n neg_c_loss = hard_negative_loss_mining(c_loss, negative_mask, \n num_positive * negative_ratio)\n c_loss = (pos_c_loss + neg_c_loss) / batch_size\n with tf.variable_scope('regression_loss'):\n regression_output = network_output[1]\n regression_output = tf.reshape(regression_output, [batch_size, \n -1, 4])\n r_loss = tf.losses.huber_loss(regression_output, bboxes, delta=\n 1, reduction=tf.losses.Reduction.NONE)\n r_loss = tf.reduce_sum(r_loss * positive_mask[..., tf.newaxis]\n ) / batch_size\n return c_weight * c_loss + r_weight * r_loss, c_loss, r_loss\n\n\n<mask token>\n\n\ndef build_model(num_classes, anchor_num_per_output):\n base_network_model = tf.keras.applications.resnet50.ResNet50(include_top\n =False, 
weights='imagenet')\n for layer in base_network_model.layers:\n layer.trainable = False\n h = base_network_model.get_layer(name='activation_39').output\n drop_rate = 0.5\n h = tf.keras.layers.Dropout(drop_rate)(h)\n classification_branch = tf.keras.layers.Conv2D((num_classes + 1) *\n anchor_num_per_output, (1, 1))(h)\n regression_branch = tf.keras.layers.Conv2D(4 * anchor_num_per_output, (\n 1, 1))(h)\n model_outputs = [classification_branch, regression_branch]\n return tf.keras.models.Model(base_network_model.input, model_outputs)\n",
"step-3": "<mask token>\n\n\ndef hard_negative_loss_mining(c_loss, negative_mask, k):\n \"\"\"Hard negative mining in classification loss.\"\"\"\n k = tf.maximum(k, 1)\n k = tf.minimum(k, c_loss.shape[-1])\n neg_c_loss = c_loss * negative_mask\n neg_c_loss = tf.nn.top_k(neg_c_loss, k)[0]\n return tf.reduce_sum(neg_c_loss)\n\n\ndef compute_loss(network_output, bboxes, labels, num_classes, c_weight,\n r_weight, neg_label_value, ignore_label_value, negative_ratio):\n \"\"\"Compute loss function.\"\"\"\n with tf.variable_scope('losses'):\n batch_size = bboxes.shape[0].value\n one_hot_labels = tf.one_hot(labels + 1, num_classes + 1)\n negative_mask = tf.cast(tf.equal(labels, neg_label_value), tf.float32)\n positive_mask = tf.cast(tf.logical_and(tf.not_equal(labels,\n ignore_label_value), tf.not_equal(labels, neg_label_value)), tf\n .float32)\n with tf.variable_scope('classification_loss'):\n classification_output = network_output[0]\n classification_output = tf.reshape(classification_output, [\n batch_size, -1, num_classes + 1])\n c_loss = tf.losses.softmax_cross_entropy(one_hot_labels,\n classification_output, reduction=tf.losses.Reduction.NONE)\n num_positive = tf.cast(tf.reduce_sum(positive_mask), tf.int32)\n pos_c_loss = tf.reduce_sum(c_loss * positive_mask)\n neg_c_loss = hard_negative_loss_mining(c_loss, negative_mask, \n num_positive * negative_ratio)\n c_loss = (pos_c_loss + neg_c_loss) / batch_size\n with tf.variable_scope('regression_loss'):\n regression_output = network_output[1]\n regression_output = tf.reshape(regression_output, [batch_size, \n -1, 4])\n r_loss = tf.losses.huber_loss(regression_output, bboxes, delta=\n 1, reduction=tf.losses.Reduction.NONE)\n r_loss = tf.reduce_sum(r_loss * positive_mask[..., tf.newaxis]\n ) / batch_size\n return c_weight * c_loss + r_weight * r_loss, c_loss, r_loss\n\n\ndef predict(network_output, mask, score_threshold, neg_label_value, anchors,\n max_prediction, num_classes):\n \"\"\"Decode predictions from the neural 
network.\"\"\"\n classification_output = network_output[0]\n batch_size, _, _, output_dim = classification_output.get_shape().as_list()\n regression_output = network_output[1]\n bbox_list = []\n label_list = []\n ay, ax, ah, aw = bbox_lib.get_center_coordinates_and_sizes(anchors)\n anchor_center_index = tf.cast(tf.transpose(tf.stack([ay, ax])), tf.int32)\n for single_classification_output, single_regression_output, single_mask in zip(\n classification_output, regression_output, mask):\n single_classification_output = tf.reshape(single_classification_output,\n [-1, num_classes + 1])\n single_classification_output = tf.nn.softmax(\n single_classification_output, -1)\n max_confidence = tf.reduce_max(single_classification_output, -1)\n confident_mask = max_confidence > score_threshold\n max_index = tf.argmax(single_classification_output, 1) - 1\n non_negative_mask = tf.not_equal(max_index, -1)\n in_mask = tf.gather_nd(single_mask, anchor_center_index)\n foreground_mask = tf.logical_and(in_mask, tf.logical_and(\n confident_mask, non_negative_mask))\n valid_labels = tf.boolean_mask(max_index, foreground_mask)\n single_regression_output = tf.reshape(single_regression_output, [-1, 4]\n )\n predicted_bbox = bbox_lib.decode_box_with_anchor(\n single_regression_output, anchors)\n valid_boxes = tf.boolean_mask(predicted_bbox, foreground_mask)\n valid_confidence_score = tf.boolean_mask(max_confidence,\n foreground_mask)\n selected_indices = tf.image.non_max_suppression(valid_boxes,\n valid_confidence_score, max_prediction)\n valid_boxes = tf.gather(valid_boxes, selected_indices)\n valid_labels = tf.gather(valid_labels, selected_indices)\n bbox_list.append(valid_boxes)\n label_list.append(valid_labels)\n return bbox_list, label_list\n\n\ndef build_model(num_classes, anchor_num_per_output):\n base_network_model = tf.keras.applications.resnet50.ResNet50(include_top\n =False, weights='imagenet')\n for layer in base_network_model.layers:\n layer.trainable = False\n h = 
base_network_model.get_layer(name='activation_39').output\n drop_rate = 0.5\n h = tf.keras.layers.Dropout(drop_rate)(h)\n classification_branch = tf.keras.layers.Conv2D((num_classes + 1) *\n anchor_num_per_output, (1, 1))(h)\n regression_branch = tf.keras.layers.Conv2D(4 * anchor_num_per_output, (\n 1, 1))(h)\n model_outputs = [classification_branch, regression_branch]\n return tf.keras.models.Model(base_network_model.input, model_outputs)\n",
"step-4": "import tensorflow as tf\nimport bbox_lib\n\n\ndef hard_negative_loss_mining(c_loss, negative_mask, k):\n \"\"\"Hard negative mining in classification loss.\"\"\"\n k = tf.maximum(k, 1)\n k = tf.minimum(k, c_loss.shape[-1])\n neg_c_loss = c_loss * negative_mask\n neg_c_loss = tf.nn.top_k(neg_c_loss, k)[0]\n return tf.reduce_sum(neg_c_loss)\n\n\ndef compute_loss(network_output, bboxes, labels, num_classes, c_weight,\n r_weight, neg_label_value, ignore_label_value, negative_ratio):\n \"\"\"Compute loss function.\"\"\"\n with tf.variable_scope('losses'):\n batch_size = bboxes.shape[0].value\n one_hot_labels = tf.one_hot(labels + 1, num_classes + 1)\n negative_mask = tf.cast(tf.equal(labels, neg_label_value), tf.float32)\n positive_mask = tf.cast(tf.logical_and(tf.not_equal(labels,\n ignore_label_value), tf.not_equal(labels, neg_label_value)), tf\n .float32)\n with tf.variable_scope('classification_loss'):\n classification_output = network_output[0]\n classification_output = tf.reshape(classification_output, [\n batch_size, -1, num_classes + 1])\n c_loss = tf.losses.softmax_cross_entropy(one_hot_labels,\n classification_output, reduction=tf.losses.Reduction.NONE)\n num_positive = tf.cast(tf.reduce_sum(positive_mask), tf.int32)\n pos_c_loss = tf.reduce_sum(c_loss * positive_mask)\n neg_c_loss = hard_negative_loss_mining(c_loss, negative_mask, \n num_positive * negative_ratio)\n c_loss = (pos_c_loss + neg_c_loss) / batch_size\n with tf.variable_scope('regression_loss'):\n regression_output = network_output[1]\n regression_output = tf.reshape(regression_output, [batch_size, \n -1, 4])\n r_loss = tf.losses.huber_loss(regression_output, bboxes, delta=\n 1, reduction=tf.losses.Reduction.NONE)\n r_loss = tf.reduce_sum(r_loss * positive_mask[..., tf.newaxis]\n ) / batch_size\n return c_weight * c_loss + r_weight * r_loss, c_loss, r_loss\n\n\ndef predict(network_output, mask, score_threshold, neg_label_value, anchors,\n max_prediction, num_classes):\n \"\"\"Decode 
predictions from the neural network.\"\"\"\n classification_output = network_output[0]\n batch_size, _, _, output_dim = classification_output.get_shape().as_list()\n regression_output = network_output[1]\n bbox_list = []\n label_list = []\n ay, ax, ah, aw = bbox_lib.get_center_coordinates_and_sizes(anchors)\n anchor_center_index = tf.cast(tf.transpose(tf.stack([ay, ax])), tf.int32)\n for single_classification_output, single_regression_output, single_mask in zip(\n classification_output, regression_output, mask):\n single_classification_output = tf.reshape(single_classification_output,\n [-1, num_classes + 1])\n single_classification_output = tf.nn.softmax(\n single_classification_output, -1)\n max_confidence = tf.reduce_max(single_classification_output, -1)\n confident_mask = max_confidence > score_threshold\n max_index = tf.argmax(single_classification_output, 1) - 1\n non_negative_mask = tf.not_equal(max_index, -1)\n in_mask = tf.gather_nd(single_mask, anchor_center_index)\n foreground_mask = tf.logical_and(in_mask, tf.logical_and(\n confident_mask, non_negative_mask))\n valid_labels = tf.boolean_mask(max_index, foreground_mask)\n single_regression_output = tf.reshape(single_regression_output, [-1, 4]\n )\n predicted_bbox = bbox_lib.decode_box_with_anchor(\n single_regression_output, anchors)\n valid_boxes = tf.boolean_mask(predicted_bbox, foreground_mask)\n valid_confidence_score = tf.boolean_mask(max_confidence,\n foreground_mask)\n selected_indices = tf.image.non_max_suppression(valid_boxes,\n valid_confidence_score, max_prediction)\n valid_boxes = tf.gather(valid_boxes, selected_indices)\n valid_labels = tf.gather(valid_labels, selected_indices)\n bbox_list.append(valid_boxes)\n label_list.append(valid_labels)\n return bbox_list, label_list\n\n\ndef build_model(num_classes, anchor_num_per_output):\n base_network_model = tf.keras.applications.resnet50.ResNet50(include_top\n =False, weights='imagenet')\n for layer in base_network_model.layers:\n layer.trainable 
= False\n h = base_network_model.get_layer(name='activation_39').output\n drop_rate = 0.5\n h = tf.keras.layers.Dropout(drop_rate)(h)\n classification_branch = tf.keras.layers.Conv2D((num_classes + 1) *\n anchor_num_per_output, (1, 1))(h)\n regression_branch = tf.keras.layers.Conv2D(4 * anchor_num_per_output, (\n 1, 1))(h)\n model_outputs = [classification_branch, regression_branch]\n return tf.keras.models.Model(base_network_model.input, model_outputs)\n",
"step-5": "import tensorflow as tf\nimport bbox_lib\n\n\ndef hard_negative_loss_mining(c_loss, negative_mask, k):\n \"\"\"Hard negative mining in classification loss.\"\"\"\n # make sure at least one negative example\n k = tf.maximum(k, 1)\n # make sure at most all negative.\n k = tf.minimum(k, c_loss.shape[-1])\n neg_c_loss = c_loss * negative_mask\n neg_c_loss = tf.nn.top_k(neg_c_loss, k)[0]\n return tf.reduce_sum(neg_c_loss)\n\n\ndef compute_loss(network_output, bboxes, labels, num_classes, c_weight, r_weight,\n neg_label_value, ignore_label_value, negative_ratio):\n \"\"\"Compute loss function.\"\"\"\n\n with tf.variable_scope(\"losses\"):\n batch_size = bboxes.shape[0].value\n one_hot_labels = tf.one_hot(labels + 1, num_classes + 1)\n negative_mask = tf.cast(tf.equal(labels, neg_label_value), tf.float32)\n positive_mask = tf.cast(tf.logical_and(tf.not_equal(labels, ignore_label_value),\n tf.not_equal(labels, neg_label_value)), tf.float32)\n\n with tf.variable_scope(\"classification_loss\"):\n classification_output = network_output[0]\n classification_output = tf.reshape(\n classification_output, [batch_size, -1, num_classes + 1])\n\n c_loss = tf.losses.softmax_cross_entropy(\n one_hot_labels, classification_output, reduction=tf.losses.Reduction.NONE)\n\n num_positive = tf.cast(tf.reduce_sum(positive_mask), tf.int32)\n pos_c_loss = tf.reduce_sum(c_loss * positive_mask)\n neg_c_loss = hard_negative_loss_mining(c_loss, negative_mask,\n num_positive * negative_ratio)\n\n c_loss = (pos_c_loss + neg_c_loss) / batch_size\n\n with tf.variable_scope(\"regression_loss\"):\n regression_output = network_output[1]\n regression_output = tf.reshape(\n regression_output, [batch_size, -1, 4])\n r_loss = tf.losses.huber_loss(regression_output, bboxes, delta=1,\n reduction=tf.losses.Reduction.NONE)\n\n r_loss = tf.reduce_sum(\n r_loss * positive_mask[..., tf.newaxis]) / batch_size\n\n return c_weight * c_loss + r_weight * r_loss, c_loss, r_loss\n\n\ndef predict(network_output, 
mask, score_threshold, neg_label_value, anchors,\n max_prediction, num_classes):\n \"\"\"Decode predictions from the neural network.\"\"\"\n\n classification_output = network_output[0]\n batch_size, _, _, output_dim = classification_output.get_shape().as_list()\n regression_output = network_output[1]\n bbox_list = []\n label_list = []\n\n ay, ax, ah, aw = bbox_lib.get_center_coordinates_and_sizes(anchors)\n anchor_center_index = tf.cast(tf.transpose(tf.stack([ay, ax])), tf.int32)\n for single_classification_output, single_regression_output, single_mask in zip(\n classification_output, regression_output, mask):\n # num_classes + 1 due to the negative class.\n single_classification_output = tf.reshape(\n single_classification_output, [-1, num_classes + 1])\n single_classification_output = tf.nn.softmax(\n single_classification_output, -1)\n\n max_confidence = tf.reduce_max(single_classification_output, -1)\n confident_mask = max_confidence > score_threshold\n # - 1 due to the negative class.\n max_index = tf.argmax(single_classification_output, 1) - 1\n non_negative_mask = tf.not_equal(max_index, -1)\n in_mask = tf.gather_nd(single_mask, anchor_center_index)\n foreground_mask = tf.logical_and(\n in_mask, tf.logical_and(confident_mask, non_negative_mask))\n\n valid_labels = tf.boolean_mask(max_index, foreground_mask)\n\n single_regression_output = tf.reshape(single_regression_output, [-1, 4])\n predicted_bbox = bbox_lib.decode_box_with_anchor(\n single_regression_output, anchors)\n valid_boxes = tf.boolean_mask(predicted_bbox, foreground_mask)\n valid_confidence_score = tf.boolean_mask(\n max_confidence, foreground_mask)\n\n selected_indices = tf.image.non_max_suppression(\n valid_boxes, valid_confidence_score, max_prediction)\n\n valid_boxes = tf.gather(valid_boxes, selected_indices)\n valid_labels = tf.gather(valid_labels, selected_indices)\n bbox_list.append(valid_boxes)\n label_list.append(valid_labels)\n\n return bbox_list, label_list\n\n\ndef 
build_model(num_classes, anchor_num_per_output):\n base_network_model = tf.keras.applications.resnet50.ResNet50(\n include_top=False, weights=\"imagenet\")\n\n for layer in base_network_model.layers:\n layer.trainable = False\n\n h = base_network_model.get_layer(name='activation_39').output\n drop_rate = 0.5\n h = tf.keras.layers.Dropout(drop_rate)(h)\n\n classification_branch = tf.keras.layers.Conv2D(\n (num_classes + 1) * anchor_num_per_output, (1, 1))(h)\n regression_branch = tf.keras.layers.Conv2D(\n 4 * anchor_num_per_output, (1, 1))(h)\n model_outputs = [classification_branch, regression_branch]\n return tf.keras.models.Model(base_network_model.input, model_outputs)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
ii = [('CookGHP3.py', 2), ('MarrFDI.py', 1), ('GodwWSL2.py', 2), (
'ChanWS.py', 6), ('SadlMLP.py', 1), ('WilbRLW.py', 1), ('AubePRP2.py',
1), ('MartHSI2.py', 1), ('WilbRLW5.py', 1), ('KnowJMM.py', 1), (
'AubePRP.py', 2), ('ChalTPW2.py', 1), ('ClarGE2.py', 2), ('CarlTFR.py',
3), ('SeniNSP.py', 4), ('GrimSLE.py', 1), ('RoscTTI3.py', 1), (
'CookGHP2.py', 1), ('CoolWHM.py', 1), ('DaltJMA.py', 1), ('NewmJLP.py',
1), ('GodwWLN.py', 3), ('MereHHB3.py', 1), ('MartHRW.py', 2), (
'BentJRP.py', 23), ('ThomGLG.py', 1), ('StorJCC.py', 1), ('LewiMJW.py',
1), ('WilbRLW3.py', 1), ('FitzRNS2.py', 1), ('MartHSI.py', 1), (
'EvarJSP.py', 5), ('DwigTHH.py', 4), ('TaylIF.py', 1), ('WordWYR.py', 1
), ('WaylFEP.py', 1)]
|
normal
|
{
"blob_id": "b80ccee42489aefb2858b8491008b252f6a2b9b7",
"index": 4864,
"step-1": "<mask token>\n",
"step-2": "ii = [('CookGHP3.py', 2), ('MarrFDI.py', 1), ('GodwWSL2.py', 2), (\n 'ChanWS.py', 6), ('SadlMLP.py', 1), ('WilbRLW.py', 1), ('AubePRP2.py', \n 1), ('MartHSI2.py', 1), ('WilbRLW5.py', 1), ('KnowJMM.py', 1), (\n 'AubePRP.py', 2), ('ChalTPW2.py', 1), ('ClarGE2.py', 2), ('CarlTFR.py',\n 3), ('SeniNSP.py', 4), ('GrimSLE.py', 1), ('RoscTTI3.py', 1), (\n 'CookGHP2.py', 1), ('CoolWHM.py', 1), ('DaltJMA.py', 1), ('NewmJLP.py',\n 1), ('GodwWLN.py', 3), ('MereHHB3.py', 1), ('MartHRW.py', 2), (\n 'BentJRP.py', 23), ('ThomGLG.py', 1), ('StorJCC.py', 1), ('LewiMJW.py',\n 1), ('WilbRLW3.py', 1), ('FitzRNS2.py', 1), ('MartHSI.py', 1), (\n 'EvarJSP.py', 5), ('DwigTHH.py', 4), ('TaylIF.py', 1), ('WordWYR.py', 1\n ), ('WaylFEP.py', 1)]\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# Create two integer variables and print their sum. What is the type of the
# result?

# Now, create a float variable and print its sum with an integer variable.
# What is the type of the result?

# Divide your smallest integer value by your largest integer value. Is the
# result what you expected? Now, do the same with your float variable and an
# integer variable. What do you get?

# Fill in the blanks: try adding the following two string variables and print
# the result. What do you get?
greeting = "My name is "
your_name = ""

# Try adding the following variables.
best_string = "I am "
your_age = 6

# Although Python can add integers and floats, it can't add strings and
# integers. To do this, we first convert the integer variable to a string
# with the built-in str() function.

# Uncomment the line below and check that it works.
# print(best_string + str(your_age))

# You can create complex strings by chaining several string additions.
# Uncomment the line below and see the result.
# print(best_string + str(your_age) + " years old")

# We can also use the float() and int() built-ins to convert variables to
# floats and ints respectively.
my_int = 5
print(float(my_int))  # prints 5.0 -- float() widens the int to a float

# Now, convert pi to an int.
pi = 3.1415
|
normal
|
{
"blob_id": "fcbbffe0682da9f2131fdddbef606dcae3303ce9",
"index": 1979,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(float(my_int))\n<mask token>\n",
"step-3": "greeting = 'My name is '\nyour_name = ''\nbest_string = 'I am '\nyour_age = 6\nmy_int = 5\nprint(float(my_int))\npi = 3.1415\n",
"step-4": "# Create two integer variables and print their sum. What is the type of the\n# result?\n\n# Now, create a float variable and print its sum with an integer variable. What\n# is the type of the result.\n\n# Divide your smallest integer value by your largest integer value. Is the\n# result what you expected? Now, do the same with your float variable and an\n# integer variable. What to you get?\n\n# Fill in the blanks, try adding the following two string variables and print\n# the result. What do you get?\ngreeting = \"My name is \"\nyour_name = \"\"\n\n# Try adding the following variables.\nbest_string = \"I am \"\nyour_age = 6\n\n\n# Although Python can add integers and floats, it can't add strings and integers.\n# In order to do this, we need to convert the integer variable to a string using\n# the str keyword\n\n# Uncomment the line below and check that it works.\n# print(best_string + str(your_age))\n\n# You can create complex string by using multiple string additions.\n# Uncomment the line below and see the result.\n# print(best_string + str(your_age) + \" years old\")\n\n# We can also use the float keyword and the int keyword to convert variables to\n# floats and ints respectively.\n\nmy_int = 5\nprint(float(my_int))\n\n# Now, convert pi to an int.\n\npi = 3.1415\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import telebot
import os
from misc.answers import answer_incorrect, answer_correct, answer_start
from helper import get_challenge_text, get_solved_challenge_text, is_correct_answer
# Bot token is read from the environment so it never lands in source control.
bot = telebot.TeleBot(os.environ.get('API_KEY_TELEGRAM'))
# Telegram parse mode applied to all formatted outgoing messages.
default_parse_mode = "Markdown"
@bot.message_handler(commands=['start'])
def welcome(message):
    """Greet a user who sent the /start command with the canned intro text."""
    print("/start detected")
    print("[------------------------------]")
    bot.send_message(message.chat.id, answer_start)
@bot.message_handler(commands=['new_game'])
def new_game(message):
    """Handle /new_game: build a fresh challenge from the command text.

    On a malformed request ``get_challenge_text`` raises ``ValueError`` and
    the error text is sent back to the user instead of a challenge.
    """
    print(f"try new game with message: {message.text}")
    try:
        answer = get_challenge_text(message.text)
        print("Challenge successfully created")
    except ValueError as exception:
        # Fix: the original assigned the exception *object*; convert to a
        # string so a plain text message is handed to the Telegram API.
        answer = str(exception)
        print(f"ValueError occurred: {exception}")
    bot.send_message(message.chat.id, answer, parse_mode=default_parse_mode)
    print("[------------------------------]")
@bot.message_handler(content_types=['text'])
def message_listener(message):
    """Check a user's reply against the challenge message it answers.

    Only replies (messages quoting a previous bot message) are processed;
    any other plain text message is ignored.
    """
    reply_message = message.reply_to_message
    if reply_message is None:
        return
    print(f"Try to answer with: {message.text}")
    try:
        if is_correct_answer(message.text, reply_message):
            answer = answer_correct
            solved_challenge = get_solved_challenge_text(reply_message.text)
            # Rewrite the original challenge message so it shows as solved.
            bot.edit_message_text(chat_id=reply_message.chat.id,
                                  message_id=reply_message.message_id,
                                  text=solved_challenge,
                                  parse_mode=default_parse_mode)
            print("Correct answer")
        else:
            answer = answer_incorrect
            print("Incorrect answer")
    except Exception as exception:
        # presumably raised when the challenge was already solved -- verify.
        # Fix: the original replied with the exception *object*; convert to a
        # string so a plain text message is handed to the Telegram API.
        answer = str(exception)
        print("Already solved")
    print("[------------------------------]")
    bot.reply_to(message, answer)
# RUN
print("Bot started!")
print("[------------------------------]")
# Long-polling loop; none_stop keeps the bot alive across transient errors.
bot.polling(none_stop=True)
|
normal
|
{
"blob_id": "f9f66452756cb67689d33aeb2e77535086355a7d",
"index": 5115,
"step-1": "<mask token>\n\n\[email protected]_handler(commands=['new_game'])\ndef new_game(message):\n print(f'try new game with message: {message.text}')\n answer = ''\n try:\n answer = get_challenge_text(message.text)\n print('Challenge successfully created')\n except ValueError as exception:\n answer = exception\n print(f'ValueError occurred: {exception}')\n bot.send_message(message.chat.id, answer, parse_mode=default_parse_mode)\n print('[------------------------------]')\n\n\[email protected]_handler(content_types=['text'])\ndef message_listener(message):\n reply_message = message.reply_to_message\n if reply_message is not None:\n print(f'Try to answer with: {message.text}')\n answer = ''\n try:\n if is_correct_answer(message.text, reply_message):\n answer = answer_correct\n solved_challenge = get_solved_challenge_text(reply_message.text\n )\n bot.edit_message_text(chat_id=reply_message.chat.id,\n message_id=reply_message.message_id, text=\n solved_challenge, parse_mode=default_parse_mode)\n print('Correct answer')\n else:\n answer = answer_incorrect\n print('Incorrect answer')\n except Exception as exception:\n answer = exception\n print('Already solved')\n print('[------------------------------]')\n bot.reply_to(message, answer)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]_handler(commands=['start'])\ndef welcome(message):\n print('/start detected')\n print('[------------------------------]')\n bot.send_message(message.chat.id, answer_start)\n\n\[email protected]_handler(commands=['new_game'])\ndef new_game(message):\n print(f'try new game with message: {message.text}')\n answer = ''\n try:\n answer = get_challenge_text(message.text)\n print('Challenge successfully created')\n except ValueError as exception:\n answer = exception\n print(f'ValueError occurred: {exception}')\n bot.send_message(message.chat.id, answer, parse_mode=default_parse_mode)\n print('[------------------------------]')\n\n\[email protected]_handler(content_types=['text'])\ndef message_listener(message):\n reply_message = message.reply_to_message\n if reply_message is not None:\n print(f'Try to answer with: {message.text}')\n answer = ''\n try:\n if is_correct_answer(message.text, reply_message):\n answer = answer_correct\n solved_challenge = get_solved_challenge_text(reply_message.text\n )\n bot.edit_message_text(chat_id=reply_message.chat.id,\n message_id=reply_message.message_id, text=\n solved_challenge, parse_mode=default_parse_mode)\n print('Correct answer')\n else:\n answer = answer_incorrect\n print('Incorrect answer')\n except Exception as exception:\n answer = exception\n print('Already solved')\n print('[------------------------------]')\n bot.reply_to(message, answer)\n\n\nprint('Bot started!')\nprint('[------------------------------]')\nbot.polling(none_stop=True)\n",
"step-3": "<mask token>\nbot = telebot.TeleBot(os.environ.get('API_KEY_TELEGRAM'))\ndefault_parse_mode = 'Markdown'\n\n\[email protected]_handler(commands=['start'])\ndef welcome(message):\n print('/start detected')\n print('[------------------------------]')\n bot.send_message(message.chat.id, answer_start)\n\n\[email protected]_handler(commands=['new_game'])\ndef new_game(message):\n print(f'try new game with message: {message.text}')\n answer = ''\n try:\n answer = get_challenge_text(message.text)\n print('Challenge successfully created')\n except ValueError as exception:\n answer = exception\n print(f'ValueError occurred: {exception}')\n bot.send_message(message.chat.id, answer, parse_mode=default_parse_mode)\n print('[------------------------------]')\n\n\[email protected]_handler(content_types=['text'])\ndef message_listener(message):\n reply_message = message.reply_to_message\n if reply_message is not None:\n print(f'Try to answer with: {message.text}')\n answer = ''\n try:\n if is_correct_answer(message.text, reply_message):\n answer = answer_correct\n solved_challenge = get_solved_challenge_text(reply_message.text\n )\n bot.edit_message_text(chat_id=reply_message.chat.id,\n message_id=reply_message.message_id, text=\n solved_challenge, parse_mode=default_parse_mode)\n print('Correct answer')\n else:\n answer = answer_incorrect\n print('Incorrect answer')\n except Exception as exception:\n answer = exception\n print('Already solved')\n print('[------------------------------]')\n bot.reply_to(message, answer)\n\n\nprint('Bot started!')\nprint('[------------------------------]')\nbot.polling(none_stop=True)\n",
"step-4": "import telebot\nimport os\nfrom misc.answers import answer_incorrect, answer_correct, answer_start\nfrom helper import get_challenge_text, get_solved_challenge_text, is_correct_answer\nbot = telebot.TeleBot(os.environ.get('API_KEY_TELEGRAM'))\ndefault_parse_mode = 'Markdown'\n\n\[email protected]_handler(commands=['start'])\ndef welcome(message):\n print('/start detected')\n print('[------------------------------]')\n bot.send_message(message.chat.id, answer_start)\n\n\[email protected]_handler(commands=['new_game'])\ndef new_game(message):\n print(f'try new game with message: {message.text}')\n answer = ''\n try:\n answer = get_challenge_text(message.text)\n print('Challenge successfully created')\n except ValueError as exception:\n answer = exception\n print(f'ValueError occurred: {exception}')\n bot.send_message(message.chat.id, answer, parse_mode=default_parse_mode)\n print('[------------------------------]')\n\n\[email protected]_handler(content_types=['text'])\ndef message_listener(message):\n reply_message = message.reply_to_message\n if reply_message is not None:\n print(f'Try to answer with: {message.text}')\n answer = ''\n try:\n if is_correct_answer(message.text, reply_message):\n answer = answer_correct\n solved_challenge = get_solved_challenge_text(reply_message.text\n )\n bot.edit_message_text(chat_id=reply_message.chat.id,\n message_id=reply_message.message_id, text=\n solved_challenge, parse_mode=default_parse_mode)\n print('Correct answer')\n else:\n answer = answer_incorrect\n print('Incorrect answer')\n except Exception as exception:\n answer = exception\n print('Already solved')\n print('[------------------------------]')\n bot.reply_to(message, answer)\n\n\nprint('Bot started!')\nprint('[------------------------------]')\nbot.polling(none_stop=True)\n",
"step-5": "import telebot\nimport os\nfrom misc.answers import answer_incorrect, answer_correct, answer_start\nfrom helper import get_challenge_text, get_solved_challenge_text, is_correct_answer\n\nbot = telebot.TeleBot(os.environ.get('API_KEY_TELEGRAM'))\ndefault_parse_mode = \"Markdown\"\n\n\[email protected]_handler(commands=['start'])\ndef welcome(message):\n\tprint(\"/start detected\")\n\tprint(\"[------------------------------]\")\n\tbot.send_message(message.chat.id, answer_start)\n\n\[email protected]_handler(commands=['new_game'])\ndef new_game(message):\n\tprint(f\"try new game with message: {message.text}\")\n\tanswer = \"\"\n\ttry:\n\t\tanswer = get_challenge_text(message.text)\n\t\tprint(\"Challenge successfully created\")\n\texcept ValueError as exception:\n\t\tanswer = exception\n\t\tprint(f\"ValueError occurred: {exception}\")\n\tbot.send_message(message.chat.id, answer, parse_mode=default_parse_mode)\n\tprint(\"[------------------------------]\")\n\n\[email protected]_handler(content_types=['text'])\ndef message_listener(message):\n\treply_message = message.reply_to_message\n\tif reply_message is not None:\n\t\tprint(f\"Try to answer with: {message.text}\")\n\t\tanswer = \"\"\n\t\ttry:\n\t\t\tif is_correct_answer(message.text, reply_message):\n\t\t\t\tanswer = answer_correct\n\t\t\t\tsolved_challenge = get_solved_challenge_text(reply_message.text)\n\t\t\t\t# set challenge status\n\t\t\t\tbot.edit_message_text(chat_id=reply_message.chat.id, message_id=reply_message.message_id,\n\t\t\t\t\t\t\t\t\t text=solved_challenge,\n\t\t\t\t\t\t\t\t\t parse_mode=default_parse_mode)\n\t\t\t\tprint(\"Correct answer\")\n\t\t\telse:\n\t\t\t\tanswer = answer_incorrect\n\t\t\t\tprint(\"Incorrect answer\")\n\t\texcept Exception as exception:\n\t\t\tanswer = exception\n\t\t\tprint(\"Already solved\")\n\t\tprint(\"[------------------------------]\")\n\t\tbot.reply_to(message, answer)\n\n\n# RUN\nprint(\"Bot 
started!\")\nprint(\"[------------------------------]\")\nbot.polling(none_stop=True)\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
# SPDX-License-Identifier: Apache-2.0
# Copyright (C) 2020 ifm electronic gmbh
#
# THE PROGRAM IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND.
#
"""
This module provides the recording control GUI service for the nexxT framework.
"""
import logging
from pathlib import Path
from nexxT.Qt.QtCore import Qt, QStorageInfo
from nexxT.Qt.QtGui import QAction, QIcon, QTextOption
from nexxT.Qt.QtWidgets import QApplication, QStyle, QWidget, QBoxLayout, QToolBar, QFileDialog
from nexxT.core.Utils import assertMainThread, ElidedLabel
from nexxT.core.Exceptions import PropertyCollectionPropertyNotFound
from nexxT.interface import Services
from nexxT.services.SrvRecordingControl import MVCRecordingControlBase
logger = logging.getLogger(__name__)
class MVCRecordingControlGUI(MVCRecordingControlBase):
    """
    This service implements a GUI frontend for the recording service
    """
    def __init__(self, config):
        """
        Create menu entries, toolbar actions and the dock widget of the
        recording control and wire them to the service base class.

        :param config: the nexxT configuration instance; used for saving and
                       restoring the target directory and for accessing the
                       gui state property collection.
        """
        assertMainThread()
        super().__init__(config)
        # state
        self._directory = str(Path('.').absolute())
        # gui
        srv = Services.getService("MainWindow")
        # persist/restore the chosen target directory with the configuration
        config.configLoaded.connect(self._restoreState)
        config.configAboutToSave.connect(self._saveState)
        self._config = config
        recMenu = srv.menuBar().addMenu("&Recording")
        style = QApplication.style()
        # theme icons with fallbacks to bundled / Qt standard icons
        self.actStart = QAction(QIcon.fromTheme("media-record", QIcon(":icons/media-record.svg")),
                                "Start Recording", self)
        self.actStop = QAction(QIcon.fromTheme("media-playback-stop", style.standardIcon(QStyle.SP_MediaStop)),
                               "Stop Recording", self)
        self.actSetDir = QAction(QIcon.fromTheme("document-open-folder", style.standardIcon(QStyle.SP_DirIcon)),
                                 "Choose directory ...", self)
        # all actions stay disabled until recording features are reported
        # (see _supportedFeaturesChanged)
        self.actStart.setEnabled(False)
        self.actStop.setEnabled(False)
        self.actSetDir.setEnabled(False)
        self.actStart.triggered.connect(self._startTriggered)
        self.actStop.triggered.connect(self._stopTriggered)
        self.actSetDir.triggered.connect(self._setDir)
        recMenu.addAction(self.actStart)
        recMenu.addAction(self.actStop)
        recMenu.addAction(self.actSetDir)
        self.dockWidget = srv.newDockWidget("RecordingControl", None, Qt.LeftDockWidgetArea,
                                            defaultLoc="PlaybackControl")
        self.dockWidgetContents = QWidget(self.dockWidget)
        self.dockWidget.setWidget(self.dockWidgetContents)
        toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents)
        toolLayout.setContentsMargins(0, 0, 0, 0)
        toolBar = QToolBar()
        toolLayout.addWidget(toolBar)
        toolBar.addAction(self.actStart)
        toolBar.addAction(self.actStop)
        toolBar.addAction(self.actSetDir)
        # elided, non-wrapping labels so long paths / status lines don't
        # blow up the dock widget width
        self._directoryLabel = ElidedLabel(self._directory, parent=self.dockWidgetContents)
        to = self._directoryLabel.textOption()
        to.setWrapMode(QTextOption.NoWrap)
        self._directoryLabel.setTextOption(to)
        self._directoryLabel.setElideMode(Qt.ElideMiddle)
        self._statusLabel = ElidedLabel("(disabled)", parent=self.dockWidgetContents)
        to = self._statusLabel.textOption()
        to.setWrapMode(QTextOption.NoWrap)
        self._statusLabel.setTextOption(to)
        self._statusLabel.setElideMode(Qt.ElideMiddle)
        toolLayout.addWidget(self._directoryLabel)
        toolLayout.addWidget(self._statusLabel, stretch=100)
        #toolLayout.addStretch(100)
        # signals emitted by the recording service base class
        self.statusUpdate.connect(self._onUpdateStatus)
        self.notifyError.connect(self._onNotifyError)
    def _startTriggered(self):
        """Start recording into the configured directory and toggle the actions."""
        self.startRecording(self._directory)
        self.actStart.setEnabled(False)
        self.actStop.setEnabled(True)
    def _stopTriggered(self):
        """Stop the running recording and toggle the actions."""
        self.stopRecording()
        self.actStart.setEnabled(True)
        self.actStop.setEnabled(False)
    def _setDir(self):
        """Let the user choose the recording target directory via a dialog."""
        tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,
                                                caption="Select recording target directory",
                                                dir=self._directory)
        # empty string / None means the dialog was cancelled
        if tdir != "" and tdir is not None:
            self._directory = str(Path(tdir).absolute())
            self._directoryLabel.setText(self._directory)
    def _supportedFeaturesChanged(self, featureset):
        """
        Enable the GUI when at least one recording feature is supported,
        disable it otherwise.

        :param featureset: the set of currently supported recording features
        """
        if len(featureset) > 0 and not self.actSetDir.isEnabled():
            self.actStart.setEnabled(True)
            self.actStop.setEnabled(False)
            self.actSetDir.setEnabled(True)
            self._statusLabel.setText("inactive")
        elif len(featureset) == 0 and self.actSetDir.isEnabled():
            self.actStart.setEnabled(False)
            self.actStop.setEnabled(False)
            self.actSetDir.setEnabled(False)
            self._statusLabel.setText("(disabled)")
    def _onUpdateStatus(self, _, file, length, bytesWritten):
        """
        Update the per-file status line shown in the dock widget.

        :param _: unused first signal argument (presumably the originating
                  filter, cf. _onNotifyError -- confirm against the signal)
        :param file: path of the file being recorded
        :param length: recorded time in seconds; negative means unknown
        :param bytesWritten: bytes written so far; negative means unknown
        """
        # one status line per file; first line may be the "inactive" marker
        lines = self._statusLabel.text().split("\n")
        # negative values indicate unknown quantities
        if length < 0:
            length = None
        if bytesWritten < 0:
            bytesWritten = None
        updated = False
        # human readable amount of data written
        if bytesWritten is None:
            bw = "??"
        elif bytesWritten < 1024:
            bw = f"{bytesWritten:3d} bytes"
        elif bytesWritten < 1024*1024:
            bw = f"{bytesWritten/1024:.1f} kb"
        elif bytesWritten < 1024*1024*1024:
            bw = f"{bytesWritten/1024/1024:.1f} Mb"
        else:
            bw = f"{bytesWritten/1024/1024/1024:.1f} Gb"
        # human readable recorded time
        if length is None:
            sl = "?? s"
        elif length < 60:
            sl = f"{length:.1f} sec"
        else:
            sl = f"{length/60:.1f} min"
        # estimate remaining recording time from the free space on the
        # target volume and the observed bytes/second rate
        bytesAv = QStorageInfo(file).bytesAvailable()
        if length is not None and bytesWritten is not None and bytesAv >= 0 and bytesWritten > 0:
            timeAv = length*bytesAv/bytesWritten - length
            if timeAv < 60:
                av = f"{timeAv:.1f} sec"
            elif timeAv < 3600:
                av = f"{timeAv/60:.1f} min"
            else:
                av = "> 1 hour"
        else:
            av = "?? s"
        # status line format: "<name>: <time> | <bytes> R: <remaining>"
        if length is not None or bytesWritten is not None:
            newl = Path(file).name + ": " + sl + " | " + bw + " R: " + av
        else:
            newl = None
        if newl is not None:
            # update this file's line in place, or append it
            for i, l in enumerate(lines):
                if l.startswith(Path(file).name + ":"):
                    updated = True
                    lines[i] = newl
                    break
            if not updated:
                lines.append(newl)
                if lines[0] == "inactive":
                    lines = lines[1:]
        else:
            # both values unknown -> recording of this file ended; remove its line
            toDel = None
            for i, l in enumerate(lines):
                if l.startswith(Path(file).name + ":"):
                    toDel = i
                    break
            if toDel is not None:
                lines = lines[:toDel] + lines[toDel+1:]
            if len(lines) == 0:
                lines.append("inactive")
        self._statusLabel.setText("\n".join(lines))
    def _onNotifyError(self, originFilter, errorDesc):
        """
        Show an error reported by a recording filter in the status label.

        :param originFilter: the filter instance which reported the error
        :param errorDesc: the error description string
        """
        lines = self._statusLabel.text().split("\n")
        newl = originFilter.objectName() + ": " + "ERROR: " + errorDesc
        updated = False
        # replace an existing line of this filter, or append a new one
        for i, l in enumerate(lines):
            if l.startswith(originFilter.objectName() + ":"):
                updated = True
                lines[i] = newl
                break
        if not updated:
            lines.append(newl)
            if lines[0] == "inactive":
                lines = lines[1:]
        self._statusLabel.setText("\n".join(lines))
    def _defineProperties(self):
        """Make sure the gui state properties used by this service are defined."""
        propertyCollection = self._config.guiState()
        propertyCollection.defineProperty("RecordingControl_directory",
                                          str(Path('.').absolute()),
                                          "Target directory for recordings")
    def _saveState(self):
        """
        Saves the state of the playback control

        :return:
        """
        assertMainThread()
        self._defineProperties()
        propertyCollection = self._config.guiState()
        try:
            propertyCollection.setProperty("RecordingControl_directory", self._directory)
        except PropertyCollectionPropertyNotFound:
            pass
    def _restoreState(self):
        """
        Restores the state of the playback control from the given property collection

        :return:
        """
        assertMainThread()
        self._defineProperties()
        propertyCollection = self._config.guiState()
        logger.debug("before restore dir=%s", self._directory)
        d = propertyCollection.getProperty("RecordingControl_directory")
        # only restore directories which still exist
        if Path(d).exists():
            self._directory = d
            self._directoryLabel.setText(self._directory)
        logger.debug("after restore dir=%s", self._directory)
|
normal
|
{
"blob_id": "3e4771d074218fb0a77332ee61a4cc49f1c301b7",
"index": 9356,
"step-1": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n <mask token>\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n 
self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n <mask token>\n <mask token>\n <mask token>\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n <mask token>\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n 
self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText('inactive')\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText('(disabled)')\n <mask token>\n <mask token>\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n <mask token>\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n 
logger.debug('before restore dir=%s', self._directory)\n d = propertyCollection.getProperty('RecordingControl_directory')\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug('after restore dir=%s', self._directory)\n",
"step-3": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n <mask token>\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n 
self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText('inactive')\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText('(disabled)')\n <mask token>\n <mask token>\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n\n def _saveState(self):\n \"\"\"\n Saves the state of the playback control\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n try:\n 
propertyCollection.setProperty('RecordingControl_directory',\n self._directory)\n except PropertyCollectionPropertyNotFound:\n pass\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n logger.debug('before restore dir=%s', self._directory)\n d = propertyCollection.getProperty('RecordingControl_directory')\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug('after restore dir=%s', self._directory)\n",
"step-4": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n \"\"\"\n This service implements a GUI frontend for the recording service\n \"\"\"\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n 
self._directoryLabel.setTextOption(to)\n self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText('inactive')\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText('(disabled)')\n\n def _onUpdateStatus(self, _, file, length, bytesWritten):\n lines = self._statusLabel.text().split('\\n')\n if length < 0:\n length = None\n if bytesWritten < 0:\n bytesWritten = None\n updated = False\n if bytesWritten is None:\n bw = '??'\n elif bytesWritten < 1024:\n bw = f'{bytesWritten:3d} bytes'\n elif bytesWritten < 1024 * 1024:\n bw = f'{bytesWritten / 1024:.1f} kb'\n elif bytesWritten < 1024 * 1024 * 1024:\n bw = f'{bytesWritten / 1024 / 
1024:.1f} Mb'\n else:\n bw = f'{bytesWritten / 1024 / 1024 / 1024:.1f} Gb'\n if length is None:\n sl = '?? s'\n elif length < 60:\n sl = f'{length:.1f} sec'\n else:\n sl = f'{length / 60:.1f} min'\n bytesAv = QStorageInfo(file).bytesAvailable()\n if (length is not None and bytesWritten is not None and bytesAv >= \n 0 and bytesWritten > 0):\n timeAv = length * bytesAv / bytesWritten - length\n if timeAv < 60:\n av = f'{timeAv:.1f} sec'\n elif timeAv < 3600:\n av = f'{timeAv / 60:.1f} min'\n else:\n av = '> 1 hour'\n else:\n av = '?? s'\n if length is not None or bytesWritten is not None:\n newl = Path(file).name + ': ' + sl + ' | ' + bw + ' R: ' + av\n else:\n newl = None\n if newl is not None:\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + ':'):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == 'inactive':\n lines = lines[1:]\n else:\n toDel = None\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + ':'):\n toDel = i\n break\n if toDel is not None:\n lines = lines[:toDel] + lines[toDel + 1:]\n if len(lines) == 0:\n lines.append('inactive')\n self._statusLabel.setText('\\n'.join(lines))\n\n def _onNotifyError(self, originFilter, errorDesc):\n lines = self._statusLabel.text().split('\\n')\n newl = originFilter.objectName() + ': ' + 'ERROR: ' + errorDesc\n updated = False\n for i, l in enumerate(lines):\n if l.startswith(originFilter.objectName() + ':'):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == 'inactive':\n lines = lines[1:]\n self._statusLabel.setText('\\n'.join(lines))\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n\n def _saveState(self):\n \"\"\"\n Saves the state of the playback control\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n 
propertyCollection = self._config.guiState()\n try:\n propertyCollection.setProperty('RecordingControl_directory',\n self._directory)\n except PropertyCollectionPropertyNotFound:\n pass\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n logger.debug('before restore dir=%s', self._directory)\n d = propertyCollection.getProperty('RecordingControl_directory')\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug('after restore dir=%s', self._directory)\n",
"step-5": "# SPDX-License-Identifier: Apache-2.0\n# Copyright (C) 2020 ifm electronic gmbh\n#\n# THE PROGRAM IS PROVIDED \"AS IS\" WITHOUT WARRANTY OF ANY KIND.\n#\n\n\"\"\"\nThis module provides the recording control GUI service for the nexxT framework.\n\"\"\"\n\nimport logging\nfrom pathlib import Path\nfrom nexxT.Qt.QtCore import Qt, QStorageInfo\nfrom nexxT.Qt.QtGui import QAction, QIcon, QTextOption\nfrom nexxT.Qt.QtWidgets import QApplication, QStyle, QWidget, QBoxLayout, QToolBar, QFileDialog\nfrom nexxT.core.Utils import assertMainThread, ElidedLabel\nfrom nexxT.core.Exceptions import PropertyCollectionPropertyNotFound\nfrom nexxT.interface import Services\nfrom nexxT.services.SrvRecordingControl import MVCRecordingControlBase\n\nlogger = logging.getLogger(__name__)\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n \"\"\"\n This service implements a GUI frontend for the recording service\n \"\"\"\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n\n # state\n self._directory = str(Path('.').absolute())\n\n # gui\n srv = Services.getService(\"MainWindow\")\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu(\"&Recording\")\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme(\"media-record\", QIcon(\":icons/media-record.svg\")),\n \"Start Recording\", self)\n self.actStop = QAction(QIcon.fromTheme(\"media-playback-stop\", style.standardIcon(QStyle.SP_MediaStop)),\n \"Stop Recording\", self)\n self.actSetDir = QAction(QIcon.fromTheme(\"document-open-folder\", style.standardIcon(QStyle.SP_DirIcon)),\n \"Choose directory ...\", self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n\n 
recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n\n self.dockWidget = srv.newDockWidget(\"RecordingControl\", None, Qt.LeftDockWidgetArea,\n defaultLoc=\"PlaybackControl\")\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents)\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n\n self._directoryLabel = ElidedLabel(self._directory, parent=self.dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n self._directoryLabel.setElideMode(Qt.ElideMiddle)\n\n self._statusLabel = ElidedLabel(\"(disabled)\", parent=self.dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n #toolLayout.addStretch(100)\n\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption=\"Select recording target directory\",\n dir=self._directory)\n if tdir != \"\" and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not 
self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText(\"inactive\")\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText(\"(disabled)\")\n\n def _onUpdateStatus(self, _, file, length, bytesWritten):\n lines = self._statusLabel.text().split(\"\\n\")\n if length < 0:\n length = None\n if bytesWritten < 0:\n bytesWritten = None\n updated = False\n\n if bytesWritten is None:\n bw = \"??\"\n elif bytesWritten < 1024:\n bw = f\"{bytesWritten:3d} bytes\"\n elif bytesWritten < 1024*1024:\n bw = f\"{bytesWritten/1024:.1f} kb\"\n elif bytesWritten < 1024*1024*1024:\n bw = f\"{bytesWritten/1024/1024:.1f} Mb\"\n else:\n bw = f\"{bytesWritten/1024/1024/1024:.1f} Gb\"\n\n if length is None:\n sl = \"?? s\"\n elif length < 60:\n sl = f\"{length:.1f} sec\"\n else:\n sl = f\"{length/60:.1f} min\"\n\n bytesAv = QStorageInfo(file).bytesAvailable()\n if length is not None and bytesWritten is not None and bytesAv >= 0 and bytesWritten > 0:\n timeAv = length*bytesAv/bytesWritten - length\n if timeAv < 60:\n av = f\"{timeAv:.1f} sec\"\n elif timeAv < 3600:\n av = f\"{timeAv/60:.1f} min\"\n else:\n av = \"> 1 hour\"\n else:\n av = \"?? 
s\"\n\n if length is not None or bytesWritten is not None:\n newl = Path(file).name + \": \" + sl + \" | \" + bw + \" R: \" + av\n else:\n newl = None\n\n if newl is not None:\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + \":\"):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == \"inactive\":\n lines = lines[1:]\n else:\n toDel = None\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + \":\"):\n toDel = i\n break\n if toDel is not None:\n lines = lines[:toDel] + lines[toDel+1:]\n if len(lines) == 0:\n lines.append(\"inactive\")\n\n self._statusLabel.setText(\"\\n\".join(lines))\n\n def _onNotifyError(self, originFilter, errorDesc):\n lines = self._statusLabel.text().split(\"\\n\")\n newl = originFilter.objectName() + \": \" + \"ERROR: \" + errorDesc\n updated = False\n for i, l in enumerate(lines):\n if l.startswith(originFilter.objectName() + \":\"):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == \"inactive\":\n lines = lines[1:]\n self._statusLabel.setText(\"\\n\".join(lines))\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty(\"RecordingControl_directory\",\n str(Path('.').absolute()),\n \"Target directory for recordings\")\n\n\n def _saveState(self):\n \"\"\"\n Saves the state of the playback control\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n try:\n propertyCollection.setProperty(\"RecordingControl_directory\", self._directory)\n except PropertyCollectionPropertyNotFound:\n pass\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n logger.debug(\"before restore dir=%s\", self._directory)\n d = 
propertyCollection.getProperty(\"RecordingControl_directory\")\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug(\"after restore dir=%s\", self._directory)\n",
"step-ids": [
6,
8,
9,
12,
15
]
}
|
[
6,
8,
9,
12,
15
] |
class Solution:
    def validIPAddress(self, IP):
        """Classify *IP* as 'IPv4', 'IPv6' or 'Neither'.

        :type IP: str
        :rtype: str
        """

        def _is_ipv4(address):
            # Exactly four dot-separated parts.
            parts = address.split('.')
            if len(parts) != 4:
                return False
            for part in parts:
                # 1-3 ASCII digits only. Checking characters explicitly
                # (instead of probing with int()) rejects '', '+1', ' 1'
                # and unicode digits that int() would silently accept.
                if not 1 <= len(part) <= 3 or not all(c in '0123456789' for c in part):
                    return False
                # No leading zeros: '01' is invalid, a lone '0' is fine.
                if len(part) > 1 and part[0] == '0':
                    return False
                if int(part) > 255:
                    return False
            return True

        def _is_ipv6(address):
            # Exactly eight colon-separated groups; the '::' shorthand is
            # deliberately rejected (it produces an empty group).
            groups = address.split(':')
            if len(groups) != 8:
                return False
            hex_chars = set('0123456789abcdefABCDEF')
            return all(
                1 <= len(group) <= 4 and all(c in hex_chars for c in group)
                for group in groups
            )

        if _is_ipv4(IP):
            return 'IPv4'
        if _is_ipv6(IP):
            return 'IPv6'
        return 'Neither'
# print(Solution().validIPAddress("172.16.254.1"))
# print(Solution().validIPAddress("2001:0db8:85a3:0:0:8A2E:0370:7334"))
# print(Solution().validIPAddress("256.256.256.256"))
# print(Solution().validIPAddress("172.16.254.01"))
# print(Solution().validIPAddress("2001:db8:85a3:0:0:8A2E:0370:7334"))
# print(Solution().validIPAddress("2001:0db8:85a3::8A2E:0370:7334"))
# print(Solution().validIPAddress("10:0df8:85a3:0:0:8a2e:037:7334"))
# print(Solution().validIPAddress("120.25.2.10"))
|
normal
|
{
"blob_id": "6216a5e45fee8ade5ec9072c42c1b08f3b0f4c65",
"index": 2433,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def validIPAddress(self, IP):\n \"\"\"\n :type IP: str\n :rtype: str\n \"\"\"\n\n def validateIPv4(IP):\n digits = IP.split('.')\n if len(digits) != 4:\n return False\n for digitstr in digits:\n if len(digitstr) > 3 or len(digitstr) <= 0:\n return False\n try:\n digit = int(digitstr)\n except:\n return False\n if digit > 255 or digit < 0:\n return False\n if len(str(digit)) != len(digitstr):\n return False\n return True\n\n def validateIPv6(IP):\n hexDigits = IP.split(':')\n if len(hexDigits) != 8:\n return False\n for hexDigitStr in hexDigits:\n if len(hexDigitStr) > 4 or len(hexDigitStr) <= 0:\n return False\n for char in hexDigitStr:\n try:\n int(char)\n except:\n if ord(char.lower()) - ord('a') < 0 or ord(char.lower()\n ) - ord('a') > 5:\n return False\n return True\n if validateIPv4(IP):\n return 'IPv4'\n elif validateIPv6(IP):\n return 'IPv6'\n else:\n return 'Neither'\n",
"step-4": "class Solution:\n def validIPAddress(self, IP):\n \"\"\"\n :type IP: str\n :rtype: str\n \"\"\"\n \n def validateIPv4(IP):\n digits = IP.split('.')\n if len(digits) != 4:\n return False\n for digitstr in digits:\n if len(digitstr) > 3 or len(digitstr) <= 0:\n return False\n try: \n digit = int(digitstr)\n except: \n return False\n # check range\n if digit > 255 or digit < 0:\n return False\n # check leading 0s\n if len(str(digit)) != len(digitstr):\n return False\n return True\n \n def validateIPv6(IP):\n hexDigits = IP.split(':')\n if len(hexDigits) != 8:\n return False\n for hexDigitStr in hexDigits:\n if len(hexDigitStr) > 4 or len(hexDigitStr) <= 0:\n return False\n\n for char in hexDigitStr:\n # check hexadecimal digit\n try:\n int(char)\n except:\n if ord(char.lower()) - ord('a') < 0 or \\\n ord(char.lower()) - ord('a') > 5:\n return False\n return True\n\n if validateIPv4(IP):\n return 'IPv4'\n elif validateIPv6(IP):\n return 'IPv6'\n else:\n return 'Neither'\n\n# print(Solution().validIPAddress(\"172.16.254.1\"))\n# print(Solution().validIPAddress(\"2001:0db8:85a3:0:0:8A2E:0370:7334\"))\n# print(Solution().validIPAddress(\"256.256.256.256\"))\n# print(Solution().validIPAddress(\"172.16.254.01\"))\n# print(Solution().validIPAddress(\"2001:db8:85a3:0:0:8A2E:0370:7334\"))\n# print(Solution().validIPAddress(\"2001:0db8:85a3::8A2E:0370:7334\"))\n# print(Solution().validIPAddress(\"10:0df8:85a3:0:0:8a2e:037:7334\"))\n# print(Solution().validIPAddress(\"120.25.2.10\"))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
""" Problem statement:
https://leetcode.com/problems/contains-duplicate-ii/description/
Given an array of integers and an integer k, find out whether
there are two distinct indices i and j in the array such that nums[i] = nums[j]
and the absolute difference between i and j is at most k.
"""
class Solution:

    def containsNearbyDuplicate(self, nums, k):
        """Return True iff nums holds two equal values at most k indices apart.

        Tracks the most recent index of every value seen so far;
        O(n) time and O(n) space over n = len(nums).
        """
        last_seen = {}
        for index, value in enumerate(nums):
            previous = last_seen.get(value)
            if previous is not None and index - previous <= k:
                return True
            last_seen[value] = index
        return False
if __name__ == "__main__":
    # Manual smoke test: 1 repeats at indices 0 and 4, which is within k=4.
    demo = [1, 2, 3, 4, 1, 6, 8]
    print(Solution().containsNearbyDuplicate(demo, 4))
|
normal
|
{
"blob_id": "33c241747062ab0d374082d2a8179335503fa212",
"index": 3320,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def containsNearbyDuplicate(self, nums, k):\n \"\"\" Time complexity: O(n). Space complexity: O(n), n is len(nums).\n \"\"\"\n nums_dict = dict()\n for i, n in enumerate(nums):\n if n in nums_dict and abs(nums_dict[n] - i) <= k:\n return True\n nums_dict[n] = i\n return False\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def containsNearbyDuplicate(self, nums, k):\n \"\"\" Time complexity: O(n). Space complexity: O(n), n is len(nums).\n \"\"\"\n nums_dict = dict()\n for i, n in enumerate(nums):\n if n in nums_dict and abs(nums_dict[n] - i) <= k:\n return True\n nums_dict[n] = i\n return False\n\n\nif __name__ == '__main__':\n sol = Solution()\n nums = [1, 2, 3, 4, 1, 6, 8]\n k = 4\n print(sol.containsNearbyDuplicate(nums, k))\n",
"step-5": "\"\"\" Problem statement:\nhttps://leetcode.com/problems/contains-duplicate-ii/description/\n\nGiven an array of integers and an integer k, find out whether\nthere are two distinct indices i and j in the array such that nums[i] = nums[j]\nand the absolute difference between i and j is at most k.\n\"\"\"\n\n\nclass Solution:\n def containsNearbyDuplicate(self, nums, k):\n \"\"\" Time complexity: O(n). Space complexity: O(n), n is len(nums).\n \"\"\"\n nums_dict = dict() # integer: most recent index\n for i, n in enumerate(nums):\n if n in nums_dict and abs(nums_dict[n] - i) <= k:\n return True\n nums_dict[n] = i # update index of integer n in dictionary\n return False\n\n\nif __name__ == \"__main__\":\n sol = Solution()\n nums = [1, 2, 3, 4, 1, 6, 8]\n k = 4\n print(sol.containsNearbyDuplicate(nums, k))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Figura:
    """Base drawing primitive; every operation just announces itself on stdout."""

    def __init__(self):
        print("Tworze obiekt klasy Figura...")

    def pobierz_polozenie(self):
        """Announce the base-class position getter."""
        print("Metoda pobierz_polozenie klasy Figura.")

    def nadaj_polozenie(self):
        """Announce the base-class position setter."""
        print("Metoda nadaj_polozenie klasy Figura.")

    def wyswietl(self):
        """Announce the base-class display operation."""
        print("Metoda wyswietl klasy Figura.")

    def wypelnij(self):
        """Announce the base-class fill operation."""
        print("Metoda wypelnij klasy Figura.")

    def nadaj_kolor(self):
        """Announce the base-class colour setter."""
        print("Metoda nadaj_kolor klasy Figura.")

    def usun(self):
        """Announce the base-class delete operation."""
        print("Metoda usun klasy Figura.")
class Punkt(Figura):
    """Point shape; overrides display, fill and delete with its own messages."""

    def __init__(self):
        print("Tworze obiekt klasy Punkt...")

    def wyswietl(self):
        """Announce the Punkt display operation."""
        print("Metoda wyswietl klasy Punkt.")

    def wypelnij(self):
        """Announce the Punkt fill operation."""
        print("Metoda wypelnij klasy Punkt.")

    def usun(self):
        """Announce the Punkt delete operation."""
        print("Metoda usun klasy Punkt.")
class Linia(Figura):
    """Line shape; overrides display, fill and delete with its own messages."""

    def __init__(self):
        print("Tworze obiekt klasy Linia...")

    def wyswietl(self):
        """Announce the Linia display operation."""
        print("Metoda wyswietl klasy Linia.")

    def wypelnij(self):
        """Announce the Linia fill operation."""
        print("Metoda wypelnij klasy Linia.")

    def usun(self):
        """Announce the Linia delete operation."""
        print("Metoda usun klasy Linia.")
class Kwadrat(Figura):
    """Square shape; overrides display, fill and delete with its own messages."""

    def __init__(self):
        print("Tworze obiekt klasy Kwadrat...")

    def wyswietl(self):
        """Announce the Kwadrat display operation."""
        print("Metoda wyswietl klasy Kwadrat.")

    def wypelnij(self):
        """Announce the Kwadrat fill operation."""
        print("Metoda wypelnij klasy Kwadrat.")

    def usun(self):
        """Announce the Kwadrat delete operation."""
        print("Metoda usun klasy Kwadrat.")
class XXOkrag:
    """Legacy circle class with a method naming scheme incompatible with Figura."""

    def __init__(self):
        print("Tworze obiekt klasy XXOkrag...")

    def wyswietlaj(self):
        """Announce the XXOkrag display operation."""
        print("Metoda wyswietlaj klasy XXOkrag.")

    def wypelniaj(self):
        """Announce the XXOkrag fill operation."""
        print("Metoda wypelniaj klasy XXOkrag.")

    def usuwaj(self):
        """Announce the XXOkrag delete operation."""
        print("Metoda usuwaj klasy XXOkrag.")

    def pobierz_polozenie(self):
        """Announce the XXOkrag position getter."""
        print("Metoda pobierz_polozenie klasy XXOkrag.")

    def nadaj_polozenie(self):
        """Announce the XXOkrag position setter."""
        print("Metoda nadaj_polozenie klasy XXOkrag.")

    def ustaw_kolor(self):
        """Announce the XXOkrag colour setter."""
        print("Metoda ustaw_kolor klasy XXOkrag.")
class Okrag(Figura):
    """Adapter: exposes the Figura interface by delegating to a wrapped XXOkrag."""

    def __init__(self):
        # Compose rather than inherit: all calls are forwarded to XXOkrag,
        # whose method names differ from the Figura interface.
        self.xokrag = XXOkrag()

    def pobierz_polozenie(self):
        self.xokrag.pobierz_polozenie()

    def nadaj_polozenie(self):
        self.xokrag.nadaj_polozenie()

    def wyswietl(self):
        self.xokrag.wyswietlaj()

    def wypelnij(self):
        self.xokrag.wypelniaj()

    def nadaj_kolor(self):
        self.xokrag.ustaw_kolor()

    def usun(self):
        self.xokrag.usuwaj()
if __name__ == "__main__":
    # Demonstrate polymorphic dispatch over a mixed list of shapes,
    # including the adapted Okrag.
    for figura in [Linia(), Kwadrat(), Okrag()]:
        figura.wyswietl()
|
normal
|
{
"blob_id": "774bf2b49f6e546f16294edc17e9ac34fa8a9ba8",
"index": 2711,
"step-1": "<mask token>\n\n\nclass Punkt(Figura):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Punkt(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Punkt...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Punkt.')\n <mask token>\n\n def usun(self):\n print('Metoda usun klasy Punkt.')\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Punkt(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Punkt...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Punkt.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Punkt.')\n\n def usun(self):\n print('Metoda usun klasy Punkt.')\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-4": "class Figura:\n <mask token>\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy Figura.')\n <mask token>\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Figura.')\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Punkt(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Punkt...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Punkt.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Punkt.')\n\n def usun(self):\n print('Metoda usun klasy Punkt.')\n\n\nclass Linia(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Linia...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Linia.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Linia.')\n\n def usun(self):\n print('Metoda usun klasy Linia.')\n\n\nclass Kwadrat(Figura):\n\n def __init__(self):\n print('Tworze obiekt klasy Kwadrat...')\n\n def wyswietl(self):\n print('Metoda wyswietl klasy Kwadrat.')\n\n def wypelnij(self):\n print('Metoda wypelnij klasy Kwadrat.')\n\n def usun(self):\n print('Metoda usun klasy Kwadrat.')\n\n\nclass XXOkrag:\n\n def __init__(self):\n print('Tworze obiekt klasy XXOkrag...')\n\n def wyswietlaj(self):\n print('Metoda wyswietlaj klasy XXOkrag.')\n\n def wypelniaj(self):\n print('Metoda wypelniaj klasy XXOkrag.')\n\n def usuwaj(self):\n print('Metoda usuwaj klasy XXOkrag.')\n\n def pobierz_polozenie(self):\n print('Metoda pobierz_polozenie klasy XXOkrag.')\n\n def nadaj_polozenie(self):\n print('Metoda nadaj_polozenie klasy XXOkrag.')\n\n def ustaw_kolor(self):\n print('Metoda ustaw_kolor klasy XXOkrag.')\n\n\nclass Okrag(Figura):\n\n def __init__(self):\n self.xokrag = XXOkrag()\n\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n\n def nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n\n def wypelnij(self):\n self.xokrag.wypelniaj()\n\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n\n def usun(self):\n 
self.xokrag.usuwaj()\n\n\n<mask token>\n",
"step-5": "class Figura:\n def __init__(self):\n print(\"Tworze obiekt klasy Figura...\")\n def pobierz_polozenie(self):\n print(\"Metoda pobierz_polozenie klasy Figura.\")\n def nadaj_polozenie(self):\n print(\"Metoda nadaj_polozenie klasy Figura.\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Figura.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Figura.\")\n def nadaj_kolor(self):\n print(\"Metoda nadaj_kolor klasy Figura.\")\n def usun(self):\n print(\"Metoda usun klasy Figura.\")\n\nclass Punkt(Figura):\n def __init__(self):\n print(\"Tworze obiekt klasy Punkt...\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Punkt.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Punkt.\")\n def usun(self):\n print(\"Metoda usun klasy Punkt.\")\n\nclass Linia(Figura):\n def __init__(self):\n print(\"Tworze obiekt klasy Linia...\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Linia.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Linia.\")\n def usun(self):\n print(\"Metoda usun klasy Linia.\")\n\nclass Kwadrat(Figura):\n def __init__(self):\n print(\"Tworze obiekt klasy Kwadrat...\")\n def wyswietl(self):\n print(\"Metoda wyswietl klasy Kwadrat.\")\n def wypelnij(self):\n print(\"Metoda wypelnij klasy Kwadrat.\")\n def usun(self):\n print(\"Metoda usun klasy Kwadrat.\")\n\nclass XXOkrag:\n def __init__(self):\n print(\"Tworze obiekt klasy XXOkrag...\")\n def wyswietlaj(self):\n print(\"Metoda wyswietlaj klasy XXOkrag.\")\n def wypelniaj(self):\n print(\"Metoda wypelniaj klasy XXOkrag.\")\n def usuwaj(self):\n print(\"Metoda usuwaj klasy XXOkrag.\")\n def pobierz_polozenie(self):\n print(\"Metoda pobierz_polozenie klasy XXOkrag.\")\n def nadaj_polozenie(self):\n print(\"Metoda nadaj_polozenie klasy XXOkrag.\")\n def ustaw_kolor(self):\n print(\"Metoda ustaw_kolor klasy XXOkrag.\")\n\nclass Okrag(Figura):\n def __init__(self):\n self.xokrag = XXOkrag()\n def pobierz_polozenie(self):\n self.xokrag.pobierz_polozenie()\n def 
nadaj_polozenie(self):\n self.xokrag.nadaj_polozenie()\n def wyswietl(self):\n self.xokrag.wyswietlaj()\n def wypelnij(self):\n self.xokrag.wypelniaj()\n def nadaj_kolor(self):\n self.xokrag.ustaw_kolor()\n def usun(self):\n self.xokrag.usuwaj()\n\nif __name__ == \"__main__\":\n\n lista_figur = [Linia(), Kwadrat(), Okrag()]\n\n for fig in lista_figur:\n fig.wyswietl()\n",
"step-ids": [
27,
30,
31,
34,
41
]
}
|
[
27,
30,
31,
34,
41
] |
# Prefer the installed SDK's absolute import path; fall back to the
# package-relative import when this module is loaded from the source tree.
try:
    from zcrmsdk.src.com.zoho.crm.api.dc.data_center import DataCenter
except Exception as e:
    from .data_center import DataCenter
class EUDataCenter(DataCenter):
    """
    This class represents the properties of Zoho CRM in EU Domain.
    """

    @classmethod
    def _make_environment(cls, api_url):
        """Build an Environment bound to *api_url* with the EU IAM and upload URLs."""
        return DataCenter.Environment(api_url, cls().get_iam_url(), cls().get_file_upload_url())

    @classmethod
    def PRODUCTION(cls):
        """
        This method represents the Zoho CRM Production environment in EU domain
        :return: An instance of Environments
        """
        return cls._make_environment("https://www.zohoapis.eu")

    @classmethod
    def SANDBOX(cls):
        """
        This method represents the Zoho CRM Sandbox environment in EU domain
        :return: An instance of Environment
        """
        return cls._make_environment("https://sandbox.zohoapis.eu")

    @classmethod
    def DEVELOPER(cls):
        """
        This method represents the Zoho CRM Developer environment in EU domain
        :return: An instance of Environment
        """
        return cls._make_environment("https://developer.zohoapis.eu")

    def get_iam_url(self):
        return "https://accounts.zoho.eu/oauth/v2/token"

    def get_file_upload_url(self):
        return "https://content.zohoapis.eu"
|
normal
|
{
"blob_id": "27c364ccf4a6703f74c95ebb386f8ced38b1eafd",
"index": 4960,
"step-1": "<mask token>\n\n\nclass EUDataCenter(DataCenter):\n <mask token>\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass EUDataCenter(DataCenter):\n <mask token>\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return 'https://accounts.zoho.eu/oauth/v2/token'\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass EUDataCenter(DataCenter):\n \"\"\"\n This class represents the properties of Zoho CRM in EU Domain.\n \"\"\"\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return 'https://accounts.zoho.eu/oauth/v2/token'\n\n def get_file_upload_url(self):\n return 'https://content.zohoapis.eu'\n",
"step-4": "try:\n from zcrmsdk.src.com.zoho.crm.api.dc.data_center import DataCenter\nexcept Exception as e:\n from .data_center import DataCenter\n\n\nclass EUDataCenter(DataCenter):\n \"\"\"\n This class represents the properties of Zoho CRM in EU Domain.\n \"\"\"\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return 'https://accounts.zoho.eu/oauth/v2/token'\n\n def get_file_upload_url(self):\n return 'https://content.zohoapis.eu'\n",
"step-5": "try:\n from zcrmsdk.src.com.zoho.crm.api.dc.data_center import DataCenter\nexcept Exception as e:\n from .data_center import DataCenter\n\n\nclass EUDataCenter(DataCenter):\n\n \"\"\"\n This class represents the properties of Zoho CRM in EU Domain.\n \"\"\"\n\n @classmethod\n def PRODUCTION(cls):\n\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n\n return DataCenter.Environment(\"https://www.zohoapis.eu\", cls().get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n\n return DataCenter.Environment(\"https://sandbox.zohoapis.eu\", cls().get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n\n return DataCenter.Environment(\"https://developer.zohoapis.eu\", cls().get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return \"https://accounts.zoho.eu/oauth/v2/token\"\n\n def get_file_upload_url(self):\n return \"https://content.zohoapis.eu\"\n",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
# Read a sentence and a separator from the user, then echo the sentence
# with every space replaced by the chosen separator.
texto = input("Introduzca su cadena: ")
sep = input("Introduzca el separador: ")
print(texto.replace(" ", sep))
|
normal
|
{
"blob_id": "290b8b4c3aeafc84b1e9cce7e6d2a5e770bd8716",
"index": 3444,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(cadena.replace(' ', separador))\n",
"step-3": "cadena = input('Introduzca su cadena: ')\nseparador = input('Introduzca el separador: ')\nprint(cadena.replace(' ', separador))\n",
"step-4": "cadena = input(\"Introduzca su cadena: \")\nseparador = input(\"Introduzca el separador: \")\nprint(cadena.replace(\" \", separador))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from flask import Blueprint, request, jsonify
from to_dict import *
from validacao import *
import sqlite3
from migration import conectar, create_database
from contextlib import closing
# Blueprint grouping the aluno (student) HTTP endpoints defined below.
aluno = Blueprint("aluno", __name__)
@aluno.route("/hello")
def hello():
    """Trivial liveness endpoint for the aluno blueprint."""
    return "Hello, aluno"
@aluno.route("/reseta", methods=["POST"])
def reseta():
    """Wipe every row from the aluno, disciplina and professor tables."""
    statements = (
        "DELETE FROM aluno",
        "DELETE FROM disciplina",
        "DELETE FROM professor",
    )
    with closing(conectar()) as conn, closing(conn.cursor()) as cursor:
        for statement in statements:
            cursor.execute(statement)
        conn.commit()
    return jsonify({'sucess': 'reset efetuado com suceso'}), 200
@aluno.route("/alunos", methods=["GET"])
def alunos_retorna_lista():
    """List every aluno row as JSON: [{'id': ..., 'nome': ...}, ...]."""
    with closing(conectar()) as conn, closing(conn.cursor()) as cursor:
        cursor.execute("SELECT * FROM aluno")
        resultados = [{"id": row_id, "nome": nome}
                      for row_id, nome in cursor.fetchall()]
    return jsonify(resultados), 200
@aluno.route('/alunos/<int:id>', methods=["GET"])
def aluno_por_id(id):
    """Fetch a single aluno by primary key.

    Returns the row as JSON with status 200, or a JSON error with
    status 404 when the id does not exist. (The previous version
    returned a bare None, which Flask cannot turn into a response.)
    """
    sql = "SELECT id, nome FROM aluno WHERE id = ?"
    with closing(conectar()) as conn, closing(conn.cursor()) as cursor:
        cursor.execute(sql, (id,))
        row = cursor.fetchone()
    if row is None:
        return jsonify({'erro': 'aluno não encontrado'}), 404
    return jsonify({"id": row[0], "nome": row[1]}), 200
@aluno.route("/alunos", methods=["POST"])
def adiciona_alunos():
    """Insert a new aluno from the JSON body and return the new row id."""
    nome = request.get_json()['nome']
    with closing(conectar()) as conn, closing(conn.cursor()) as cursor:
        cursor.execute("INSERT INTO aluno (nome) VALUES (?)", (nome,))
        conn.commit()
        return jsonify(cursor.lastrowid)
@aluno.route("/alunos/<int:id>", methods=["PUT"])
def editar_aluno(id):
    """Rename an existing aluno.

    Expects a JSON body with a 'nome' key. Answers 404 when the id does
    not exist — restoring the contract sketched in the previously
    commented-out code, which silently disappeared in the SQL rewrite.
    """
    dados = request.get_json()
    sql = "UPDATE aluno SET nome = ? WHERE id = ?"
    with closing(conectar()) as conn, closing(conn.cursor()) as cursor:
        cursor.execute(sql, (dados['nome'], id))
        # rowcount == 0 means no row matched the id, so nothing was updated.
        if cursor.rowcount == 0:
            return jsonify({'erro': 'aluno não encontrado'}), 404
        conn.commit()
    return jsonify(dados['nome']), 200
@aluno.route("/alunos/<int:id>", methods=["DELETE"])
def deletar_aluno(id):
    """Delete the aluno with the given id and echo the id back with 200."""
    with closing(conectar()) as conn, closing(conn.cursor()) as cursor:
        cursor.execute("DELETE FROM aluno WHERE id = ?", (id,))
        conn.commit()
    return jsonify(id), 200
|
normal
|
{
"blob_id": "5068336ca1a180e09a7efd41eea596cdcebb33ae",
"index": 5586,
"step-1": "<mask token>\n\n\[email protected]('/hello')\ndef hello():\n return 'Hello, aluno'\n\n\[email protected]('/reseta', methods=['POST'])\ndef reseta():\n sqlaluno = 'DELETE FROM aluno'\n sqldisciplina = 'DELETE FROM disciplina'\n sqlprofessor = 'DELETE FROM professor'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sqlaluno)\n cursor.execute(sqldisciplina)\n cursor.execute(sqlprofessor)\n conn.commit()\n return jsonify({'sucess': 'reset efetuado com suceso'}), 200\n\n\[email protected]('/alunos', methods=['GET'])\ndef alunos_retorna_lista():\n sql = 'SELECT * FROM aluno'\n resultados = []\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql)\n linhas = cursor.fetchall()\n for id, nome in linhas:\n resultados.append({'id': id, 'nome': nome})\n return jsonify(resultados), 200\n\n\n<mask token>\n\n\[email protected]('/alunos', methods=['POST'])\ndef adiciona_alunos():\n dados = request.get_json()\n params = dados['nome'],\n sql = 'INSERT INTO aluno (nome) VALUES (?)'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(cursor.lastrowid)\n\n\[email protected]('/alunos/<int:id>', methods=['PUT'])\ndef editar_aluno(id):\n dados = request.get_json()\n params = dados['nome'], id\n sql = 'UPDATE aluno SET nome = ? WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(dados['nome']), 200\n\n\[email protected]('/alunos/<int:id>', methods=['DELETE'])\ndef deletar_aluno(id):\n params = id,\n sql = 'DELETE FROM aluno WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(id), 200\n",
"step-2": "<mask token>\n\n\[email protected]('/hello')\ndef hello():\n return 'Hello, aluno'\n\n\[email protected]('/reseta', methods=['POST'])\ndef reseta():\n sqlaluno = 'DELETE FROM aluno'\n sqldisciplina = 'DELETE FROM disciplina'\n sqlprofessor = 'DELETE FROM professor'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sqlaluno)\n cursor.execute(sqldisciplina)\n cursor.execute(sqlprofessor)\n conn.commit()\n return jsonify({'sucess': 'reset efetuado com suceso'}), 200\n\n\[email protected]('/alunos', methods=['GET'])\ndef alunos_retorna_lista():\n sql = 'SELECT * FROM aluno'\n resultados = []\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql)\n linhas = cursor.fetchall()\n for id, nome in linhas:\n resultados.append({'id': id, 'nome': nome})\n return jsonify(resultados), 200\n\n\[email protected]('/alunos/<int:id>', methods=['GET'])\ndef aluno_por_id(id):\n sql = 'SELECT id, nome FROM aluno WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, (id,))\n r = cursor.fetchone()\n if r == None:\n return None\n return {'id': r[0], 'nome': r[1]}\n\n\[email protected]('/alunos', methods=['POST'])\ndef adiciona_alunos():\n dados = request.get_json()\n params = dados['nome'],\n sql = 'INSERT INTO aluno (nome) VALUES (?)'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(cursor.lastrowid)\n\n\[email protected]('/alunos/<int:id>', methods=['PUT'])\ndef editar_aluno(id):\n dados = request.get_json()\n params = dados['nome'], id\n sql = 'UPDATE aluno SET nome = ? 
WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(dados['nome']), 200\n\n\[email protected]('/alunos/<int:id>', methods=['DELETE'])\ndef deletar_aluno(id):\n params = id,\n sql = 'DELETE FROM aluno WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(id), 200\n",
"step-3": "<mask token>\naluno = Blueprint('aluno', __name__)\n\n\[email protected]('/hello')\ndef hello():\n return 'Hello, aluno'\n\n\[email protected]('/reseta', methods=['POST'])\ndef reseta():\n sqlaluno = 'DELETE FROM aluno'\n sqldisciplina = 'DELETE FROM disciplina'\n sqlprofessor = 'DELETE FROM professor'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sqlaluno)\n cursor.execute(sqldisciplina)\n cursor.execute(sqlprofessor)\n conn.commit()\n return jsonify({'sucess': 'reset efetuado com suceso'}), 200\n\n\[email protected]('/alunos', methods=['GET'])\ndef alunos_retorna_lista():\n sql = 'SELECT * FROM aluno'\n resultados = []\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql)\n linhas = cursor.fetchall()\n for id, nome in linhas:\n resultados.append({'id': id, 'nome': nome})\n return jsonify(resultados), 200\n\n\[email protected]('/alunos/<int:id>', methods=['GET'])\ndef aluno_por_id(id):\n sql = 'SELECT id, nome FROM aluno WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, (id,))\n r = cursor.fetchone()\n if r == None:\n return None\n return {'id': r[0], 'nome': r[1]}\n\n\[email protected]('/alunos', methods=['POST'])\ndef adiciona_alunos():\n dados = request.get_json()\n params = dados['nome'],\n sql = 'INSERT INTO aluno (nome) VALUES (?)'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(cursor.lastrowid)\n\n\[email protected]('/alunos/<int:id>', methods=['PUT'])\ndef editar_aluno(id):\n dados = request.get_json()\n params = dados['nome'], id\n sql = 'UPDATE aluno SET nome = ? 
WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(dados['nome']), 200\n\n\[email protected]('/alunos/<int:id>', methods=['DELETE'])\ndef deletar_aluno(id):\n params = id,\n sql = 'DELETE FROM aluno WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(id), 200\n",
"step-4": "from flask import Blueprint, request, jsonify\nfrom to_dict import *\nfrom validacao import *\nimport sqlite3\nfrom migration import conectar, create_database\nfrom contextlib import closing\naluno = Blueprint('aluno', __name__)\n\n\[email protected]('/hello')\ndef hello():\n return 'Hello, aluno'\n\n\[email protected]('/reseta', methods=['POST'])\ndef reseta():\n sqlaluno = 'DELETE FROM aluno'\n sqldisciplina = 'DELETE FROM disciplina'\n sqlprofessor = 'DELETE FROM professor'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sqlaluno)\n cursor.execute(sqldisciplina)\n cursor.execute(sqlprofessor)\n conn.commit()\n return jsonify({'sucess': 'reset efetuado com suceso'}), 200\n\n\[email protected]('/alunos', methods=['GET'])\ndef alunos_retorna_lista():\n sql = 'SELECT * FROM aluno'\n resultados = []\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql)\n linhas = cursor.fetchall()\n for id, nome in linhas:\n resultados.append({'id': id, 'nome': nome})\n return jsonify(resultados), 200\n\n\[email protected]('/alunos/<int:id>', methods=['GET'])\ndef aluno_por_id(id):\n sql = 'SELECT id, nome FROM aluno WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, (id,))\n r = cursor.fetchone()\n if r == None:\n return None\n return {'id': r[0], 'nome': r[1]}\n\n\[email protected]('/alunos', methods=['POST'])\ndef adiciona_alunos():\n dados = request.get_json()\n params = dados['nome'],\n sql = 'INSERT INTO aluno (nome) VALUES (?)'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(cursor.lastrowid)\n\n\[email protected]('/alunos/<int:id>', methods=['PUT'])\ndef editar_aluno(id):\n dados = request.get_json()\n params = dados['nome'], id\n sql = 'UPDATE aluno SET nome = ? 
WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(dados['nome']), 200\n\n\[email protected]('/alunos/<int:id>', methods=['DELETE'])\ndef deletar_aluno(id):\n params = id,\n sql = 'DELETE FROM aluno WHERE id = ?'\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, params)\n conn.commit()\n return jsonify(id), 200\n",
"step-5": "from flask import Blueprint, request, jsonify\nfrom to_dict import *\nfrom validacao import *\nimport sqlite3\nfrom migration import conectar, create_database\nfrom contextlib import closing\n\naluno = Blueprint(\"aluno\", __name__)\n\[email protected](\"/hello\")\ndef hello():\n return \"Hello, aluno\"\n\[email protected](\"/reseta\", methods = [\"POST\"])\ndef reseta():\n sqlaluno = \"\"\"DELETE FROM aluno\"\"\"\n sqldisciplina = \"\"\"DELETE FROM disciplina\"\"\"\n sqlprofessor = \"\"\"DELETE FROM professor\"\"\"\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sqlaluno)\n cursor.execute(sqldisciplina)\n cursor.execute(sqlprofessor)\n conn.commit()\n return jsonify({'sucess': 'reset efetuado com suceso'}), 200\n\[email protected](\"/alunos\", methods = [\"GET\"])\ndef alunos_retorna_lista():\n sql = \"\"\"SELECT * FROM aluno\"\"\"\n resultados = []\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql)\n linhas = cursor.fetchall()\n for id, nome in linhas:\n resultados.append({\"id\": id, \"nome\": nome})\n return jsonify(resultados), 200\n #return jsonify(alunos), 200\n\[email protected]('/alunos/<int:id>', methods = [\"GET\"])\ndef aluno_por_id(id):\n sql = \"SELECT id, nome FROM aluno WHERE id = ?\"\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, (id, ))\n r = cursor.fetchone()\n if r == None: return None\n return {\"id\": r[0], \"nome\": r[1]}\n\n\[email protected](\"/alunos\", methods = [\"POST\"])\ndef adiciona_alunos():\n dados = request.get_json()\n params = (dados['nome'],)\n sql = \"INSERT INTO aluno (nome) VALUES (?)\"\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, (params))\n conn.commit()\n return jsonify(cursor.lastrowid)\n \n\n\[email protected](\"/alunos/<int:id>\", methods = [\"PUT\"])\ndef editar_aluno(id):\n dados = request.get_json()\n params = (dados['nome'], id)\n sql 
= \"UPDATE aluno SET nome = ? WHERE id = ?\"\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, (params))\n conn.commit()\n return jsonify(dados['nome']), 200\n\n# for aluno in alunos:\n# if aluno['id'] == id:\n# aluno['nome'] = request.get_json().get('nome')\n# return jsonify(aluno), 200\n# return jsonify({'erro': 'aluno não encontrado'}), 404\n\[email protected](\"/alunos/<int:id>\", methods = [\"DELETE\"])\ndef deletar_aluno(id):\n params = (id,)\n sql = \"DELETE FROM aluno WHERE id = ?\"\n with closing(conectar()) as conn, closing(conn.cursor()) as cursor:\n cursor.execute(sql, (params))\n conn.commit()\n return jsonify(id), 200",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
class User:
    """A toy bank account with a class-level registry of every instance."""

    # Registry of all accounts ever opened (shared by the whole class).
    account = []

    def __init__(self, balance, int_rate):
        """Open an account with a starting balance and an interest rate."""
        self.balance = balance
        self.int_rate = int_rate
        User.account.append(self)

    def dep(self, amount):
        """Deposit *amount*; returns self so calls can be chained."""
        self.balance = self.balance + amount
        return self

    def make_withdrawal(self, amount):
        """Withdraw *amount*; an overdraft is refused and charged a $5 fee."""
        if self.balance < amount:
            print("Insufficient funds:Charging a $5 fee")
            self.balance -= 5
        else:
            self.balance -= amount
        return self

    def display_account_info(self):
        """Print the current balance; returns self for chaining."""
        print(self.balance)
        return self

    def yield_interest(self):
        """Apply one interest period (balance * int_rate) to the balance."""
        gained = self.balance * self.int_rate
        self.balance = self.balance + gained
        return self

    @classmethod
    def we_call_cls(cls):
        """Print the balance of every account in the registry."""
        for acct in cls.account:
            acct.display_account_info()
class Jedi:
    """A named jedi holding a couple of demo bank accounts."""

    def __init__(self, name):
        # The jedi's display name.
        self.name = name
        # Two demo accounts keyed by side; creating them also registers
        # each one in User.account.
        self.account = {
            "Grey": User(5000, .3),
            "light": User(300, .33),
        }
# Demo: create a jedi (which registers two User accounts) and report
# every registered account's balance.
prey = Jedi('prey')
print(prey.name)
# Bug fix: Jedi does not define (or inherit) we_call_cls, so the original
# prey.we_call_cls() raised AttributeError.  The registry report lives on
# the User class and must be invoked there.
User.we_call_cls()
|
normal
|
{
"blob_id": "ff3f6d50498f58f3a340e2d690165efcc1a5fb1d",
"index": 6000,
"step-1": "class User:\n <mask token>\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n <mask token>\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\n<mask token>\n",
"step-2": "class User:\n account = []\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n\n def yield_interest(self):\n self.balance = self.balance + self.balance * self.int_rate\n return self\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\n<mask token>\n",
"step-3": "class User:\n account = []\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n\n def yield_interest(self):\n self.balance = self.balance + self.balance * self.int_rate\n return self\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\n<mask token>\nprint(prey.name)\nprey.we_call_cls()\n",
"step-4": "class User:\n account = []\n\n def __init__(self, balance, int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n\n def dep(self, amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self, amount):\n if self.balance - amount >= 0:\n self.balance -= amount\n else:\n print('Insufficient funds:Charging a $5 fee')\n self.balance -= 5\n return self\n\n def display_account_info(self):\n print(self.balance)\n return self\n\n def yield_interest(self):\n self.balance = self.balance + self.balance * self.int_rate\n return self\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n\n\nclass Jedi:\n\n def __init__(self, name):\n self.name = name\n self.account = {'Grey': User(5000, 0.3), 'light': User(300, 0.33)}\n\n\nprey = Jedi('prey')\nprint(prey.name)\nprey.we_call_cls()\n",
"step-5": "class User:\n account = []\n def __init__(self,balance,int_rate):\n self.balance = balance\n self.int_rate = int_rate\n User.account.append(self)\n def dep(self,amount):\n self.balance += amount\n return self\n\n def make_withdrawal(self,amount):\n if(self.balance-amount) >= 0:\n self.balance -= amount\n else:\n print(\"Insufficient funds:Charging a $5 fee\")\n self.balance -= 5\n return self\n \n def display_account_info(self):\n print(self.balance) #print(f\"Balance:{self.balance}\")\n return(self)\n \n def yield_interest(self):\n # self.balance+=(self.balance*self.int_rate)#times by a decimal gets you a smaller number\n self.balance=self.balance+self.balance*self.int_rate\n return(self)\n\n @classmethod\n def we_call_cls(cls):\n for account in cls.account:\n account.display_account_info()\n \n\nclass Jedi:\n def __init__(self,name):\n self.name = name #this means that its name is its name.\n self.account = {\n \"Grey\": User(5000,.3),\n \"light\": User(300,.33)\n }\n\nprey=Jedi('prey')\nprint(prey.name)\n\n\nprey.we_call_cls()\n\n\n\n",
"step-ids": [
8,
10,
11,
12,
13
]
}
|
[
8,
10,
11,
12,
13
] |
#!/usr/bin/env python
# coding: utf-8
# # Cabecera
# In[1]:
# -*- coding: utf-8 -*-
# ------------- Cantidad de segundos que has vivido -------------
# # Definición de variables
# In[2]:
# Definición de variables
# Variable definitions for "seconds you have lived".
anios = 30
dias_por_anio = 365
horas_por_dia = 24
# Bug fix: one hour has 3600 seconds (60 minutes * 60 seconds); the
# original value of 60 dropped the minutes factor, underestimating the
# result by a factor of 60.
segundos_por_hora = 3600

# Seconds lived = years * days/year * hours/day * seconds/hour.
print(anios * dias_por_anio * horas_por_dia * segundos_por_hora)
# In[ ]:
|
normal
|
{
"blob_id": "f153da7e4537f807f6c9d9d268a00443933d8315",
"index": 4167,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(anios * dias_por_anio * horas_por_dia * segundos_por_hora)\n",
"step-3": "anios = 30\ndias_por_anio = 365\nhoras_por_dia = 24\nsegundos_por_hora = 60\nprint(anios * dias_por_anio * horas_por_dia * segundos_por_hora)\n",
"step-4": "#!/usr/bin/env python\n# coding: utf-8\n\n# # Cabecera\n\n# In[1]:\n\n\n# -*- coding: utf-8 -*-\n\n# ------------- Cantidad de segundos que has vivido -------------\n\n\n# # Definición de variables\n\n# In[2]:\n\n\n# Definición de variables\nanios = 30\ndias_por_anio = 365\nhoras_por_dia = 24\nsegundos_por_hora = 60\n\n\n# # Operación\n\n# In[3]:\n\n\n# Operación\nprint (anios * dias_por_anio * horas_por_dia * segundos_por_hora)\n\n\n# In[ ]:\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import json
from faker import Faker
import random
# Shared Faker instance used by Hour; the date_time provider supplies
# day_of_week().
fake = Faker()
from faker.providers import date_time
fake.add_provider(date_time)
class Hour(object):
    """One randomly generated opening-hours entry for a random weekday."""

    def __init__(self):
        # Random weekday name from the shared Faker instance.
        self.dayOfTheWeek = fake.day_of_week()
        # Opening hour in the morning, closing hour in the evening
        # (1-12 on a 12-hour clock).
        self.openingTime = '{}AM'.format(random.randint(1, 12))
        self.closingTime = '{}PM'.format(random.randint(1, 12))
|
normal
|
{
"blob_id": "e3386b01bb0bdc7064a2e3e9f3edce8a3231721b",
"index": 3664,
"step-1": "<mask token>\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-2": "<mask token>\nfake.add_provider(date_time)\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-3": "<mask token>\nfake = Faker()\n<mask token>\nfake.add_provider(date_time)\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-4": "import json\nfrom faker import Faker\nimport random\nfake = Faker()\nfrom faker.providers import date_time\nfake.add_provider(date_time)\n\n\nclass Hour(object):\n\n def __init__(self):\n self.dayOfTheWeek = fake.day_of_week()\n self.openingTime = str(random.randint(1, 12)) + 'AM'\n self.closingTime = str(random.randint(1, 12)) + 'PM'\n",
"step-5": null,
"step-ids": [
2,
3,
4,
5
]
}
|
[
2,
3,
4,
5
] |
from appium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from appium.webdriver.common.touch_action import TouchAction
import time
import re
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import pymongo
def getSize():
    """Return the device window size as an (x, y) tuple of pixels."""
    width = driver.get_window_size()['width']
    height = driver.get_window_size()['height']
    return (width, height)
'''
解释:int start x-开始滑动的x坐标,
int start y -开始滑动的y坐标。
int end x -结束点x坐标,
int end y -结束点y坐标。
duration 滑动时间(默认5毫秒);
'''
def swipeUp(t):
    """Swipe vertically from 75% down the screen up to 25%, over *t* ms."""
    width, height = getSize()
    x = int(width * 0.5)          # horizontal midpoint
    y_start = int(height * 0.75)  # start near the bottom
    y_end = int(height * 0.25)    # finish near the top
    driver.swipe(x, y_start, x, y_end, t)
def crawl():
    """Endlessly scrape freight-order cards from the app's order list.

    For every card currently on screen, read the company name, time
    window, addresses, goods type, price and distance, then upsert the
    record into the ``data_detail`` Mongo collection.  Relies on the
    module-level ``driver``, ``wait``, ``collection`` and ``swipeUp``.
    Never returns.
    """
    while True:
        # Wait for the RecyclerView that holds the order cards.
        items = wait.until(EC.presence_of_all_elements_located(
            (By.XPATH,'/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.support.v4.view.ViewPager/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.view.ViewGroup/android.support.v7.widget.RecyclerView' )))
        # Scroll so the next batch of cards becomes visible.
        swipeUp(1500)
        for item in items:
            try:
                nickname = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderCompany').get_attribute('text')
                content = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderStartTime').get_attribute('text')
                # The time window is rendered as "<start>至<end>" ("至" = "to").
                list_time = content.split("至", 1)
                start_time = list_time[0]
                deadline = list_time[1]
                send = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailStartAdd').get_attribute('text')
                receive = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailEndAdd').get_attribute('text')
                # NOTE(review): "type" shadows the builtin; kept as-is here.
                type = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailGoodsType1').get_attribute('text')
                raw_price= item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailFreight1').get_attribute('text')
                # First number in the freight text is taken as the price.
                price = re.findall(r"\d+\.?\d*", raw_price)[0]
                raw_distance = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_search_goods_distance').get_attribute('text')
                # The distance text carries at least two numbers; the second
                # is treated as the distance value -- TODO confirm in the UI.
                list_raw = re.findall(r"\d+\.?\d*",raw_distance)
                distance = list_raw[1]
                data = {'nickname': nickname, 'start_time':start_time, 'deadline':deadline,'send':send,'receive':receive,'type':type,'price':price,'distance':distance}
                #self.collection.update({'nickname': nickname, 'content': content}, {'$set': data}, True)
                print(data)
                # Upsert keyed on the full record so each distinct order is
                # stored only once.
                collection.update_one({'nickname': nickname,'start_time':start_time,'deadline':deadline,'send':send,'receive':receive,'type':type,'price':price,'distance':distance}, {'$set': data},upsert=True)
            except BaseException as e:
                # Broad catch keeps the scraper alive when a card lacks a
                # field; the error is only printed.
                print(e)
# --- MongoDB sink for scraped records ---------------------------------
client=pymongo.MongoClient("127.0.0.1",27017)
db=client.kc_data
collection=db.data_detail
# --- Appium session against the freight-driver app --------------------
desired_caps = {}
desired_caps['platformName'] ='Android'
desired_caps['deviceName']='f866d421'
desired_caps['appPackage']='com.kuaichengwuliu.driver'
desired_caps['appActivity']='.guide.GuideActivity'#'.guide.GuideActivity'
driver_server='http://localhost:4723/wd/hub'
desired_caps['autoAcceptAlerts']="true"
desired_caps['platformVersion'] = '6.0.1'
driver = webdriver.Remote(driver_server,desired_caps)
wait = WebDriverWait(driver, 300)
#WebDriverWait(driver, 20).until(lambda the_driver: the_driver.find_element_by_id("com.kuyu:id/tv_login").is_displayed())
#time.sleep(30)
# Wait until the app's content view is displayed before tapping anything.
WebDriverWait(driver, 7).until(lambda the_driver: driver.find_element_by_id("android:id/content").is_displayed())
# Hard-coded tap sequence that walks through the app's guide/login
# screens; the coordinates assume this specific device's resolution --
# TODO confirm on other devices.
TouchAction(driver).tap(x=545, y=181).release().perform()
time.sleep(1)
TouchAction(driver).tap(x=161, y=706).release().perform()
time.sleep(1)
TouchAction(driver).tap(x=534, y=1029).release().perform()
time.sleep(1)
TouchAction(driver).tap(x=183, y=1029).release().perform()
time.sleep(1)
TouchAction(driver).tap(x=528, y=701).release().perform()
time.sleep(1)
TouchAction(driver).tap(x=183, y=684).release().perform()
time.sleep(4)
TouchAction(driver).tap(x=161, y=306).release().perform()
time.sleep(4)
TouchAction(driver).tap(x=128, y=303).release().perform()
time.sleep(5)
# Start the endless scraping loop; crawl() never returns, so everything
# below this call is unreachable.
crawl()
# Enter the username
#driver.find_element_by_id("com.kuyu:id/et_email").send_keys("******")
# Enter the password
#driver.find_element_by_id("com.kuyu:id/et_pwd").send_keys("******")
# Tap the login button
#driver.find_element_by_id("com.kuyu:id/tv_login").click()
# Wait until the given element appears, which indicates a successful login.
#WebDriverWait(driver, 20).until(
#    lambda the_driver: the_driver.find_element_by_id("com.kuyu:id/include_study_iv_add").is_displayed())
print(u"登录成功")
#driver.quit()
#TouchAction(driver).press(x=297, y=1073).move_to(x=309, y=459).release().perform()
|
normal
|
{
"blob_id": "6e614d1235a98ef496956001eef46b4447f0bf9b",
"index": 4677,
"step-1": "<mask token>\n\n\ndef getSize():\n x = driver.get_window_size()['width']\n y = driver.get_window_size()['height']\n return x, y\n\n\n<mask token>\n\n\ndef swipeUp(t):\n l = getSize()\n x1 = int(l[0] * 0.5)\n y1 = int(l[1] * 0.75)\n y2 = int(l[1] * 0.25)\n driver.swipe(x1, y1, x1, y2, t)\n\n\ndef crawl():\n while True:\n items = wait.until(EC.presence_of_all_elements_located((By.XPATH,\n '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.support.v4.view.ViewPager/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.view.ViewGroup/android.support.v7.widget.RecyclerView'\n )))\n swipeUp(1500)\n for item in items:\n try:\n nickname = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderCompany'\n ).get_attribute('text')\n content = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderStartTime'\n ).get_attribute('text')\n list_time = content.split('至', 1)\n start_time = list_time[0]\n deadline = list_time[1]\n send = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailStartAdd'\n ).get_attribute('text')\n receive = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailEndAdd'\n ).get_attribute('text')\n type = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailGoodsType1'\n ).get_attribute('text')\n raw_price = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailFreight1'\n ).get_attribute('text')\n price = re.findall('\\\\d+\\\\.?\\\\d*', raw_price)[0]\n raw_distance = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_search_goods_distance'\n ).get_attribute('text')\n list_raw = re.findall('\\\\d+\\\\.?\\\\d*', raw_distance)\n distance = list_raw[1]\n data = {'nickname': nickname, 'start_time': start_time,\n 'deadline': 
deadline, 'send': send, 'receive': receive,\n 'type': type, 'price': price, 'distance': distance}\n print(data)\n collection.update_one({'nickname': nickname, 'start_time':\n start_time, 'deadline': deadline, 'send': send,\n 'receive': receive, 'type': type, 'price': price,\n 'distance': distance}, {'$set': data}, upsert=True)\n except BaseException as e:\n print(e)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getSize():\n x = driver.get_window_size()['width']\n y = driver.get_window_size()['height']\n return x, y\n\n\n<mask token>\n\n\ndef swipeUp(t):\n l = getSize()\n x1 = int(l[0] * 0.5)\n y1 = int(l[1] * 0.75)\n y2 = int(l[1] * 0.25)\n driver.swipe(x1, y1, x1, y2, t)\n\n\ndef crawl():\n while True:\n items = wait.until(EC.presence_of_all_elements_located((By.XPATH,\n '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.support.v4.view.ViewPager/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.view.ViewGroup/android.support.v7.widget.RecyclerView'\n )))\n swipeUp(1500)\n for item in items:\n try:\n nickname = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderCompany'\n ).get_attribute('text')\n content = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderStartTime'\n ).get_attribute('text')\n list_time = content.split('至', 1)\n start_time = list_time[0]\n deadline = list_time[1]\n send = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailStartAdd'\n ).get_attribute('text')\n receive = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailEndAdd'\n ).get_attribute('text')\n type = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailGoodsType1'\n ).get_attribute('text')\n raw_price = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailFreight1'\n ).get_attribute('text')\n price = re.findall('\\\\d+\\\\.?\\\\d*', raw_price)[0]\n raw_distance = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_search_goods_distance'\n ).get_attribute('text')\n list_raw = re.findall('\\\\d+\\\\.?\\\\d*', raw_distance)\n distance = list_raw[1]\n data = {'nickname': nickname, 'start_time': start_time,\n 'deadline': 
deadline, 'send': send, 'receive': receive,\n 'type': type, 'price': price, 'distance': distance}\n print(data)\n collection.update_one({'nickname': nickname, 'start_time':\n start_time, 'deadline': deadline, 'send': send,\n 'receive': receive, 'type': type, 'price': price,\n 'distance': distance}, {'$set': data}, upsert=True)\n except BaseException as e:\n print(e)\n\n\n<mask token>\nWebDriverWait(driver, 7).until(lambda the_driver: driver.find_element_by_id\n ('android:id/content').is_displayed())\nTouchAction(driver).tap(x=545, y=181).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=161, y=706).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=534, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=528, y=701).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=684).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=161, y=306).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=128, y=303).release().perform()\ntime.sleep(5)\ncrawl()\nprint(u'登录成功')\n",
"step-3": "<mask token>\n\n\ndef getSize():\n x = driver.get_window_size()['width']\n y = driver.get_window_size()['height']\n return x, y\n\n\n<mask token>\n\n\ndef swipeUp(t):\n l = getSize()\n x1 = int(l[0] * 0.5)\n y1 = int(l[1] * 0.75)\n y2 = int(l[1] * 0.25)\n driver.swipe(x1, y1, x1, y2, t)\n\n\ndef crawl():\n while True:\n items = wait.until(EC.presence_of_all_elements_located((By.XPATH,\n '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.support.v4.view.ViewPager/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.view.ViewGroup/android.support.v7.widget.RecyclerView'\n )))\n swipeUp(1500)\n for item in items:\n try:\n nickname = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderCompany'\n ).get_attribute('text')\n content = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderStartTime'\n ).get_attribute('text')\n list_time = content.split('至', 1)\n start_time = list_time[0]\n deadline = list_time[1]\n send = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailStartAdd'\n ).get_attribute('text')\n receive = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailEndAdd'\n ).get_attribute('text')\n type = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailGoodsType1'\n ).get_attribute('text')\n raw_price = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailFreight1'\n ).get_attribute('text')\n price = re.findall('\\\\d+\\\\.?\\\\d*', raw_price)[0]\n raw_distance = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_search_goods_distance'\n ).get_attribute('text')\n list_raw = re.findall('\\\\d+\\\\.?\\\\d*', raw_distance)\n distance = list_raw[1]\n data = {'nickname': nickname, 'start_time': start_time,\n 'deadline': 
deadline, 'send': send, 'receive': receive,\n 'type': type, 'price': price, 'distance': distance}\n print(data)\n collection.update_one({'nickname': nickname, 'start_time':\n start_time, 'deadline': deadline, 'send': send,\n 'receive': receive, 'type': type, 'price': price,\n 'distance': distance}, {'$set': data}, upsert=True)\n except BaseException as e:\n print(e)\n\n\nclient = pymongo.MongoClient('127.0.0.1', 27017)\ndb = client.kc_data\ncollection = db.data_detail\ndesired_caps = {}\ndesired_caps['platformName'] = 'Android'\ndesired_caps['deviceName'] = 'f866d421'\ndesired_caps['appPackage'] = 'com.kuaichengwuliu.driver'\ndesired_caps['appActivity'] = '.guide.GuideActivity'\ndriver_server = 'http://localhost:4723/wd/hub'\ndesired_caps['autoAcceptAlerts'] = 'true'\ndesired_caps['platformVersion'] = '6.0.1'\ndriver = webdriver.Remote(driver_server, desired_caps)\nwait = WebDriverWait(driver, 300)\nWebDriverWait(driver, 7).until(lambda the_driver: driver.find_element_by_id\n ('android:id/content').is_displayed())\nTouchAction(driver).tap(x=545, y=181).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=161, y=706).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=534, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=528, y=701).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=684).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=161, y=306).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=128, y=303).release().perform()\ntime.sleep(5)\ncrawl()\nprint(u'登录成功')\n",
"step-4": "from appium import webdriver\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom appium.webdriver.common.touch_action import TouchAction\nimport time\nimport re\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\nimport pymongo\n\n\ndef getSize():\n x = driver.get_window_size()['width']\n y = driver.get_window_size()['height']\n return x, y\n\n\n<mask token>\n\n\ndef swipeUp(t):\n l = getSize()\n x1 = int(l[0] * 0.5)\n y1 = int(l[1] * 0.75)\n y2 = int(l[1] * 0.25)\n driver.swipe(x1, y1, x1, y2, t)\n\n\ndef crawl():\n while True:\n items = wait.until(EC.presence_of_all_elements_located((By.XPATH,\n '/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.support.v4.view.ViewPager/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.view.ViewGroup/android.support.v7.widget.RecyclerView'\n )))\n swipeUp(1500)\n for item in items:\n try:\n nickname = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderCompany'\n ).get_attribute('text')\n content = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderStartTime'\n ).get_attribute('text')\n list_time = content.split('至', 1)\n start_time = list_time[0]\n deadline = list_time[1]\n send = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailStartAdd'\n ).get_attribute('text')\n receive = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailEndAdd'\n ).get_attribute('text')\n type = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailGoodsType1'\n ).get_attribute('text')\n raw_price = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_orderDetailFreight1'\n ).get_attribute('text')\n price = re.findall('\\\\d+\\\\.?\\\\d*', 
raw_price)[0]\n raw_distance = item.find_element_by_id(\n 'com.kuaichengwuliu.driver:id/tv_search_goods_distance'\n ).get_attribute('text')\n list_raw = re.findall('\\\\d+\\\\.?\\\\d*', raw_distance)\n distance = list_raw[1]\n data = {'nickname': nickname, 'start_time': start_time,\n 'deadline': deadline, 'send': send, 'receive': receive,\n 'type': type, 'price': price, 'distance': distance}\n print(data)\n collection.update_one({'nickname': nickname, 'start_time':\n start_time, 'deadline': deadline, 'send': send,\n 'receive': receive, 'type': type, 'price': price,\n 'distance': distance}, {'$set': data}, upsert=True)\n except BaseException as e:\n print(e)\n\n\nclient = pymongo.MongoClient('127.0.0.1', 27017)\ndb = client.kc_data\ncollection = db.data_detail\ndesired_caps = {}\ndesired_caps['platformName'] = 'Android'\ndesired_caps['deviceName'] = 'f866d421'\ndesired_caps['appPackage'] = 'com.kuaichengwuliu.driver'\ndesired_caps['appActivity'] = '.guide.GuideActivity'\ndriver_server = 'http://localhost:4723/wd/hub'\ndesired_caps['autoAcceptAlerts'] = 'true'\ndesired_caps['platformVersion'] = '6.0.1'\ndriver = webdriver.Remote(driver_server, desired_caps)\nwait = WebDriverWait(driver, 300)\nWebDriverWait(driver, 7).until(lambda the_driver: driver.find_element_by_id\n ('android:id/content').is_displayed())\nTouchAction(driver).tap(x=545, y=181).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=161, y=706).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=534, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=528, y=701).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=684).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=161, y=306).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=128, y=303).release().perform()\ntime.sleep(5)\ncrawl()\nprint(u'登录成功')\n",
"step-5": "from appium import webdriver\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom appium.webdriver.common.touch_action import TouchAction\nimport time\nimport re\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\nimport pymongo\n\ndef getSize():\n x = driver.get_window_size()['width']\n y = driver.get_window_size()['height']\n return (x, y)\n\n'''\n解释:int start x-开始滑动的x坐标,\n\n int start y -开始滑动的y坐标。\n\n int end x -结束点x坐标,\n\n int end y -结束点y坐标。\n\n duration 滑动时间(默认5毫秒);\n'''\ndef swipeUp(t):\n l = getSize()\n x1 = int(l[0] * 0.5) #x坐标\n y1 = int(l[1] * 0.75) #起始y坐标\n y2 = int(l[1] * 0.25) #终点y坐标\n driver.swipe(x1, y1, x1, y2,t)\n\ndef crawl():\n while True:\n items = wait.until(EC.presence_of_all_elements_located(\n (By.XPATH,'/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.support.v4.view.ViewPager/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.view.ViewGroup/android.support.v7.widget.RecyclerView' )))\n swipeUp(1500)\n for item in items:\n try:\n nickname = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderCompany').get_attribute('text')\n content = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderStartTime').get_attribute('text')\n list_time = content.split(\"至\", 1)\n start_time = list_time[0]\n deadline = list_time[1]\n send = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailStartAdd').get_attribute('text')\n receive = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailEndAdd').get_attribute('text')\n type = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailGoodsType1').get_attribute('text')\n raw_price= 
item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_orderDetailFreight1').get_attribute('text')\n price = re.findall(r\"\\d+\\.?\\d*\", raw_price)[0]\n raw_distance = item.find_element_by_id('com.kuaichengwuliu.driver:id/tv_search_goods_distance').get_attribute('text')\n list_raw = re.findall(r\"\\d+\\.?\\d*\",raw_distance)\n distance = list_raw[1]\n data = {'nickname': nickname, 'start_time':start_time, 'deadline':deadline,'send':send,'receive':receive,'type':type,'price':price,'distance':distance}\n #self.collection.update({'nickname': nickname, 'content': content}, {'$set': data}, True)\n print(data)\n\n collection.update_one({'nickname': nickname,'start_time':start_time,'deadline':deadline,'send':send,'receive':receive,'type':type,'price':price,'distance':distance}, {'$set': data},upsert=True)\n\n except BaseException as e:\n print(e)\n\n\n\nclient=pymongo.MongoClient(\"127.0.0.1\",27017)\ndb=client.kc_data\ncollection=db.data_detail\ndesired_caps = {}\ndesired_caps['platformName'] ='Android'\ndesired_caps['deviceName']='f866d421'\ndesired_caps['appPackage']='com.kuaichengwuliu.driver'\ndesired_caps['appActivity']='.guide.GuideActivity'#'.guide.GuideActivity'\ndriver_server='http://localhost:4723/wd/hub'\ndesired_caps['autoAcceptAlerts']=\"true\"\ndesired_caps['platformVersion'] = '6.0.1'\ndriver = webdriver.Remote(driver_server,desired_caps)\nwait = WebDriverWait(driver, 300)\n\n#WebDriverWait(driver, 20).until(lambda the_driver: the_driver.find_element_by_id(\"com.kuyu:id/tv_login\").is_displayed())\n#time.sleep(30)\nWebDriverWait(driver, 7).until(lambda the_driver: driver.find_element_by_id(\"android:id/content\").is_displayed())\nTouchAction(driver).tap(x=545, y=181).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=161, y=706).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=534, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=1029).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=528, 
y=701).release().perform()\ntime.sleep(1)\nTouchAction(driver).tap(x=183, y=684).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=161, y=306).release().perform()\ntime.sleep(4)\nTouchAction(driver).tap(x=128, y=303).release().perform()\ntime.sleep(5)\ncrawl()\n\n\n# 输入用户名\n#driver.find_element_by_id(\"com.kuyu:id/et_email\").send_keys(\"******\")\n# 输入密码\n#driver.find_element_by_id(\"com.kuyu:id/et_pwd\").send_keys(\"******\")\n# 点击登录\n#driver.find_element_by_id(\"com.kuyu:id/tv_login\").click()\n# 这里加了一个等待,判断指定的元素出现则为登录成功(等待方法不懂没有关系,以后会再讲解如何设置等待)\n#WebDriverWait(driver, 20).until(\n# lambda the_driver: the_driver.find_element_by_id(\"com.kuyu:id/include_study_iv_add\").is_displayed())\nprint(u\"登录成功\")\n#driver.quit()\n#TouchAction(driver).press(x=297, y=1073).move_to(x=309, y=459).release().perform()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import argparse
import glob
import importlib
import inspect
import math
import os
import re
import subprocess
import sys
import moviepy.audio.fx.all as afx
import moviepy.video.fx.all as vfx
import numpy as np
from _appmanager import get_executable
from _shutil import format_time, get_time_str, getch, print2
from moviepy.config import change_settings
from moviepy.editor import *
from open_with.open_with import open_with
import codeapi
import core
import coreapi
import datastruct
# Directory containing this script file.
SCRIPT_ROOT = os.path.dirname(os.path.abspath(__file__))

# When True, {{...}} blocks that reference undefined API names are skipped
# instead of raising NameError.
ignore_undefined = False

# Point MoviePy at the managed ffmpeg executable.
change_settings({"FFMPEG_BINARY": get_executable("ffmpeg")})
# def _get_markers(file):
# marker_file = file + ".marker.txt"
# if os.path.exists(marker_file):
# with open(marker_file, "r") as f:
# s = f.read()
# return [float(x) for x in s.split()]
# else:
# return None
# def _load_and_expand_img(f):
# fg = Image.open(f).convert("RGBA")
# bg = Image.new("RGB", (1920, 1080))
# bg.paste(fg, ((bg.width - fg.width) // 2, (bg.height - fg.height) // 2), fg)
# return np.array(bg)
def _update_mpy_clip(
    clip, subclip, speed, frame, norm, loop, duration, pos, scale, vol, **kwargs,
):
    """Apply timing, speed, loop, position and scale settings to a MoviePy clip.

    Parameters mirror the fields of a clip-info record; ``**kwargs`` absorbs
    the fields this function does not handle here (e.g. fades are applied by
    the caller; ``norm``/``vol`` are handled on the audio path).
    Returns the transformed clip.  ``duration`` must already be resolved.
    """
    assert duration is not None

    # video clip operations / fx
    if subclip is not None:
        if isinstance(subclip, (int, float)):
            # A single number is a start offset; keep the requested duration.
            clip = clip.subclip(subclip).set_duration(duration)
        else:
            # A (start, end) pair.
            subclip_duration = subclip[1] - subclip[0]
            if duration > subclip_duration:
                # Requested duration exceeds the subclip: freeze on the last
                # frame to fill the remainder.
                c1 = clip.subclip(subclip[0], subclip[1])
                c2 = clip.to_ImageClip(subclip[1]).set_duration(
                    duration - subclip_duration
                )
                clip = concatenate_videoclips([c1, c2])

                # HACK: workaround for a bug: 'CompositeAudioClip' object has no attribute 'fps'
                if clip.audio is not None:
                    clip = clip.set_audio(clip.audio.set_fps(44100))
            else:
                clip = clip.subclip(subclip[0], subclip[1]).set_duration(duration)

    if speed is not None:
        clip = clip.fx(
            # pylint: disable=maybe-no-member
            vfx.speedx,
            speed,
        )

    if frame is not None:
        # Freeze on a single frame for the whole duration.
        clip = clip.to_ImageClip(frame).set_duration(duration)

    # Loop or change duration
    if loop:
        clip = clip.fx(
            # pylint: disable=maybe-no-member
            vfx.loop
        )

    if subclip is None:
        clip = clip.set_duration(duration)

    if pos is not None:
        # (x, y) marks the center location of the of the clip instead of the top
        # left corner.
        if pos == "center":
            clip = clip.set_position(("center", "center"))
        elif isinstance(pos, (list, tuple)):
            pos = list(pos)
            half_size = [x // 2 for x in clip.size]
            for i in range(2):
                if isinstance(pos[i], (int, float)):
                    # Shift from center-anchored to top-left-anchored and
                    # apply the global scale factor.
                    pos[i] = pos[i] - half_size[i]
                    pos[i] = int(coreapi.global_scale * pos[i])
            clip = clip.set_position(pos)
        else:
            # Pass through anything else (e.g. a position string) unchanged.
            clip = clip.set_position(pos)

    if scale[0] != 1.0 or scale[1] != 1.0:
        clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))

    return clip
def _update_clip_duration(track):
    """Resolve ``auto_extend`` durations and crossfade-driven fadeouts in a track.

    A clip marked ``auto_extend`` is stretched until the start of the next
    clip (the last clip may stretch to the recording-end marker in
    ``coreapi.pos_dict``).  A clip with ``crossfade`` set gets a fadeout only
    when it is not seamlessly connected to its successor.  Mutates the
    clip-info records in place.
    """

    def is_connected(prev_clip, cur_clip):
        # Two clips are "connected" when one ends where the next begins
        # (within a small relative tolerance for float timing).
        return math.isclose(
            prev_clip.start + prev_clip.duration, cur_clip.start, rel_tol=1e-3,
        )

    prev_clip_info = None
    for clip_info in track:
        if prev_clip_info is not None:
            if prev_clip_info.auto_extend:
                # Stretch the previous clip up to the current clip's start.
                prev_clip_info.duration = clip_info.start - prev_clip_info.start
                prev_clip_info.auto_extend = False
                assert prev_clip_info.duration > 0

            # Apply fadeout to previous clip if it's not connected with
            # current clip.
            if prev_clip_info.crossfade > 0 and not is_connected(
                prev_clip_info, clip_info
            ):
                prev_clip_info.fadeout = prev_clip_info.crossfade

        prev_clip_info = clip_info

    # Update last clip duration
    # NOTE(review): after the loop, ``clip_info`` and ``prev_clip_info``
    # reference the same (last) record, so using either below is equivalent.
    if prev_clip_info is not None:
        if prev_clip_info.auto_extend:
            duration = prev_clip_info.duration

            # Extend the last video clip to match the voice track
            if "re" in coreapi.pos_dict:
                duration = max(duration, coreapi.pos_dict["re"] - clip_info.start)

            prev_clip_info.duration = duration
            prev_clip_info.auto_extend = False

        if prev_clip_info.crossfade > 0:
            prev_clip_info.fadeout = prev_clip_info.crossfade
def _export_video(*, resolution, audio_only):
    """Render all registered tracks into ``<out_filename>.mp4`` (or ``.mp3``
    when ``audio_only``) and open the result for preview.

    Reads the global track registries in ``datastruct`` and the shared
    ``out_filename``; writes output files under ``tmp/out/``.
    """
    resolution = [int(x * coreapi.global_scale) for x in resolution]

    audio_clips = []

    # Update clip duration for each track
    for track in datastruct.video_tracks.values():
        _update_clip_duration(track)

    # TODO: post-process video track clips

    # Update MoviePy clip object in each track.
    video_clips = []
    for track_name, track in datastruct.video_tracks.items():
        for i, clip_info in enumerate(track):
            assert clip_info.mpy_clip is not None
            assert clip_info.duration is not None

            # Unlink audio clip from video clip (adjust audio duration)
            if clip_info.no_audio:
                clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)
            elif clip_info.mpy_clip.audio is not None:
                # Detach the embedded audio so it can be timed and mixed
                # independently of the video clip.
                audio_clip = clip_info.mpy_clip.audio
                clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)

                # Audio timing
                # TODO: audio subclip
                if clip_info.subclip is not None:
                    duration = clip_info.subclip[1] - clip_info.subclip[0]
                    audio_clip = audio_clip.subclip(
                        clip_info.subclip[0], clip_info.subclip[1]
                    )
                else:
                    duration = clip_info.duration
                    duration = min(duration, audio_clip.duration)
                    audio_clip = audio_clip.set_duration(duration)
                audio_clip = audio_clip.set_start(clip_info.start)

                # Adjust volume
                if clip_info.norm:
                    audio_clip = audio_clip.fx(
                        # pylint: disable=maybe-no-member
                        afx.audio_normalize
                    )
                if clip_info.vol is not None:
                    if isinstance(clip_info.vol, (int, float)):
                        # Constant volume factor.
                        audio_clip = audio_clip.fx(
                            # pylint: disable=maybe-no-member
                            afx.volumex,
                            clip_info.vol,
                        )
                    else:
                        # Keypoint list: time-varying volume envelope.
                        audio_clip = _adjust_mpy_audio_clip_volume(
                            audio_clip, clip_info.vol
                        )

                audio_clips.append(audio_clip)

            # If the next clip has crossfade enabled
            crossfade_duration = track[i + 1].crossfade if (i < len(track) - 1) else 0
            if crossfade_duration:
                # clip_info.fadeout = crossfade_duration  # Fadeout current clip
                # Extend this clip so the next one can fade in over it.
                clip_info.duration += crossfade_duration

            clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **vars(clip_info))

            # Deal with video fade in / out / crossfade
            if clip_info.fadein:
                assert isinstance(clip_info.fadein, (int, float))
                # TODO: crossfadein and crossfadeout is very slow in moviepy
                if track_name != "vid":
                    clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(
                        clip_info.fadein
                    )
                else:
                    clip_info.mpy_clip = clip_info.mpy_clip.fx(
                        # pylint: disable=maybe-no-member
                        vfx.fadein,
                        clip_info.fadein,
                    )
            elif (
                clip_info.crossfade > 0
            ):  # crossfade and fadein should not happen at the same time
                # Emit a short overlapping copy that fades in, then start the
                # real clip after the crossfade window.
                video_clips.append(
                    clip_info.mpy_clip.set_duration(clip_info.crossfade)
                    .crossfadein(clip_info.crossfade)
                    .set_start(clip_info.start)
                )

                clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.crossfade)
                clip_info.start += clip_info.crossfade

            if clip_info.fadeout:
                assert isinstance(clip_info.fadeout, (int, float))
                if track_name != "vid":
                    # pylint: disable=maybe-no-member
                    clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(
                        clip_info.fadeout
                    )
                else:
                    clip_info.mpy_clip = clip_info.mpy_clip.fx(
                        # pylint: disable=maybe-no-member
                        vfx.fadeout,
                        clip_info.fadeout,
                    )

            video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))

    if len(video_clips) == 0:
        # Fall back to a small green placeholder so export still succeeds.
        video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).set_duration(2))
        # raise Exception("no video clips??")
    final_clip = CompositeVideoClip(video_clips, size=resolution)

    # Resize here is too late, does not speed up the video encoding at all.
    # final_clip = final_clip.resize(width=480)

    # Deal with audio clips
    for _, track in datastruct.audio_tracks.items():
        clips = []
        for clip_info in track.clips:
            if clip_info.loop:
                # HACK: reload the clip.
                #
                # still don't know why using loaded mpy_clip directly will cause
                # "IndexError: index -200001 is out of bounds for axis 0 with
                # size 0"...
                clip = AudioFileClip(clip_info.file, buffersize=400000)
            else:
                clip = clip_info.mpy_clip

            if clip_info.subclip is not None:
                clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])

            duration = clip_info.duration
            if duration is not None:
                if clip_info.loop:
                    # pylint: disable=maybe-no-member
                    clip = clip.fx(afx.audio_loop, duration=duration)
                else:
                    # Clamp to the clip's (and subclip's) real length.
                    duration = min(duration, clip.duration)
                    if clip_info.subclip:
                        duration = min(
                            duration, clip_info.subclip[1] - clip_info.subclip[0]
                        )
                    clip = clip.set_duration(duration)

            if clip_info.start is not None:
                clip = clip.set_start(clip_info.start)

            # Adjust volume by keypoints
            if len(clip_info.vol_keypoints) > 0:
                clip = _adjust_mpy_audio_clip_volume(clip, clip_info.vol_keypoints)

            clips.append(clip)

        if len(clips) > 0:
            clip = CompositeAudioClip(clips)
            audio_clips.append(clip)

    if final_clip.audio:
        audio_clips.append(final_clip.audio)

    if len(audio_clips) > 0:
        final_audio_clip = CompositeAudioClip(audio_clips)

        # XXX: Workaround for exception: 'CompositeAudioClip' object has no attribute 'fps'.
        # See: https://github.com/Zulko/moviepy/issues/863
        # final_audio_clip.fps = 44100

        final_clip = final_clip.set_audio(final_audio_clip)

    # final_clip.show(10.5, interactive=True)

    os.makedirs("tmp/out", exist_ok=True)
    if audio_only:
        # NOTE(review): if no audio clips exist, ``final_audio_clip`` is
        # unbound here and this raises NameError — confirm intended.
        final_audio_clip.fps = 44100
        final_audio_clip.write_audiofile("%s.mp3" % out_filename)
        open_with("%s.mp3" % out_filename, program_id=0)
    else:
        final_clip.write_videofile(
            "%s.mp4" % out_filename,
            temp_audiofile="%s.mp3" % out_filename,
            remove_temp=False,
            codec="libx264",
            threads=8,
            fps=coreapi.FPS,
            ffmpeg_params=["-crf", "19"],
        )

        # Preview the result in mpv without blocking.
        subprocess.Popen(
            ["mpv", "--force-window", "--geometry=1920x1080", f"{out_filename}.mp4"],
            close_fds=True,
        )
def _adjust_mpy_audio_clip_volume(clip, vol_keypoints):
xp = []
fp = []
print("vol_keypoints:", vol_keypoints)
for (p, vol) in vol_keypoints:
if isinstance(vol, (int, float)):
xp.append(p)
fp.append(vol)
else:
raise Exception("unsupported bgm parameter type:" % type(vol))
def volume_adjust(gf, t):
factor = np.interp(t, xp, fp)
factor = np.vstack([factor, factor]).T
return factor * gf(t)
return clip.fl(volume_adjust)
# def _export_srt():
# with open("out.srt", "w", encoding="utf-8") as f:
# f.write("\n".join(_srt_lines))
def _convert_to_readable_time(seconds):
seconds = int(seconds)
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
if hour > 0:
return "%d:%02d:%02d" % (hour, minutes, seconds)
else:
return "%02d:%02d" % (minutes, seconds)
def _write_timestamp(t, section_name):
    """Append ``section_name`` with a readable timestamp to ``<out_filename>.txt``.

    The output file handle is cached as a function attribute so it is opened
    lazily on first call and stays open (and is flushed) across calls for the
    lifetime of the process.
    """
    os.makedirs(os.path.dirname(out_filename), exist_ok=True)

    if not hasattr(_write_timestamp, "f"):
        # First call: create the chapter/timestamp file next to the output.
        _write_timestamp.f = open("%s.txt" % out_filename, "w", encoding="utf-8")

    _write_timestamp.f.write("%s (%s)\n" % (section_name, _convert_to_readable_time(t)))
    # Flush so progress is visible even if the process is killed mid-render.
    _write_timestamp.f.flush()
@core.api
def include(file):
    """Parse another script file as if it were inlined here.

    Temporarily switches the working directory to the included file's own
    directory so its relative asset paths resolve correctly.
    """
    with open(file, "r", encoding="utf-8") as f:
        s = f.read()

    cwd = os.getcwd()
    os.chdir(os.path.dirname(os.path.abspath(file)))
    try:
        _parse_text(s)
    finally:
        # BUG FIX: restore the working directory even when parsing raises,
        # otherwise every subsequent relative path silently resolves against
        # the included file's directory.
        os.chdir(cwd)
def _remove_unused_recordings(s):
    """Interactively delete files under ``record/`` never referenced by script ``s``.

    The script is parsed with a stub ``record`` API that only collects file
    names; the user is then prompted before anything is removed.
    """
    used_recordings = set()
    apis = {"record": (lambda f, **kargs: used_recordings.add(f))}
    _parse_text(s, apis=apis)

    # Normalize path separators to forward slashes to match script references.
    files = [
        f.replace("\\", "/") for f in glob.glob("record/*") if os.path.isfile(f)
    ]
    unused_recordings = [f for f in files if f not in used_recordings]

    print2("Used : %d" % len(used_recordings), color="green")
    print2("Unused : %d" % len(unused_recordings), color="red")
    # Sanity check: every on-disk file is either used or unused (fails if the
    # script references a recording that no longer exists on disk).
    assert len(used_recordings) + len(unused_recordings) == len(files)
    print("Press y to clean up: ", end="", flush=True)

    if getch() == "y":
        for f in unused_recordings:
            try:
                os.remove(f)
            except:
                print("WARNING: failed to remove: %s" % f)
def _parse_text(text, apis=core.apis, **kwargs):
    """Interpret a video script: ``{{...}}`` python snippets, ``#`` section
    headers, ``---[N]`` audio gaps, and plain narration lines.

    Args:
        text: the script source.
        apis: namespace in which ``{{...}}`` snippets execute; a
            ``parse_line`` callable in it receives each narration line.
        ignore_undefined (keyword): when given, overrides the module-level
            ``ignore_undefined`` flag (undefined names in ``{{...}}`` blocks
            are then silently skipped).
    """
    # BUG FIX: callers such as _show_stats pass ignore_undefined=True, which
    # was silently swallowed by **kwargs; honor it here, falling back to the
    # module-level flag for backward compatibility.
    skip_undefined = kwargs.get("ignore_undefined", ignore_undefined)

    def find_next(text, needle, p):
        # Like str.find, but returns len(text) instead of -1 when not found.
        pos = text.find(needle, p)
        if pos < 0:
            pos = len(text)
        return pos

    # Remove all comments
    text = re.sub(r"<!--[\d\D]*?-->", "", text)

    p = 0  # Current position
    while p < len(text):
        if text[p : p + 2] == "{{":
            # Embedded python snippet.
            end = find_next(text, "}}", p)
            python_code = text[p + 2 : end].strip()
            p = end + 2

            if skip_undefined:
                try:
                    exec(python_code, apis)
                except NameError:  # API is not defined
                    pass  # simply ignore
            else:
                exec(python_code, apis)
            continue

        if text[p : p + 1] == "#":
            # Section header: record a chapter timestamp at the current
            # audio position.
            end = find_next(text, "\n", p)
            line = text[p:end].strip()
            _write_timestamp(coreapi.pos_dict["a"], line)
            p = end + 1
            continue

        match = re.match("---((?:[0-9]*[.])?[0-9]+)?\n", text[p:])
        if match is not None:
            # Audio gap marker, optionally with a duration in seconds.
            if match.group(1) is not None:
                coreapi.audio_gap(float(match.group(1)))
            else:
                coreapi.audio_gap(0.2)
            # NOTE(review): end(0) already includes the newline, so the extra
            # +1 skips one more character — presumably the following blank
            # line; confirm before changing.
            p += match.end(0) + 1
            continue

        # Parse regular text
        end = find_next(text, "\n", p)
        line = text[p:end].strip()
        p = end + 1

        if line != "" and "parse_line" in apis:
            apis["parse_line"](line)

    # Call it at the end
    core.on_api_func(None)
def _show_stats(s):
    """Estimate total narration time from the script's character count,
    print it, and wait for the user to press enter.
    """
    # Empirically measured seconds of speech per character.
    TIME_PER_CHAR = 0.1334154351395731

    char_count = 0

    def parse_line(line):
        nonlocal char_count
        char_count += len(line)

    _parse_text(s, apis={"parse_line": parse_line}, ignore_undefined=True)

    print("Estimated Time: %s" % format_time(TIME_PER_CHAR * char_count))

    input()
def load_config():
    """Load ``config.yaml`` from the project directory and apply its settings.

    Creates the file with defaults when it does not exist.  Missing keys in
    an existing file fall back to the defaults instead of crashing.
    """
    import yaml

    CONFIG_FILE = "config.yaml"
    DEFAULT_CONFIG = {"fps": 30}

    if os.path.exists(CONFIG_FILE):
        with open(CONFIG_FILE, "r") as f:
            config = yaml.load(f.read(), Loader=yaml.FullLoader)
        # ROBUSTNESS FIX: an empty yaml file loads as None, and a hand-edited
        # file may lack keys — merge over the defaults so config["fps"]
        # always exists.
        config = {**DEFAULT_CONFIG, **(config or {})}
    else:
        with open(CONFIG_FILE, "w", newline="\n") as f:
            yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)
        config = DEFAULT_CONFIG

    coreapi.fps(config["fps"])
if __name__ == "__main__":
    # Output basename (no extension); the exporter appends .mp4/.mp3/.txt.
    out_filename = "tmp/out/" + get_time_str()

    parser = argparse.ArgumentParser()
    parser.add_argument("--stdin", default=False, action="store_true")
    parser.add_argument("--proj_dir", type=str, default=None)
    parser.add_argument("-i", "--input", type=str, default=None)
    parser.add_argument("-a", "--audio_only", action="store_true", default=False)
    parser.add_argument(
        "--remove_unused_recordings", action="store_true", default=False
    )
    parser.add_argument("--show_stats", action="store_true", default=False)
    parser.add_argument("--preview", action="store_true", default=False)

    args = parser.parse_args()

    # The dry-run modes below flip this to True. It must have a default so
    # the normal render path cannot hit an unbound global inside
    # _parse_text.
    ignore_undefined = False

    if args.proj_dir is not None:
        os.chdir(args.proj_dir)
    elif args.input:
        # dirname() is "" for a bare filename; os.chdir("") would raise.
        input_dir = os.path.dirname(args.input)
        if input_dir:
            os.chdir(input_dir)
    print("Project dir: %s" % os.getcwd())

    # Load custom APIs (api.py) if exists
    if os.path.exists("api.py"):
        sys.path.append(os.getcwd())
        mymodule = importlib.import_module("api")
        global_functions = inspect.getmembers(mymodule, inspect.isfunction)
        core.apis.update({k: v for k, v in global_functions})

    # HACK
    if args.audio_only:
        coreapi.audio_only()

    # Read text
    if args.stdin:
        s = sys.stdin.read()
    elif args.input:
        with open(args.input, "r", encoding="utf-8") as f:
            s = f.read()
    else:
        raise Exception("Either --stdin or --input should be specified.")

    load_config()

    if args.preview:
        coreapi.preview()

    if args.remove_unused_recordings:
        ignore_undefined = True
        _remove_unused_recordings(s)
    elif args.show_stats:
        ignore_undefined = True
        _show_stats(s)
    else:
        _parse_text(s, apis=core.apis)
        _export_video(resolution=(1920, 1080), audio_only=args.audio_only)
|
normal
|
{
"blob_id": "9e21a39358d97633b49ad83805990c29c19a80ed",
"index": 8599,
"step-1": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n 
prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n 
vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if 
final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\n<mask token>\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\n<mask token>\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % 
f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n 
prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n 
vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if 
final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\n<mask token>\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in 
files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n 
prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n 
vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if 
final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n if not hasattr(_write_timestamp, 'f'):\n _write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=\n 'utf-8')\n _write_timestamp.f.write('%s (%s)\\n' % (section_name,\n _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n 
os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\n<mask token>\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as 
f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\n<mask token>\n",
"step-4": "<mask token>\nif 1:\n change_settings({'FFMPEG_BINARY': get_executable('ffmpeg')})\n\n\ndef _update_mpy_clip(clip, subclip, speed, frame, norm, loop, duration, pos,\n scale, vol, **kwargs):\n assert duration is not None\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(duration -\n subclip_duration)\n clip = concatenate_videoclips([c1, c2])\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(\n duration)\n if speed is not None:\n clip = clip.fx(vfx.speedx, speed)\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n if loop:\n clip = clip.fx(vfx.loop)\n if subclip is None:\n clip = clip.set_duration(duration)\n if pos is not None:\n if pos == 'center':\n clip = clip.set_position(('center', 'center'))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [(x // 2) for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n return clip\n\n\ndef _update_clip_duration(track):\n\n def is_connected(prev_clip, cur_clip):\n return math.isclose(prev_clip.start + prev_clip.duration, cur_clip.\n start, rel_tol=0.001)\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = (clip_info.start - prev_clip_info\n .start)\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n if 
prev_clip_info.crossfade > 0 and not is_connected(prev_clip_info\n , clip_info):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n prev_clip_info = clip_info\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n if 're' in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict['re'] - clip_info\n .start)\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = False\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n audio_clips = []\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(clip_info.subclip[0],\n clip_info.subclip[1])\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n if clip_info.norm:\n audio_clip = audio_clip.fx(afx.audio_normalize)\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(afx.volumex, clip_info.vol)\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(audio_clip,\n clip_info.vol)\n audio_clips.append(audio_clip)\n crossfade_duration = track[i + 1].crossfade if i < len(track\n ) - 1 else 0\n if crossfade_duration:\n clip_info.duration += 
crossfade_duration\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **\n vars(clip_info))\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadein,\n clip_info.fadein)\n elif clip_info.crossfade > 0:\n video_clips.append(clip_info.mpy_clip.set_duration(\n clip_info.crossfade).crossfadein(clip_info.crossfade).\n set_start(clip_info.start))\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.\n crossfade)\n clip_info.start += clip_info.crossfade\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != 'vid':\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout)\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(vfx.fadeout,\n clip_info.fadeout)\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).\n set_duration(2))\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n clip = AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(duration, clip_info.subclip[1] -\n clip_info.subclip[0])\n clip = clip.set_duration(duration)\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.\n vol_keypoints)\n clips.append(clip)\n if 
len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n final_clip = final_clip.set_audio(final_audio_clip)\n os.makedirs('tmp/out', exist_ok=True)\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile('%s.mp3' % out_filename)\n open_with('%s.mp3' % out_filename, program_id=0)\n else:\n final_clip.write_videofile('%s.mp4' % out_filename, temp_audiofile=\n '%s.mp3' % out_filename, remove_temp=False, codec='libx264',\n threads=8, fps=coreapi.FPS, ffmpeg_params=['-crf', '19'])\n subprocess.Popen(['mpv', '--force-window', '--geometry=1920x1080',\n f'{out_filename}.mp4'], close_fds=True)\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n print('vol_keypoints:', vol_keypoints)\n for p, vol in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception('unsupported bgm parameter type:' % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n return clip.fl(volume_adjust)\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n if hour > 0:\n return '%d:%02d:%02d' % (hour, minutes, seconds)\n else:\n return '%02d:%02d' % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n if not hasattr(_write_timestamp, 'f'):\n _write_timestamp.f = open('%s.txt' % out_filename, 'w', encoding=\n 'utf-8')\n _write_timestamp.f.write('%s (%s)\\n' % (section_name,\n _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\[email protected]\ndef include(file):\n with open(file, 'r', encoding='utf-8') as f:\n s = f.read()\n cwd = 
os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n apis = {'record': lambda f, **kargs: used_recordings.add(f)}\n _parse_text(s, apis=apis)\n files = [f for f in glob.glob('record/*') if os.path.isfile(f)]\n files = [f.replace('\\\\', '/') for f in files]\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n print2('Used : %d' % len(used_recordings), color='green')\n print2('Unused : %d' % len(unused_recordings), color='red')\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print('Press y to clean up: ', end='', flush=True)\n if getch() == 'y':\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print('WARNING: failed to remove: %s' % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n text = re.sub('<!--[\\\\d\\\\D]*?-->', '', text)\n p = 0\n while p < len(text):\n if text[p:p + 2] == '{{':\n end = find_next(text, '}}', p)\n python_code = text[p + 2:end].strip()\n p = end + 2\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError:\n pass\n else:\n exec(python_code, apis)\n continue\n if text[p:p + 1] == '#':\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict['a'], line)\n p = end + 1\n continue\n match = re.match('---((?:[0-9]*[.])?[0-9]+)?\\n', text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n end = find_next(text, '\\n', p)\n line = text[p:end].strip()\n p = end + 1\n if line != '' and 'parse_line' in apis:\n apis['parse_line'](line)\n core.on_api_func(None)\n\n\ndef _show_stats(s):\n TIME_PER_CHAR = 0.1334154351395731\n total = 0\n\n def parse_line(line):\n 
nonlocal total\n total += len(line)\n _parse_text(s, apis={'parse_line': parse_line}, ignore_undefined=True)\n total_secs = TIME_PER_CHAR * total\n print('Estimated Time: %s' % format_time(total_secs))\n input()\n\n\ndef load_config():\n import yaml\n CONFIG_FILE = 'config.yaml'\n DEFAULT_CONFIG = {'fps': 30}\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, 'r') as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, 'w', newline='\\n') as f:\n yaml.dump(DEFAULT_CONFIG, f, default_flow_style=False)\n config = DEFAULT_CONFIG\n coreapi.fps(config['fps'])\n\n\nif __name__ == '__main__':\n out_filename = 'tmp/out/' + get_time_str()\n parser = argparse.ArgumentParser()\n parser.add_argument('--stdin', default=False, action='store_true')\n parser.add_argument('--proj_dir', type=str, default=None)\n parser.add_argument('-i', '--input', type=str, default=None)\n parser.add_argument('-a', '--audio_only', action='store_true', default=\n False)\n parser.add_argument('--remove_unused_recordings', action='store_true',\n default=False)\n parser.add_argument('--show_stats', action='store_true', default=False)\n parser.add_argument('--preview', action='store_true', default=False)\n args = parser.parse_args()\n if args.proj_dir is not None:\n os.chdir(args.proj_dir)\n elif args.input:\n os.chdir(os.path.dirname(args.input))\n print('Project dir: %s' % os.getcwd())\n if os.path.exists('api.py'):\n sys.path.append(os.getcwd())\n mymodule = importlib.import_module('api')\n global_functions = inspect.getmembers(mymodule, inspect.isfunction)\n core.apis.update({k: v for k, v in global_functions})\n if args.audio_only:\n coreapi.audio_only()\n if args.stdin:\n s = sys.stdin.read()\n elif args.input:\n with open(args.input, 'r', encoding='utf-8') as f:\n s = f.read()\n else:\n raise Exception('Either --stdin or --input should be specified.')\n load_config()\n if args.preview:\n coreapi.preview()\n if args.remove_unused_recordings:\n 
ignore_undefined = True\n _remove_unused_recordings(s)\n elif args.show_stats:\n ignore_undefined = True\n _show_stats(s)\n else:\n _parse_text(s, apis=core.apis)\n _export_video(resolution=(1920, 1080), audio_only=args.audio_only)\n",
"step-5": "import argparse\nimport glob\nimport importlib\nimport inspect\nimport math\nimport os\nimport re\nimport subprocess\nimport sys\n\nimport moviepy.audio.fx.all as afx\nimport moviepy.video.fx.all as vfx\nimport numpy as np\nfrom _appmanager import get_executable\nfrom _shutil import format_time, get_time_str, getch, print2\nfrom moviepy.config import change_settings\nfrom moviepy.editor import *\nfrom open_with.open_with import open_with\n\nimport codeapi\nimport core\nimport coreapi\nimport datastruct\n\nSCRIPT_ROOT = os.path.dirname(os.path.abspath(__file__))\n\nignore_undefined = False\n\nif 1:\n change_settings({\"FFMPEG_BINARY\": get_executable(\"ffmpeg\")})\n\n\n# def _get_markers(file):\n# marker_file = file + \".marker.txt\"\n# if os.path.exists(marker_file):\n# with open(marker_file, \"r\") as f:\n# s = f.read()\n# return [float(x) for x in s.split()]\n# else:\n# return None\n\n\n# def _load_and_expand_img(f):\n# fg = Image.open(f).convert(\"RGBA\")\n# bg = Image.new(\"RGB\", (1920, 1080))\n# bg.paste(fg, ((bg.width - fg.width) // 2, (bg.height - fg.height) // 2), fg)\n# return np.array(bg)\n\n\ndef _update_mpy_clip(\n clip, subclip, speed, frame, norm, loop, duration, pos, scale, vol, **kwargs,\n):\n assert duration is not None\n\n # video clip operations / fx\n if subclip is not None:\n if isinstance(subclip, (int, float)):\n clip = clip.subclip(subclip).set_duration(duration)\n\n else:\n subclip_duration = subclip[1] - subclip[0]\n if duration > subclip_duration:\n c1 = clip.subclip(subclip[0], subclip[1])\n c2 = clip.to_ImageClip(subclip[1]).set_duration(\n duration - subclip_duration\n )\n clip = concatenate_videoclips([c1, c2])\n\n # HACK: workaround for a bug: 'CompositeAudioClip' object has no attribute 'fps'\n if clip.audio is not None:\n clip = clip.set_audio(clip.audio.set_fps(44100))\n else:\n clip = clip.subclip(subclip[0], subclip[1]).set_duration(duration)\n\n if speed is not None:\n clip = clip.fx(\n # pylint: 
disable=maybe-no-member\n vfx.speedx,\n speed,\n )\n\n if frame is not None:\n clip = clip.to_ImageClip(frame).set_duration(duration)\n\n # Loop or change duration\n if loop:\n clip = clip.fx(\n # pylint: disable=maybe-no-member\n vfx.loop\n )\n\n if subclip is None:\n clip = clip.set_duration(duration)\n\n if pos is not None:\n # (x, y) marks the center location of the of the clip instead of the top\n # left corner.\n if pos == \"center\":\n clip = clip.set_position((\"center\", \"center\"))\n elif isinstance(pos, (list, tuple)):\n pos = list(pos)\n half_size = [x // 2 for x in clip.size]\n for i in range(2):\n if isinstance(pos[i], (int, float)):\n pos[i] = pos[i] - half_size[i]\n pos[i] = int(coreapi.global_scale * pos[i])\n clip = clip.set_position(pos)\n else:\n clip = clip.set_position(pos)\n\n if scale[0] != 1.0 or scale[1] != 1.0:\n clip = clip.resize((int(clip.w * scale[0]), int(clip.h * scale[1])))\n\n return clip\n\n\ndef _update_clip_duration(track):\n def is_connected(prev_clip, cur_clip):\n return math.isclose(\n prev_clip.start + prev_clip.duration, cur_clip.start, rel_tol=1e-3,\n )\n\n prev_clip_info = None\n for clip_info in track:\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n prev_clip_info.duration = clip_info.start - prev_clip_info.start\n prev_clip_info.auto_extend = False\n assert prev_clip_info.duration > 0\n\n # Apply fadeout to previous clip if it's not connected with\n # current clip.\n if prev_clip_info.crossfade > 0 and not is_connected(\n prev_clip_info, clip_info\n ):\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n prev_clip_info = clip_info\n\n # Update last clip duration\n if prev_clip_info is not None:\n if prev_clip_info.auto_extend:\n duration = prev_clip_info.duration\n\n # Extend the last video clip to match the voice track\n if \"re\" in coreapi.pos_dict:\n duration = max(duration, coreapi.pos_dict[\"re\"] - clip_info.start)\n\n prev_clip_info.duration = duration\n prev_clip_info.auto_extend = 
False\n\n if prev_clip_info.crossfade > 0:\n prev_clip_info.fadeout = prev_clip_info.crossfade\n\n\ndef _export_video(*, resolution, audio_only):\n resolution = [int(x * coreapi.global_scale) for x in resolution]\n\n audio_clips = []\n\n # Update clip duration for each track\n for track in datastruct.video_tracks.values():\n _update_clip_duration(track)\n\n # TODO: post-process video track clips\n\n # Update MoviePy clip object in each track.\n video_clips = []\n for track_name, track in datastruct.video_tracks.items():\n for i, clip_info in enumerate(track):\n assert clip_info.mpy_clip is not None\n assert clip_info.duration is not None\n\n # Unlink audio clip from video clip (adjust audio duration)\n if clip_info.no_audio:\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n\n elif clip_info.mpy_clip.audio is not None:\n audio_clip = clip_info.mpy_clip.audio\n clip_info.mpy_clip = clip_info.mpy_clip.set_audio(None)\n\n # Audio timing\n # TODO: audio subclip\n if clip_info.subclip is not None:\n duration = clip_info.subclip[1] - clip_info.subclip[0]\n audio_clip = audio_clip.subclip(\n clip_info.subclip[0], clip_info.subclip[1]\n )\n else:\n duration = clip_info.duration\n duration = min(duration, audio_clip.duration)\n audio_clip = audio_clip.set_duration(duration)\n audio_clip = audio_clip.set_start(clip_info.start)\n\n # Adjust volume\n if clip_info.norm:\n audio_clip = audio_clip.fx(\n # pylint: disable=maybe-no-member\n afx.audio_normalize\n )\n if clip_info.vol is not None:\n if isinstance(clip_info.vol, (int, float)):\n audio_clip = audio_clip.fx(\n # pylint: disable=maybe-no-member\n afx.volumex,\n clip_info.vol,\n )\n else:\n audio_clip = _adjust_mpy_audio_clip_volume(\n audio_clip, clip_info.vol\n )\n\n audio_clips.append(audio_clip)\n\n # If the next clip has crossfade enabled\n crossfade_duration = track[i + 1].crossfade if (i < len(track) - 1) else 0\n if crossfade_duration:\n # clip_info.fadeout = crossfade_duration # Fadeout current clip\n 
clip_info.duration += crossfade_duration\n\n clip_info.mpy_clip = _update_mpy_clip(clip_info.mpy_clip, **vars(clip_info))\n\n # Deal with video fade in / out / crossfade\n if clip_info.fadein:\n assert isinstance(clip_info.fadein, (int, float))\n # TODO: crossfadein and crossfadeout is very slow in moviepy\n if track_name != \"vid\":\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadein(\n clip_info.fadein\n )\n else:\n clip_info.mpy_clip = clip_info.mpy_clip.fx(\n # pylint: disable=maybe-no-member\n vfx.fadein,\n clip_info.fadein,\n )\n\n elif (\n clip_info.crossfade > 0\n ): # crossfade and fadein should not happen at the same time\n video_clips.append(\n clip_info.mpy_clip.set_duration(clip_info.crossfade)\n .crossfadein(clip_info.crossfade)\n .set_start(clip_info.start)\n )\n\n clip_info.mpy_clip = clip_info.mpy_clip.subclip(clip_info.crossfade)\n clip_info.start += clip_info.crossfade\n\n if clip_info.fadeout:\n assert isinstance(clip_info.fadeout, (int, float))\n if track_name != \"vid\":\n # pylint: disable=maybe-no-member\n clip_info.mpy_clip = clip_info.mpy_clip.crossfadeout(\n clip_info.fadeout\n )\n else:\n\n clip_info.mpy_clip = clip_info.mpy_clip.fx(\n # pylint: disable=maybe-no-member\n vfx.fadeout,\n clip_info.fadeout,\n )\n\n video_clips.append(clip_info.mpy_clip.set_start(clip_info.start))\n\n if len(video_clips) == 0:\n video_clips.append(ColorClip((200, 200), color=(0, 1, 0)).set_duration(2))\n # raise Exception(\"no video clips??\")\n final_clip = CompositeVideoClip(video_clips, size=resolution)\n\n # Resize here is too late, does not speed up the video encoding at all.\n # final_clip = final_clip.resize(width=480)\n\n # Deal with audio clips\n for _, track in datastruct.audio_tracks.items():\n clips = []\n for clip_info in track.clips:\n if clip_info.loop:\n # HACK: reload the clip.\n #\n # still don't know why using loaded mpy_clip directly will cause\n # \"IndexError: index -200001 is out of bounds for axis 0 with\n # size 0\"...\n clip = 
AudioFileClip(clip_info.file, buffersize=400000)\n else:\n clip = clip_info.mpy_clip\n\n if clip_info.subclip is not None:\n clip = clip.subclip(clip_info.subclip[0], clip_info.subclip[1])\n\n duration = clip_info.duration\n if duration is not None:\n if clip_info.loop:\n # pylint: disable=maybe-no-member\n clip = clip.fx(afx.audio_loop, duration=duration)\n else:\n duration = min(duration, clip.duration)\n if clip_info.subclip:\n duration = min(\n duration, clip_info.subclip[1] - clip_info.subclip[0]\n )\n clip = clip.set_duration(duration)\n\n if clip_info.start is not None:\n clip = clip.set_start(clip_info.start)\n\n # Adjust volume by keypoints\n if len(clip_info.vol_keypoints) > 0:\n clip = _adjust_mpy_audio_clip_volume(clip, clip_info.vol_keypoints)\n\n clips.append(clip)\n\n if len(clips) > 0:\n clip = CompositeAudioClip(clips)\n audio_clips.append(clip)\n\n if final_clip.audio:\n audio_clips.append(final_clip.audio)\n\n if len(audio_clips) > 0:\n final_audio_clip = CompositeAudioClip(audio_clips)\n\n # XXX: Workaround for exception: 'CompositeAudioClip' object has no attribute 'fps'.\n # See: https://github.com/Zulko/moviepy/issues/863\n # final_audio_clip.fps = 44100\n\n final_clip = final_clip.set_audio(final_audio_clip)\n\n # final_clip.show(10.5, interactive=True)\n\n os.makedirs(\"tmp/out\", exist_ok=True)\n\n if audio_only:\n final_audio_clip.fps = 44100\n final_audio_clip.write_audiofile(\"%s.mp3\" % out_filename)\n open_with(\"%s.mp3\" % out_filename, program_id=0)\n\n else:\n final_clip.write_videofile(\n \"%s.mp4\" % out_filename,\n temp_audiofile=\"%s.mp3\" % out_filename,\n remove_temp=False,\n codec=\"libx264\",\n threads=8,\n fps=coreapi.FPS,\n ffmpeg_params=[\"-crf\", \"19\"],\n )\n\n subprocess.Popen(\n [\"mpv\", \"--force-window\", \"--geometry=1920x1080\", f\"{out_filename}.mp4\"],\n close_fds=True,\n )\n\n\ndef _adjust_mpy_audio_clip_volume(clip, vol_keypoints):\n xp = []\n fp = []\n\n print(\"vol_keypoints:\", vol_keypoints)\n for (p, 
vol) in vol_keypoints:\n if isinstance(vol, (int, float)):\n xp.append(p)\n fp.append(vol)\n else:\n raise Exception(\"unsupported bgm parameter type:\" % type(vol))\n\n def volume_adjust(gf, t):\n factor = np.interp(t, xp, fp)\n factor = np.vstack([factor, factor]).T\n return factor * gf(t)\n\n return clip.fl(volume_adjust)\n\n\n# def _export_srt():\n# with open(\"out.srt\", \"w\", encoding=\"utf-8\") as f:\n# f.write(\"\\n\".join(_srt_lines))\n\n\ndef _convert_to_readable_time(seconds):\n seconds = int(seconds)\n seconds = seconds % (24 * 3600)\n hour = seconds // 3600\n seconds %= 3600\n minutes = seconds // 60\n seconds %= 60\n\n if hour > 0:\n return \"%d:%02d:%02d\" % (hour, minutes, seconds)\n else:\n return \"%02d:%02d\" % (minutes, seconds)\n\n\ndef _write_timestamp(t, section_name):\n os.makedirs(os.path.dirname(out_filename), exist_ok=True)\n\n if not hasattr(_write_timestamp, \"f\"):\n _write_timestamp.f = open(\"%s.txt\" % out_filename, \"w\", encoding=\"utf-8\")\n\n _write_timestamp.f.write(\"%s (%s)\\n\" % (section_name, _convert_to_readable_time(t)))\n _write_timestamp.f.flush()\n\n\[email protected]\ndef include(file):\n with open(file, \"r\", encoding=\"utf-8\") as f:\n s = f.read()\n\n cwd = os.getcwd()\n os.chdir(os.path.dirname(os.path.abspath(file)))\n _parse_text(s)\n os.chdir(cwd)\n\n\ndef _remove_unused_recordings(s):\n used_recordings = set()\n unused_recordings = []\n\n apis = {\"record\": (lambda f, **kargs: used_recordings.add(f))}\n _parse_text(s, apis=apis)\n\n files = [f for f in glob.glob(\"record/*\") if os.path.isfile(f)]\n files = [f.replace(\"\\\\\", \"/\") for f in files]\n\n for f in files:\n if f not in used_recordings:\n unused_recordings.append(f)\n\n print2(\"Used : %d\" % len(used_recordings), color=\"green\")\n print2(\"Unused : %d\" % len(unused_recordings), color=\"red\")\n assert len(used_recordings) + len(unused_recordings) == len(files)\n print(\"Press y to clean up: \", end=\"\", flush=True)\n if getch() == 
\"y\":\n for f in unused_recordings:\n try:\n os.remove(f)\n except:\n print(\"WARNING: failed to remove: %s\" % f)\n\n\ndef _parse_text(text, apis=core.apis, **kwargs):\n def find_next(text, needle, p):\n pos = text.find(needle, p)\n if pos < 0:\n pos = len(text)\n return pos\n\n # Remove all comments\n text = re.sub(r\"<!--[\\d\\D]*?-->\", \"\", text)\n\n p = 0 # Current position\n while p < len(text):\n if text[p : p + 2] == \"{{\":\n end = find_next(text, \"}}\", p)\n python_code = text[p + 2 : end].strip()\n p = end + 2\n\n if ignore_undefined:\n try:\n exec(python_code, apis)\n except NameError: # API is not defined\n pass # simply ignore\n else:\n exec(python_code, apis)\n\n continue\n\n if text[p : p + 1] == \"#\":\n end = find_next(text, \"\\n\", p)\n\n line = text[p:end].strip()\n _write_timestamp(coreapi.pos_dict[\"a\"], line)\n\n p = end + 1\n continue\n\n match = re.match(\"---((?:[0-9]*[.])?[0-9]+)?\\n\", text[p:])\n if match is not None:\n if match.group(1) is not None:\n coreapi.audio_gap(float(match.group(1)))\n else:\n coreapi.audio_gap(0.2)\n p += match.end(0) + 1\n continue\n\n # Parse regular text\n end = find_next(text, \"\\n\", p)\n line = text[p:end].strip()\n p = end + 1\n\n if line != \"\" and \"parse_line\" in apis:\n apis[\"parse_line\"](line)\n\n # Call it at the end\n core.on_api_func(None)\n\n\ndef _show_stats(s):\n TIME_PER_CHAR = 0.1334154351395731\n\n total = 0\n\n def parse_line(line):\n nonlocal total\n total += len(line)\n\n _parse_text(s, apis={\"parse_line\": parse_line}, ignore_undefined=True)\n\n total_secs = TIME_PER_CHAR * total\n print(\"Estimated Time: %s\" % format_time(total_secs))\n\n input()\n\n\ndef load_config():\n import yaml\n\n CONFIG_FILE = \"config.yaml\"\n DEFAULT_CONFIG = {\"fps\": 30}\n\n if os.path.exists(CONFIG_FILE):\n with open(CONFIG_FILE, \"r\") as f:\n config = yaml.load(f.read(), Loader=yaml.FullLoader)\n else:\n with open(CONFIG_FILE, \"w\", newline=\"\\n\") as f:\n yaml.dump(DEFAULT_CONFIG, f, 
default_flow_style=False)\n config = DEFAULT_CONFIG\n\n coreapi.fps(config[\"fps\"])\n\n\nif __name__ == \"__main__\":\n out_filename = \"tmp/out/\" + get_time_str()\n\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--stdin\", default=False, action=\"store_true\")\n parser.add_argument(\"--proj_dir\", type=str, default=None)\n parser.add_argument(\"-i\", \"--input\", type=str, default=None)\n parser.add_argument(\"-a\", \"--audio_only\", action=\"store_true\", default=False)\n parser.add_argument(\n \"--remove_unused_recordings\", action=\"store_true\", default=False\n )\n parser.add_argument(\"--show_stats\", action=\"store_true\", default=False)\n parser.add_argument(\"--preview\", action=\"store_true\", default=False)\n\n args = parser.parse_args()\n\n if args.proj_dir is not None:\n os.chdir(args.proj_dir)\n elif args.input:\n os.chdir(os.path.dirname(args.input))\n print(\"Project dir: %s\" % os.getcwd())\n\n # Load custom APIs (api.py) if exists\n if os.path.exists(\"api.py\"):\n sys.path.append(os.getcwd())\n mymodule = importlib.import_module(\"api\")\n global_functions = inspect.getmembers(mymodule, inspect.isfunction)\n core.apis.update({k: v for k, v in global_functions})\n\n # HACK\n if args.audio_only:\n coreapi.audio_only()\n\n # Read text\n if args.stdin:\n s = sys.stdin.read()\n\n elif args.input:\n with open(args.input, \"r\", encoding=\"utf-8\") as f:\n s = f.read()\n\n else:\n raise Exception(\"Either --stdin or --input should be specified.\")\n\n load_config()\n\n if args.preview:\n coreapi.preview()\n\n if args.remove_unused_recordings:\n ignore_undefined = True\n _remove_unused_recordings(s)\n elif args.show_stats:\n ignore_undefined = True\n _show_stats(s)\n else:\n _parse_text(s, apis=core.apis)\n _export_video(resolution=(1920, 1080), audio_only=args.audio_only)\n",
"step-ids": [
8,
9,
10,
12,
15
]
}
|
[
8,
9,
10,
12,
15
] |
from enum import Enum
class AggregationTypes(Enum):
NO_AGG = 'NO-AGG'
STATIC = 'STATIC'
SUB_HOUR = 'SUB-HOUR'
DYNAMIC = 'DYNAMIC'
|
normal
|
{
"blob_id": "436b89b91aed14525f847e6488b452b7ca0e1b70",
"index": 5322,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AggregationTypes(Enum):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AggregationTypes(Enum):\n NO_AGG = 'NO-AGG'\n STATIC = 'STATIC'\n SUB_HOUR = 'SUB-HOUR'\n DYNAMIC = 'DYNAMIC'\n",
"step-4": "from enum import Enum\n\n\nclass AggregationTypes(Enum):\n NO_AGG = 'NO-AGG'\n STATIC = 'STATIC'\n SUB_HOUR = 'SUB-HOUR'\n DYNAMIC = 'DYNAMIC'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from microbit import *
import radio
radio.on()
# receiver will show the distance to the beacon
# the number of receivers should be easily adjustable
while True:
message=radio.receive_full()
# the stronger the signal the higher the number
if message:
strength = message[1]+100
displaystrength = (int((strength/10)+1))
display.show(str(displaystrength))
sleep(200)
# if beacon is too far, also usable as a sixth level of light intensity
else:
display.show(Image.NO)
|
normal
|
{
"blob_id": "dffa5e2f34788c6f5a5ccc7d8375317a830288b5",
"index": 7994,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nradio.on()\nwhile True:\n message = radio.receive_full()\n if message:\n strength = message[1] + 100\n displaystrength = int(strength / 10 + 1)\n display.show(str(displaystrength))\n sleep(200)\n else:\n display.show(Image.NO)\n",
"step-3": "from microbit import *\nimport radio\nradio.on()\nwhile True:\n message = radio.receive_full()\n if message:\n strength = message[1] + 100\n displaystrength = int(strength / 10 + 1)\n display.show(str(displaystrength))\n sleep(200)\n else:\n display.show(Image.NO)\n",
"step-4": "from microbit import *\nimport radio\nradio.on()\n\n# receiver will show the distance to the beacon\n# the number of receivers should be easily adjustable\nwhile True:\n message=radio.receive_full()\n # the stronger the signal the higher the number\n if message:\n strength = message[1]+100\n displaystrength = (int((strength/10)+1))\n display.show(str(displaystrength))\n sleep(200)\n # if beacon is too far, also usable as a sixth level of light intensity\n else:\n display.show(Image.NO)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import xmlrpclib
import socket
import time
import math
import re
from roundup.exceptions import Reject
REVPAT = re.compile(r'(r[0-9]+\b|rev(ision)? [0-9]+\b)')
def extract_classinfo(db, klass, nodeid, newvalues):
if None == nodeid:
node = newvalues
content = newvalues['content']
else:
node = db.getnode(klass.classname, nodeid)
content = klass.get(nodeid, 'content')
if node.has_key('creation') or node.has_key('date'):
nodets = node.get('creation', node.get('date')).timestamp()
else:
nodets = time.time()
if node.has_key('author') or node.has_key('creator'):
authorid = node.get('author', node.get('creator'))
else:
authorid = db.getuid()
authorage = nodets - db.getnode('user', authorid)['creation'].timestamp()
tokens = ["klass:%s" % klass.classname,
"author:%s" % authorid,
"authorage:%d" % int(math.log(authorage)),
"hasrev:%s" % (REVPAT.search(content) is not None)]
return (content, tokens)
def check_spambayes(db, content, tokens):
try:
spambayes_uri = db.config.detectors['SPAMBAYES_URI']
except KeyError, e:
return (False, str(e))
try:
server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False)
except IOError, e:
return (False, str(e))
try:
prob = server.score({'content':content}, tokens, {})
return (True, prob)
except (socket.error, xmlrpclib.Error), e:
return (False, str(e))
def check_spam(db, klass, nodeid, newvalues):
"""Auditor to score a website submission."""
if newvalues.has_key('spambayes_score'):
if not db.security.hasPermission('SB: May Classify', db.getuid()):
raise ValueError, "You don't have permission to spamclassify messages"
# Don't do anything if we're explicitly setting the score
return
if not newvalues.has_key('content'):
# No need to invoke spambayes if the content of the message
# is unchanged.
return
(content, tokens) = extract_classinfo(db, klass, nodeid, newvalues)
(success, other) = check_spambayes(db, content, tokens)
if success:
newvalues['spambayes_score'] = other
newvalues['spambayes_misclassified'] = False
else:
newvalues['spambayes_score'] = -1
newvalues['spambayes_misclassified'] = True
def init(database):
    """Initialize auditor: hook spam scoring into msg and file changes."""
    # Register the same auditor for creation and modification of both
    # the message and file classes (same order as four explicit calls:
    # msg create, msg set, file create, file set).
    for hyperclass in (database.msg, database.file):
        for event in ('create', 'set'):
            hyperclass.audit(event, check_spam)
|
normal
|
{
"blob_id": "3ec0c20fb2dfed9930885885288cc5d47f4f5ee5",
"index": 6196,
"step-1": "\nimport xmlrpclib\nimport socket\nimport time\nimport math\nimport re\n\nfrom roundup.exceptions import Reject\n\nREVPAT = re.compile(r'(r[0-9]+\\b|rev(ision)? [0-9]+\\b)')\n\ndef extract_classinfo(db, klass, nodeid, newvalues):\n if None == nodeid:\n node = newvalues\n content = newvalues['content']\n else:\n node = db.getnode(klass.classname, nodeid)\n content = klass.get(nodeid, 'content')\n\n if node.has_key('creation') or node.has_key('date'):\n nodets = node.get('creation', node.get('date')).timestamp()\n else:\n nodets = time.time()\n\n if node.has_key('author') or node.has_key('creator'):\n authorid = node.get('author', node.get('creator'))\n else:\n authorid = db.getuid()\n\n authorage = nodets - db.getnode('user', authorid)['creation'].timestamp()\n\n tokens = [\"klass:%s\" % klass.classname,\n \"author:%s\" % authorid,\n \"authorage:%d\" % int(math.log(authorage)),\n \"hasrev:%s\" % (REVPAT.search(content) is not None)]\n\n\n return (content, tokens)\n\ndef check_spambayes(db, content, tokens):\n try:\n spambayes_uri = db.config.detectors['SPAMBAYES_URI']\n except KeyError, e:\n return (False, str(e))\n\n try:\n server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False)\n except IOError, e:\n return (False, str(e))\n\n\n try:\n prob = server.score({'content':content}, tokens, {})\n return (True, prob)\n except (socket.error, xmlrpclib.Error), e:\n return (False, str(e))\n\n\ndef check_spam(db, klass, nodeid, newvalues):\n \"\"\"Auditor to score a website submission.\"\"\"\n\n\n if newvalues.has_key('spambayes_score'):\n if not db.security.hasPermission('SB: May Classify', db.getuid()):\n raise ValueError, \"You don't have permission to spamclassify messages\"\n # Don't do anything if we're explicitly setting the score\n return\n\n if not newvalues.has_key('content'):\n # No need to invoke spambayes if the content of the message\n # is unchanged.\n return\n\n (content, tokens) = extract_classinfo(db, klass, nodeid, newvalues)\n (success, 
other) = check_spambayes(db, content, tokens)\n if success:\n newvalues['spambayes_score'] = other\n newvalues['spambayes_misclassified'] = False\n else:\n newvalues['spambayes_score'] = -1\n newvalues['spambayes_misclassified'] = True\n\ndef init(database):\n \"\"\"Initialize auditor.\"\"\"\n database.msg.audit('create', check_spam)\n database.msg.audit('set', check_spam)\n database.file.audit('create', check_spam)\n database.file.audit('set', check_spam)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from odoo import api, tools, fields, models, _
import base64
from odoo import modules
class InheritUser(models.Model):
    """Extend ``pos.config`` with the link records of users allowed on it."""

    _inherit = 'pos.config'

    # Reverse side of pos.session.users.pos_config.
    related_pos_user = fields.One2many(
        comodel_name='pos.session.users',
        inverse_name='pos_config',
        string='Related User',
    )
class InheritSession(models.Model):
    """Link table connecting ``res.users`` to accessible ``pos.config`` records."""

    _name = 'pos.session.users'

    user = fields.Many2one(comodel_name='res.users')
    pos_config = fields.Many2one(comodel_name='pos.config')
class InheritUser(models.Model):
    """Extend ``res.users`` with the POS configurations a user may access,
    keeping the ``pos.session.users`` link records in sync."""

    _inherit = 'res.users'

    pos_sessions = fields.Many2many('pos.config', string='Point of Sale Accessible')

    @api.multi
    def write(self, vals):
        """Rebuild the user's pos.session.users links when the
        accessible-POS list changes."""
        if 'pos_sessions' in vals:
            # Drop the existing links, then recreate them from the new
            # value (empty command list means: remove all access).
            # NOTE(review): vals['pos_sessions'][0][2] assumes a single
            # (6, 0, ids) command -- confirm callers never send other
            # command types.
            self.env["pos.session.users"].search(
                [('user', '=', self.id)]).unlink()
            if vals['pos_sessions'][0][2]:
                for pos_session in vals['pos_sessions'][0][2]:
                    self.env['pos.session.users'].create(
                        {'pos_config': pos_session, 'user': self.id})
        result = super(InheritUser, self).write(vals)
        return result

    @api.model
    def create(self, vals):
        create_id = super(InheritUser, self).create(vals)
        # Guard against a missing key: the original code raised KeyError
        # whenever a user was created without POS access.
        if vals.get('pos_sessions') and vals['pos_sessions'][0][2]:
            for pos_session in vals['pos_sessions'][0][2]:
                self.env['pos.session.users'].create(
                    {'pos_config': pos_session, 'user': create_id.id})
        return create_id
|
normal
|
{
"blob_id": "2cff5fdfc86793592dd97de90ba9c3a11870b356",
"index": 8987,
"step-1": "<mask token>\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-2": "<mask token>\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-3": "<mask token>\n\n\nclass InheritUser(models.Model):\n <mask token>\n <mask token>\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-4": "<mask token>\n\n\nclass InheritUser(models.Model):\n _inherit = 'pos.config'\n related_pos_user = fields.One2many('pos.session.users', 'pos_config',\n string='Related User')\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n pos_sessions = fields.Many2many('pos.config', string=\n 'Point of Sale Accessible')\n\n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': self.id})\n else:\n self.env['pos.session.users'].search([('user', '=', self.id)]\n ).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config':\n pos_session, 'user': create_id.id})\n return create_id\n",
"step-5": "from odoo import api, tools, fields, models, _\nimport base64\nfrom odoo import modules\n\n\nclass InheritUser(models.Model):\n _inherit = 'pos.config'\n\n related_pos_user = fields.One2many('pos.session.users', 'pos_config', string='Related User')\n\n\nclass InheritSession(models.Model):\n _name = 'pos.session.users'\n\n user = fields.Many2one('res.users')\n pos_config = fields.Many2one('pos.config')\n\n\nclass InheritUser(models.Model):\n _inherit = 'res.users'\n\n pos_sessions = fields.Many2many('pos.config', string='Point of Sale Accessible')\n\n \n @api.multi\n def write(self, vals):\n if 'pos_sessions' in vals:\n if vals['pos_sessions'][0][2]:\n self.env[\"pos.session.users\"].search(\n [('user', '=', self.id)]).unlink()\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config': pos_session, 'user': self.id})\n else:\n self.env[\"pos.session.users\"].search(\n [('user', '=', self.id)]).unlink()\n result = super(InheritUser, self).write(vals)\n return result\n\n @api.model\n def create(self, vals):\n create_id = super(InheritUser, self).create(vals)\n if vals['pos_sessions'][0][2]:\n for pos_session in vals['pos_sessions'][0][2]:\n self.env['pos.session.users'].create({'pos_config': pos_session, 'user': create_id.id})\n return create_id\n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
from django.apps import AppConfig
class ShortenConfig(AppConfig):
    """Django application configuration for the ``shorten`` app."""

    # Use 64-bit integer primary keys for this app's models by default.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'shorten'
|
normal
|
{
"blob_id": "8c2920db7fc49d56aa8da6289cd22272ed3e3283",
"index": 4402,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ShortenConfig(AppConfig):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ShortenConfig(AppConfig):\n default_auto_field = 'django.db.models.BigAutoField'\n name = 'shorten'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass ShortenConfig(AppConfig):\n default_auto_field = 'django.db.models.BigAutoField'\n name = 'shorten'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from flask import Flask, jsonify
import dataExtraction as dataEx
from flask_cors import CORS,cross_origin
from analyseSentiment import twitterDataExtaraction
from flask_pymongo import PyMongo
app = Flask(__name__)
# Default Mongo database (scraping results); the sentiment endpoints
# below read from it via the module-level `db` handle.
app.config["MONGO_URI"] = "mongodb://localhost:27017/scrapingDB"
mongo = PyMongo(app)
db = mongo.db
# Allow cross-origin requests (the frontend is served from another origin).
cors = CORS(app, resources={r"/api/*": {"origins": "*"}})

# Visualisation service part
@app.route('/visualisation/confirmed/<string:country>')
@cross_origin()
def confirmedCases(country):
    """Return the confirmed-cases series for *country* as JSON."""
    return jsonify({"confirmed": dataEx.getData("Confirmed", country).tolist()})
@app.route('/visualisation/recovered/<string:country>')
@cross_origin()
def recoveredCases(country):
    """Return the recovered-cases series for *country* as JSON."""
    return jsonify({"recovered": dataEx.getData("Recovered", country).tolist()})
@app.route('/visualisation/death/<string:country>')
@cross_origin()
def deathCases(country):
    """Return the deaths series for *country* as JSON."""
    return jsonify({"deaths": dataEx.getData("Deaths", country).tolist()})
@app.route('/visualisation/maxofall/<string:country>')
@cross_origin()
def maxofall(country):
    """Return the peak confirmed/recovered/death values for *country*."""
    peaks = dataEx.getMaxOfAll(country).tolist()
    return jsonify({"confirmed": peaks[0], "recovered": peaks[1], "death": peaks[2]})
@app.route('/visualisation/newdata/<string:country>')
@cross_origin()
def NewData(country):
    """Return the latest case totals for *country* and the last-update stamp."""
    # Fetch once: the original called dataEx.getNewData(country) twice,
    # doing the extraction work twice per request.
    result = dataEx.getNewData(country)
    array, lastUpdate = result[0], result[1]
    return jsonify({"totalCases": array[0], "death": array[1], "recovered": array[2], "lastUpdate": lastUpdate})
@app.route('/visualisation/regionsData')
@cross_origin()
def dataByregion():
    """Return per-region case data: names, counts, update stamps and total."""
    info = dataEx.getRegionsData()
    return jsonify({"regions": info[0], "affectedNum": info[1], "update": info[2], "somme": info[3]})
@app.route('/visualisation/StatistiqueMonde')
@cross_origin()
def getStatistiqueMonde():
    """Return worldwide totals (cases, deaths, recovered)."""
    stats = dataEx.getStatistiqueMonde()
    return jsonify({"totalCases": stats[0], "death": stats[1], "recovered": stats[2]})
@app.route('/visualisation/clusterAge')
@cross_origin()
def getClusterAge():
    """Return the age-based clustering result (countries, coords, labels)."""
    result = dataEx.getDataClusterAge()
    payload = {
        "countries": result[0].tolist(),
        "x": result[1].tolist(),
        "y": result[2].tolist(),
        "cluster": result[3].tolist(),
    }
    return jsonify(payload)
@app.route('/visualisation/clusterTest')
@cross_origin()
def getClusterTest():
    """Return the test-rate clustering result (countries, coords, labels)."""
    # Dropped a stray debug print(array) that spammed the server log on
    # every request.
    array = dataEx.getDataClusterTest()
    return jsonify({"countries": array[0].tolist(), "x": array[1].tolist(), "y": array[2].tolist(), "cluster": array[3].tolist()})
@app.route('/visualisation/ageClusterMean')
@cross_origin()
def getMeanClusterAge():
    """Return the per-cluster means for the age-based clustering."""
    # Dropped a stray debug print(array) that spammed the server log on
    # every request.
    array = dataEx.getDataClusterAge()[4]
    return jsonify({"meanClusters": array.tolist()})
@app.route('/visualisation/testClusterMean')
@cross_origin()
def getMeanClusterTest():
    """Return the per-cluster means for the test-rate clustering."""
    means = dataEx.getDataClusterTest()[4]
    return jsonify({"meanClusters": means.tolist()})
@app.route("/analysesentiment/covid19/", defaults={'tags': '#covid19','tags2': ''})
@app.route('/analysesentiment/covid19/<string:tags>/<string:tags2>')
@cross_origin()
def analyseSentiment(tags,tags2):
array = twitterDataExtaraction(tags,tags2)
return jsonify({"neutral": array[0], "negative": array[1], "positive": array[2]})
@app.route('/mongodb/nature')
@cross_origin()
def getNature():
    """Return the most recent nature-topic sentiment document."""
    # Fetch the newest document in a single query; the original
    # count_documents() + skip(count - 1) approach scans the collection
    # and races with concurrent inserts.
    latest = db.nature.find().sort('_id', -1).limit(1)[0]
    return jsonify({"neutral": latest['neutral'], "negative": latest['negative'], "positive": latest['positive']})
@app.route('/mongodb/economy')
@cross_origin()
def getEconomy():
    """Return the most recent economy-topic sentiment document."""
    # Fetch the newest document in a single query; the original
    # count_documents() + skip(count - 1) approach scans the collection
    # and races with concurrent inserts.
    latest = db.economy.find().sort('_id', -1).limit(1)[0]
    return jsonify({"neutral": latest['neutral'], "negative": latest['negative'], "positive": latest['positive']})
@app.route('/mongodb/mentalhealth')
@cross_origin()
def getMentalhealth():
    """Return the most recent mental-health-topic sentiment document."""
    # Fetch the newest document in a single query; the original
    # count_documents() + skip(count - 1) approach scans the collection
    # and races with concurrent inserts.
    latest = db.mentalhealth.find().sort('_id', -1).limit(1)[0]
    return jsonify({"neutral": latest['neutral'], "negative": latest['negative'], "positive": latest['positive']})
@app.route('/mongodb/politics')
@cross_origin()
def getPolitics():
    """Return the most recent politics-topic sentiment document."""
    # Fetch the newest document in a single query; the original
    # count_documents() + skip(count - 1) approach scans the collection
    # and races with concurrent inserts.
    latest = db.politics.find().sort('_id', -1).limit(1)[0]
    return jsonify({"neutral": latest['neutral'], "negative": latest['negative'], "positive": latest['positive']})
@app.route('/visualisation/clusteringAge')
@cross_origin()
def getClusteringAge():
    """Return the latest stored age-based clustering result."""
    # Connect with an explicit URI instead of mutating the global
    # app.config['MONGO_URI'], which would silently repoint any PyMongo
    # instance created later in the process.
    # TODO(review): opening a connection per request is wasteful; hoist
    # this handle to module level.
    cluster_db = PyMongo(app, uri="mongodb://localhost:27017/ClusteringDB").db
    # Newest document in one query (the original count+skip scan was racy
    # and re-ran the cursor for every field access).
    doc = cluster_db.clusteringAge.find().sort('_id', -1).limit(1)[0]
    return jsonify({"countries": doc['countries'], "x": doc['x'],
                    "y": doc['y'], "cluster": doc['cluster']})
@app.route('/visualisation/clusteringTest')
@cross_origin()
def getClusteringTest():
    """Return the latest stored test-rate clustering result."""
    # Connect with an explicit URI instead of mutating the global
    # app.config['MONGO_URI'], which would silently repoint any PyMongo
    # instance created later in the process.
    # TODO(review): opening a connection per request is wasteful; hoist
    # this handle to module level.
    cluster_db = PyMongo(app, uri="mongodb://localhost:27017/ClusteringDB").db
    # Newest document in one query (the original count+skip scan was racy
    # and re-ran the cursor for every field access).
    doc = cluster_db.clusteringTest.find().sort('_id', -1).limit(1)[0]
    return jsonify({"countries": doc['countries'], "x": doc['x'],
                    "y": doc['y'], "cluster": doc['cluster']})
if __name__ == "__main__":
app.run(debug=True)
|
normal
|
{
"blob_id": "17505f5c14190df3311c04c19f687937481b920b",
"index": 1168,
"step-1": "<mask token>\n\n\[email protected]('/visualisation/confirmed/<string:country>')\n@cross_origin()\ndef confirmedCases(country):\n array = dataEx.getData('Confirmed', country).tolist()\n return jsonify({'confirmed': array})\n\n\[email protected]('/visualisation/recovered/<string:country>')\n@cross_origin()\ndef recoveredCases(country):\n array = dataEx.getData('Recovered', country).tolist()\n return jsonify({'recovered': array})\n\n\n<mask token>\n\n\[email protected]('/visualisation/maxofall/<string:country>')\n@cross_origin()\ndef maxofall(country):\n array = dataEx.getMaxOfAll(country).tolist()\n return jsonify({'confirmed': array[0], 'recovered': array[1], 'death':\n array[2]})\n\n\[email protected]('/visualisation/newdata/<string:country>')\n@cross_origin()\ndef NewData(country):\n array = dataEx.getNewData(country)[0]\n lastUpdate = dataEx.getNewData(country)[1]\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n array[2], 'lastUpdate': lastUpdate})\n\n\[email protected]('/visualisation/regionsData')\n@cross_origin()\ndef dataByregion():\n array = dataEx.getRegionsData()\n return jsonify({'regions': array[0], 'affectedNum': array[1], 'update':\n array[2], 'somme': array[3]})\n\n\[email protected]('/visualisation/StatistiqueMonde')\n@cross_origin()\ndef getStatistiqueMonde():\n array = dataEx.getStatistiqueMonde()\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n array[2]})\n\n\[email protected]('/visualisation/clusterAge')\n@cross_origin()\ndef getClusterAge():\n array = dataEx.getDataClusterAge()\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': array[3].tolist()})\n\n\[email protected]('/visualisation/clusterTest')\n@cross_origin()\ndef getClusterTest():\n array = dataEx.getDataClusterTest()\n print(array)\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': 
array[3].tolist()})\n\n\[email protected]('/visualisation/ageClusterMean')\n@cross_origin()\ndef getMeanClusterAge():\n array = dataEx.getDataClusterAge()[4]\n print(array)\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/visualisation/testClusterMean')\n@cross_origin()\ndef getMeanClusterTest():\n array = dataEx.getDataClusterTest()[4]\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/analysesentiment/covid19/', defaults={'tags': '#covid19',\n 'tags2': ''})\[email protected]('/analysesentiment/covid19/<string:tags>/<string:tags2>')\n@cross_origin()\ndef analyseSentiment(tags, tags2):\n array = twitterDataExtaraction(tags, tags2)\n return jsonify({'neutral': array[0], 'negative': array[1], 'positive':\n array[2]})\n\n\[email protected]('/mongodb/nature')\n@cross_origin()\ndef getNature():\n cursor = db.nature.find().skip(db.nature.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/economy')\n@cross_origin()\ndef getEconomy():\n cursor = db.economy.find().skip(db.economy.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/mentalhealth')\n@cross_origin()\ndef getMentalhealth():\n cursor = db.mentalhealth.find().skip(db.mentalhealth.count_documents({}\n ) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/politics')\n@cross_origin()\ndef getPolitics():\n cursor = db.politics.find().skip(db.politics.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/visualisation/clusteringAge')\n@cross_origin()\ndef getClusteringAge():\n 
app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringAge.find().skip(db.clusteringAge.count_documents({\n }) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\[email protected]('/visualisation/clusteringTest')\n@cross_origin()\ndef getClusteringTest():\n app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringTest.find().skip(db.clusteringTest.count_documents\n ({}) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/visualisation/confirmed/<string:country>')\n@cross_origin()\ndef confirmedCases(country):\n array = dataEx.getData('Confirmed', country).tolist()\n return jsonify({'confirmed': array})\n\n\[email protected]('/visualisation/recovered/<string:country>')\n@cross_origin()\ndef recoveredCases(country):\n array = dataEx.getData('Recovered', country).tolist()\n return jsonify({'recovered': array})\n\n\[email protected]('/visualisation/death/<string:country>')\n@cross_origin()\ndef deathCases(country):\n array = dataEx.getData('Deaths', country).tolist()\n return jsonify({'deaths': array})\n\n\[email protected]('/visualisation/maxofall/<string:country>')\n@cross_origin()\ndef maxofall(country):\n array = dataEx.getMaxOfAll(country).tolist()\n return jsonify({'confirmed': array[0], 'recovered': array[1], 'death':\n array[2]})\n\n\[email protected]('/visualisation/newdata/<string:country>')\n@cross_origin()\ndef NewData(country):\n array = dataEx.getNewData(country)[0]\n lastUpdate = dataEx.getNewData(country)[1]\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n array[2], 'lastUpdate': lastUpdate})\n\n\[email protected]('/visualisation/regionsData')\n@cross_origin()\ndef dataByregion():\n array = dataEx.getRegionsData()\n return jsonify({'regions': array[0], 'affectedNum': array[1], 'update':\n array[2], 'somme': array[3]})\n\n\[email protected]('/visualisation/StatistiqueMonde')\n@cross_origin()\ndef getStatistiqueMonde():\n array = dataEx.getStatistiqueMonde()\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n array[2]})\n\n\[email protected]('/visualisation/clusterAge')\n@cross_origin()\ndef getClusterAge():\n array = dataEx.getDataClusterAge()\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': array[3].tolist()})\n\n\[email protected]('/visualisation/clusterTest')\n@cross_origin()\ndef getClusterTest():\n array = 
dataEx.getDataClusterTest()\n print(array)\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': array[3].tolist()})\n\n\[email protected]('/visualisation/ageClusterMean')\n@cross_origin()\ndef getMeanClusterAge():\n array = dataEx.getDataClusterAge()[4]\n print(array)\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/visualisation/testClusterMean')\n@cross_origin()\ndef getMeanClusterTest():\n array = dataEx.getDataClusterTest()[4]\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/analysesentiment/covid19/', defaults={'tags': '#covid19',\n 'tags2': ''})\[email protected]('/analysesentiment/covid19/<string:tags>/<string:tags2>')\n@cross_origin()\ndef analyseSentiment(tags, tags2):\n array = twitterDataExtaraction(tags, tags2)\n return jsonify({'neutral': array[0], 'negative': array[1], 'positive':\n array[2]})\n\n\[email protected]('/mongodb/nature')\n@cross_origin()\ndef getNature():\n cursor = db.nature.find().skip(db.nature.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/economy')\n@cross_origin()\ndef getEconomy():\n cursor = db.economy.find().skip(db.economy.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/mentalhealth')\n@cross_origin()\ndef getMentalhealth():\n cursor = db.mentalhealth.find().skip(db.mentalhealth.count_documents({}\n ) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/politics')\n@cross_origin()\ndef getPolitics():\n cursor = db.politics.find().skip(db.politics.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 
'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/visualisation/clusteringAge')\n@cross_origin()\ndef getClusteringAge():\n app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringAge.find().skip(db.clusteringAge.count_documents({\n }) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\[email protected]('/visualisation/clusteringTest')\n@cross_origin()\ndef getClusteringTest():\n app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringTest.find().skip(db.clusteringTest.count_documents\n ({}) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\[email protected]('/visualisation/confirmed/<string:country>')\n@cross_origin()\ndef confirmedCases(country):\n array = dataEx.getData('Confirmed', country).tolist()\n return jsonify({'confirmed': array})\n\n\[email protected]('/visualisation/recovered/<string:country>')\n@cross_origin()\ndef recoveredCases(country):\n array = dataEx.getData('Recovered', country).tolist()\n return jsonify({'recovered': array})\n\n\[email protected]('/visualisation/death/<string:country>')\n@cross_origin()\ndef deathCases(country):\n array = dataEx.getData('Deaths', country).tolist()\n return jsonify({'deaths': array})\n\n\[email protected]('/visualisation/maxofall/<string:country>')\n@cross_origin()\ndef maxofall(country):\n array = dataEx.getMaxOfAll(country).tolist()\n return jsonify({'confirmed': array[0], 'recovered': array[1], 'death':\n array[2]})\n\n\[email protected]('/visualisation/newdata/<string:country>')\n@cross_origin()\ndef NewData(country):\n array = dataEx.getNewData(country)[0]\n lastUpdate = dataEx.getNewData(country)[1]\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n array[2], 'lastUpdate': lastUpdate})\n\n\[email protected]('/visualisation/regionsData')\n@cross_origin()\ndef dataByregion():\n array = dataEx.getRegionsData()\n return jsonify({'regions': array[0], 'affectedNum': array[1], 'update':\n array[2], 'somme': array[3]})\n\n\[email protected]('/visualisation/StatistiqueMonde')\n@cross_origin()\ndef getStatistiqueMonde():\n array = dataEx.getStatistiqueMonde()\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n array[2]})\n\n\[email protected]('/visualisation/clusterAge')\n@cross_origin()\ndef getClusterAge():\n array = dataEx.getDataClusterAge()\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': array[3].tolist()})\n\n\[email protected]('/visualisation/clusterTest')\n@cross_origin()\ndef getClusterTest():\n array = 
dataEx.getDataClusterTest()\n print(array)\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': array[3].tolist()})\n\n\[email protected]('/visualisation/ageClusterMean')\n@cross_origin()\ndef getMeanClusterAge():\n array = dataEx.getDataClusterAge()[4]\n print(array)\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/visualisation/testClusterMean')\n@cross_origin()\ndef getMeanClusterTest():\n array = dataEx.getDataClusterTest()[4]\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/analysesentiment/covid19/', defaults={'tags': '#covid19',\n 'tags2': ''})\[email protected]('/analysesentiment/covid19/<string:tags>/<string:tags2>')\n@cross_origin()\ndef analyseSentiment(tags, tags2):\n array = twitterDataExtaraction(tags, tags2)\n return jsonify({'neutral': array[0], 'negative': array[1], 'positive':\n array[2]})\n\n\[email protected]('/mongodb/nature')\n@cross_origin()\ndef getNature():\n cursor = db.nature.find().skip(db.nature.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/economy')\n@cross_origin()\ndef getEconomy():\n cursor = db.economy.find().skip(db.economy.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/mentalhealth')\n@cross_origin()\ndef getMentalhealth():\n cursor = db.mentalhealth.find().skip(db.mentalhealth.count_documents({}\n ) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/politics')\n@cross_origin()\ndef getPolitics():\n cursor = db.politics.find().skip(db.politics.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 
'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/visualisation/clusteringAge')\n@cross_origin()\ndef getClusteringAge():\n app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringAge.find().skip(db.clusteringAge.count_documents({\n }) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\[email protected]('/visualisation/clusteringTest')\n@cross_origin()\ndef getClusteringTest():\n app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringTest.find().skip(db.clusteringTest.count_documents\n ({}) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask, jsonify\nimport dataExtraction as dataEx\nfrom flask_cors import CORS, cross_origin\nfrom analyseSentiment import twitterDataExtaraction\nfrom flask_pymongo import PyMongo\napp = Flask(__name__)\napp.config['MONGO_URI'] = 'mongodb://localhost:27017/scrapingDB'\nmongo = PyMongo(app)\ndb = mongo.db\ncors = CORS(app, resources={'/api/*': {'origins': '*'}})\n\n\[email protected]('/visualisation/confirmed/<string:country>')\n@cross_origin()\ndef confirmedCases(country):\n array = dataEx.getData('Confirmed', country).tolist()\n return jsonify({'confirmed': array})\n\n\[email protected]('/visualisation/recovered/<string:country>')\n@cross_origin()\ndef recoveredCases(country):\n array = dataEx.getData('Recovered', country).tolist()\n return jsonify({'recovered': array})\n\n\[email protected]('/visualisation/death/<string:country>')\n@cross_origin()\ndef deathCases(country):\n array = dataEx.getData('Deaths', country).tolist()\n return jsonify({'deaths': array})\n\n\[email protected]('/visualisation/maxofall/<string:country>')\n@cross_origin()\ndef maxofall(country):\n array = dataEx.getMaxOfAll(country).tolist()\n return jsonify({'confirmed': array[0], 'recovered': array[1], 'death':\n array[2]})\n\n\[email protected]('/visualisation/newdata/<string:country>')\n@cross_origin()\ndef NewData(country):\n array = dataEx.getNewData(country)[0]\n lastUpdate = dataEx.getNewData(country)[1]\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n array[2], 'lastUpdate': lastUpdate})\n\n\[email protected]('/visualisation/regionsData')\n@cross_origin()\ndef dataByregion():\n array = dataEx.getRegionsData()\n return jsonify({'regions': array[0], 'affectedNum': array[1], 'update':\n array[2], 'somme': array[3]})\n\n\[email protected]('/visualisation/StatistiqueMonde')\n@cross_origin()\ndef getStatistiqueMonde():\n array = dataEx.getStatistiqueMonde()\n return jsonify({'totalCases': array[0], 'death': array[1], 'recovered':\n 
array[2]})\n\n\[email protected]('/visualisation/clusterAge')\n@cross_origin()\ndef getClusterAge():\n array = dataEx.getDataClusterAge()\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': array[3].tolist()})\n\n\[email protected]('/visualisation/clusterTest')\n@cross_origin()\ndef getClusterTest():\n array = dataEx.getDataClusterTest()\n print(array)\n return jsonify({'countries': array[0].tolist(), 'x': array[1].tolist(),\n 'y': array[2].tolist(), 'cluster': array[3].tolist()})\n\n\[email protected]('/visualisation/ageClusterMean')\n@cross_origin()\ndef getMeanClusterAge():\n array = dataEx.getDataClusterAge()[4]\n print(array)\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/visualisation/testClusterMean')\n@cross_origin()\ndef getMeanClusterTest():\n array = dataEx.getDataClusterTest()[4]\n return jsonify({'meanClusters': array.tolist()})\n\n\[email protected]('/analysesentiment/covid19/', defaults={'tags': '#covid19',\n 'tags2': ''})\[email protected]('/analysesentiment/covid19/<string:tags>/<string:tags2>')\n@cross_origin()\ndef analyseSentiment(tags, tags2):\n array = twitterDataExtaraction(tags, tags2)\n return jsonify({'neutral': array[0], 'negative': array[1], 'positive':\n array[2]})\n\n\[email protected]('/mongodb/nature')\n@cross_origin()\ndef getNature():\n cursor = db.nature.find().skip(db.nature.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/economy')\n@cross_origin()\ndef getEconomy():\n cursor = db.economy.find().skip(db.economy.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/mentalhealth')\n@cross_origin()\ndef getMentalhealth():\n cursor = 
db.mentalhealth.find().skip(db.mentalhealth.count_documents({}\n ) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/mongodb/politics')\n@cross_origin()\ndef getPolitics():\n cursor = db.politics.find().skip(db.politics.count_documents({}) - 1)\n return jsonify({'neutral': cursor[0]['neutral'], 'negative': cursor[0][\n 'negative'], 'positive': cursor[0]['positive']})\n\n\[email protected]('/visualisation/clusteringAge')\n@cross_origin()\ndef getClusteringAge():\n app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringAge.find().skip(db.clusteringAge.count_documents({\n }) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\[email protected]('/visualisation/clusteringTest')\n@cross_origin()\ndef getClusteringTest():\n app.config['MONGO_URI'] = 'mongodb://localhost:27017/ClusteringDB'\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringTest.find().skip(db.clusteringTest.count_documents\n ({}) - 1)\n return jsonify({'countries': array[0]['countries'], 'x': array[0]['x'],\n 'y': array[0]['y'], 'cluster': array[0]['cluster']})\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask, jsonify\nimport dataExtraction as dataEx\nfrom flask_cors import CORS,cross_origin\nfrom analyseSentiment import twitterDataExtaraction\nfrom flask_pymongo import PyMongo\n\napp = Flask(__name__)\napp.config[\"MONGO_URI\"] = \"mongodb://localhost:27017/scrapingDB\"\nmongo = PyMongo(app)\ndb = mongo.db\ncors = CORS(app, resources={r\"/api/*\": {\"origins\": \"*\"}})\n# Visualisation service part\n\[email protected]('/visualisation/confirmed/<string:country>')\n@cross_origin()\ndef confirmedCases(country):\n array = dataEx.getData(\"Confirmed\",country).tolist()\n return jsonify({\"confirmed\" : array})\n\[email protected]('/visualisation/recovered/<string:country>')\n@cross_origin()\ndef recoveredCases(country):\n array = dataEx.getData(\"Recovered\", country).tolist()\n return jsonify({\"recovered\": array})\n\[email protected]('/visualisation/death/<string:country>')\n@cross_origin()\ndef deathCases(country):\n array = dataEx.getData(\"Deaths\", country).tolist()\n return jsonify({\"deaths\": array})\[email protected]('/visualisation/maxofall/<string:country>')\n@cross_origin()\ndef maxofall(country):\n array = dataEx.getMaxOfAll(country).tolist()\n return jsonify({\"confirmed\" : array[0], \"recovered\" : array[1], \"death\" : array[2]})\[email protected]('/visualisation/newdata/<string:country>')\n@cross_origin()\ndef NewData(country):\n array = dataEx.getNewData(country)[0]\n lastUpdate = dataEx.getNewData(country)[1]\n return jsonify({\"totalCases\" :array[0], \"death\" :array[1], \"recovered\" :array[2], \"lastUpdate\" :lastUpdate})\[email protected]('/visualisation/regionsData')\n@cross_origin()\ndef dataByregion():\n array = dataEx.getRegionsData()\n return jsonify({\"regions\":array[0], \"affectedNum\": array[1], \"update\": array[2], \"somme\":array[3]})\n\[email protected]('/visualisation/StatistiqueMonde')\n@cross_origin()\ndef getStatistiqueMonde():\n array = dataEx.getStatistiqueMonde()\n return 
jsonify({\"totalCases\": array[0], \"death\": array[1], \"recovered\": array[2]})\n\[email protected]('/visualisation/clusterAge')\n@cross_origin()\ndef getClusterAge():\n array = dataEx.getDataClusterAge()\n return jsonify({\"countries\": array[0].tolist(), \"x\": array[1].tolist(),\"y\":array[2].tolist(), \"cluster\": array[3].tolist()})\n\[email protected]('/visualisation/clusterTest')\n@cross_origin()\ndef getClusterTest():\n array = dataEx.getDataClusterTest()\n print(array)\n return jsonify({\"countries\": array[0].tolist(), \"x\": array[1].tolist(),\"y\":array[2].tolist(), \"cluster\": array[3].tolist()})\n\[email protected]('/visualisation/ageClusterMean')\n@cross_origin()\ndef getMeanClusterAge():\n array = dataEx.getDataClusterAge()[4]\n print(array)\n return jsonify({\"meanClusters\": array.tolist()})\[email protected]('/visualisation/testClusterMean')\n@cross_origin()\ndef getMeanClusterTest():\n array = dataEx.getDataClusterTest()[4]\n return jsonify({\"meanClusters\": array.tolist()})\n\n\n\[email protected](\"/analysesentiment/covid19/\", defaults={'tags': '#covid19','tags2': ''})\[email protected]('/analysesentiment/covid19/<string:tags>/<string:tags2>')\n@cross_origin()\ndef analyseSentiment(tags,tags2):\n array = twitterDataExtaraction(tags,tags2)\n return jsonify({\"neutral\": array[0], \"negative\": array[1], \"positive\": array[2]})\n\n\[email protected]('/mongodb/nature')\n@cross_origin()\ndef getNature():\n cursor = db.nature.find().skip(db.nature.count_documents({}) - 1)\n return jsonify({\"neutral\": cursor[0]['neutral'], \"negative\": cursor[0]['negative'], \"positive\": cursor[0]['positive']})\n\[email protected]('/mongodb/economy')\n@cross_origin()\ndef getEconomy():\n cursor = db.economy.find().skip(db.economy.count_documents({}) - 1)\n return jsonify({\"neutral\": cursor[0]['neutral'], \"negative\": cursor[0]['negative'], \"positive\": cursor[0]['positive']})\n\n\[email protected]('/mongodb/mentalhealth')\n@cross_origin()\ndef 
getMentalhealth():\n cursor = db.mentalhealth.find().skip(db.mentalhealth.count_documents({}) - 1)\n return jsonify({\"neutral\": cursor[0]['neutral'], \"negative\": cursor[0]['negative'], \"positive\": cursor[0]['positive']})\n\n\[email protected]('/mongodb/politics')\n@cross_origin()\ndef getPolitics():\n cursor = db.politics.find().skip(db.politics.count_documents({}) - 1)\n return jsonify({\"neutral\": cursor[0]['neutral'], \"negative\": cursor[0]['negative'], \"positive\": cursor[0]['positive']})\n\[email protected]('/visualisation/clusteringAge')\n@cross_origin()\ndef getClusteringAge():\n app.config[\"MONGO_URI\"] = \"mongodb://localhost:27017/ClusteringDB\"\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringAge.find().skip(db.clusteringAge.count_documents({}) - 1)\n return jsonify({\"countries\": array[0]['countries'], \"x\": array[0]['x'],\"y\":array[0]['y'], \"cluster\": array[0]['cluster']})\n\n\[email protected]('/visualisation/clusteringTest')\n@cross_origin()\ndef getClusteringTest():\n app.config[\"MONGO_URI\"] = \"mongodb://localhost:27017/ClusteringDB\"\n mongo = PyMongo(app)\n db = mongo.db\n array = db.clusteringTest.find().skip(db.clusteringTest.count_documents({}) - 1)\n return jsonify(\n {\"countries\": array[0]['countries'], \"x\": array[0]['x'], \"y\": array[0]['y'], \"cluster\": array[0]['cluster']})\n\nif __name__ == \"__main__\":\n app.run(debug=True)",
"step-ids": [
17,
18,
19,
21,
22
]
}
|
[
17,
18,
19,
21,
22
] |
import doseresponse as dr
import numpy as np
import scipy.stats as st
import numpy.random as npr
import argparse
import itertools as it
# get rid of for real version
import pandas as pd
import os
# Fix the RNG seed so the inverse-CDF sampling further down is reproducible.
seed = 1
npr.seed(seed)
# Command-line options controlling which drug/channel fits are processed and
# how many posterior-predictive samples are saved for the AP models.
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--samples", type=int, help="number of Hill and pIC50 samples for use in AP model",default=500)
parser.add_argument("-a", "--all", action='store_true', help='construct posterior predictive CDFs for Hill and pIC50 for all drugs and channels', default=False)
parser.add_argument("--num-cores", type=int, help="number of cores to parallelise drug/channel combinations",default=1)
parser.add_argument("-np", "--no-plots", action='store_true', help="don't make any plots, just save posterior predictive samples", default=False)
parser.add_argument("-tu", "--top-up", action='store_true', help="to use with --all, run on all drugs who don't already have MCMC files", default=False)
parser.add_argument("-sy", "--synthetic", action='store_true', help="use synthetic data (only one drug/channel combination exists currently", default=False)
parser.add_argument("-Ne", "--num_expts", type=int, help="how many experiments to fit to", default=0)
parser.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv")
args = parser.parse_args()
# Point the doseresponse module at the data file, then enumerate the
# (drug, channel) combinations to process (all of them when --all is given).
dr.setup(args.data_file)
drugs_to_run, channels_to_run = dr.list_drug_channel_options(args.all)
def construct_posterior_predictive_cdfs(alphas, betas, mus, ss, num_x_pts=501):
    """Average Hill and pIC50 posterior-predictive CDFs/PDFs over MCMC samples.

    Each MCMC iteration i defines a Fisk (log-logistic) distribution for the
    Hill coefficient (shape betas[i], scale alphas[i]) and a logistic
    distribution for pIC50 (location mus[i], scale ss[i]).  The posterior
    predictive is the mixture over iterations, i.e. the pointwise mean of the
    per-iteration CDFs/PDFs evaluated on fixed grids.

    :param alphas: Fisk scale samples for Hill (burn-in already discarded)
    :param betas: Fisk shape samples for Hill (same length as alphas)
    :param mus: logistic location samples for pIC50
    :param ss: logistic scale samples for pIC50
    :param num_x_pts: number of evaluation points per grid; default 501 keeps
        backward compatibility with the original hard-coded value
    :returns: tuple (hill_x, hill_cdf, pic50_x, pic50_cdf, hill_pdf, pic50_pdf)
    """
    # Fixed evaluation windows: Hill in [0, 4], pIC50 in [-2, 12].
    hill_x_range = np.linspace(0., 4., num_x_pts)
    pic50_x_range = np.linspace(-2., 12., num_x_pts)
    num_iterations = len(alphas)  # assuming burn already discarded
    hill_pdf_sum = np.zeros(num_x_pts)
    hill_cdf_sum = np.zeros(num_x_pts)
    pic50_pdf_sum = np.zeros(num_x_pts)
    pic50_cdf_sum = np.zeros(num_x_pts)
    # range (not the Python-2-only xrange) so this also runs under Python 3.
    for i in range(num_iterations):
        hill_cdf_sum += st.fisk.cdf(hill_x_range, c=betas[i], scale=alphas[i], loc=0)
        hill_pdf_sum += st.fisk.pdf(hill_x_range, c=betas[i], scale=alphas[i], loc=0)
        pic50_cdf_sum += st.logistic.cdf(pic50_x_range, mus[i], ss[i])
        pic50_pdf_sum += st.logistic.pdf(pic50_x_range, mus[i], ss[i])
    # Pointwise mean over iterations gives the mixture CDF/PDF.
    hill_cdf_sum /= num_iterations
    pic50_cdf_sum /= num_iterations
    hill_pdf_sum /= num_iterations
    pic50_pdf_sum /= num_iterations
    return (hill_x_range, hill_cdf_sum, pic50_x_range, pic50_cdf_sum,
            hill_pdf_sum, pic50_pdf_sum)
def run(drug_channel):
    """Build and save posterior-predictive summaries for one (drug, channel) pair.

    Loads the hierarchical MCMC chain, discards the first quarter as burn-in,
    averages the per-iteration Hill/pIC50 distributions into posterior
    predictive CDFs/PDFs, optionally plots them, and saves inverse-CDF
    (Hill, pIC50) samples for use in action-potential (AP) models.

    :param drug_channel: (drug, channel) tuple
    :returns: None (all results are written to disk / figure files)
    """
    drug, channel = drug_channel
    print "\n\n{} + {}\n\n".format(drug,channel)
    num_expts, experiment_numbers, experiments = dr.load_crumb_data(drug,channel)
    # When fitting fewer experiments than are available, skip saving the
    # (Hill, pIC50) samples for the AP models.
    if (0 < args.num_expts < num_expts):
        num_expts = args.num_expts
        save_samples_for_APs = False
    else:
        print "Fitting to all experiments\n"
        save_samples_for_APs = True
    drug, channel, output_dir, chain_dir, figs_dir, chain_file = dr.hierarchical_output_dirs_and_chain_file(drug,channel,num_expts)
    try:
        mcmc = np.loadtxt(chain_file,usecols=range(4))
    except IOError:
        # No chain for this combination: report it and move on.
        print "tried loading", chain_file
        print "No MCMC file found for {} + {}\n".format(drug,channel)
        return None
    # Discard the first quarter of the chain as burn-in.
    # NOTE(review): this file is Python 2 ('print' statements), so '/' is
    # integer division here; under Python 3 this would need '//'.
    total_iterations = mcmc.shape[0]
    burn = total_iterations/4
    mcmc = mcmc[burn:,:]
    # Chain columns 0-3 are passed as (alphas, betas, mus, ss): the Fisk
    # parameters for Hill and logistic parameters for pIC50.
    hill_x_range, hill_cdf_sum, pic50_x_range, pic50_cdf_sum, hill_pdf_sum, pic50_pdf_sum = construct_posterior_predictive_cdfs(mcmc[:,0],mcmc[:,1],mcmc[:,2],mcmc[:,3])
    if (not args.no_plots):
        # Import matplotlib lazily with the non-interactive Agg backend so
        # plotting also works on headless machines / worker processes.
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        labels = ["Hill","pIC50"]
        # Side-by-side CDF panels sharing the y-axis.
        fig = plt.figure(figsize=(8,4))
        ax1 = fig.add_subplot(121)
        ax1.plot(hill_x_range,hill_cdf_sum)
        ax1.set_xlim(hill_x_range[0],hill_x_range[-1])
        ax1.set_ylim(0,1)
        ax1.set_xlabel("Hill")
        ax1.set_ylabel("Cumulative distribution")
        ax1.grid()
        ax2 = fig.add_subplot(122,sharey=ax1)
        ax2.plot(pic50_x_range,pic50_cdf_sum)
        ax2.set_xlim(pic50_x_range[0],pic50_x_range[-1])
        ax2.set_xlabel("pIC50")
        ax2.grid()
        plt.setp(ax2.get_yticklabels(), visible=False)
        fig.tight_layout()
        fig.savefig(figs_dir+"{}_{}_posterior_predictive_cdfs.png".format(drug,channel))
        plt.close()
        # One PDF figure per variable.
        xs = [hill_x_range,pic50_x_range]
        ys = [hill_pdf_sum,pic50_pdf_sum]
        labels = ['$Hill$','$pIC50$']
        file_labels = ['hill','pic50']
        for i in xrange(2):
            fig = plt.figure(figsize=(5,4))
            ax = fig.add_subplot(111)
            ax.plot(xs[i],ys[i],color='blue')
            ax.grid()
            ax.set_xlabel(labels[i])
            ax.set_ylabel('Probability density')
            # labels[i][1:-1] strips the surrounding '$' for the title text.
            ax.set_title('{} posterior predictive'.format(labels[i][1:-1]))
            fig.tight_layout()
            fig.savefig(figs_dir+"{}_{}_{}_posterior_predictive.png".format(drug,channel,file_labels[i]))
            plt.close()
    # Save the averaged CDFs as two-column (x, CDF) text files.
    hill_cdf_file, pic50_cdf_file = dr.hierarchical_posterior_predictive_cdf_files(drug,channel,num_expts)
    np.savetxt(hill_cdf_file,np.vstack((hill_x_range, hill_cdf_sum)).T)
    np.savetxt(pic50_cdf_file,np.vstack((pic50_x_range, pic50_cdf_sum)).T)
    # Inverse-transform sampling: map uniform draws through the numerically
    # interpolated inverse posterior-predictive CDFs.
    hill_uniform_samples = npr.rand(args.samples)
    pic50_uniform_samples = npr.rand(args.samples)
    hill_interpolated_inverse_cdf_samples = np.interp(hill_uniform_samples,hill_cdf_sum,hill_x_range)
    pic50_interpolated_inverse_cdf_samples = np.interp(pic50_uniform_samples,pic50_cdf_sum,pic50_x_range)
    # save a number of MCMC samples for use in AP models
    # we currently have it set to 500
    # in theory, the more samples, the better the AP histograms will look!
    if save_samples_for_APs:
        samples_file = dr.hierarchical_hill_and_pic50_samples_for_AP_file(drug,channel)
        with open(samples_file,'w') as outfile:
            outfile.write('# {} samples of (Hill,pIC50) drawn from their posterior predictive distributions, as defined by MCMC samples\n'.format(args.samples))
            np.savetxt(outfile,np.vstack((hill_interpolated_inverse_cdf_samples,pic50_interpolated_inverse_cdf_samples)).T)
    print "\n{} + {} done!\n".format(drug,channel)
    return None
# Every (drug, channel) pair to process.
drugs_channels = it.product(drugs_to_run,channels_to_run)
# Serial path; also taken when there is only one drug regardless of --num-cores.
if (args.num_cores<=1) or (len(drugs_to_run)==1):
    for drug_channel in drugs_channels:
        #run(drug_channel)
        # try/except is good when running multiple MCMCs and leaving them overnight,say
        # if one or more crash then the others will survive!
        # however, if you need more "control", comment out the try/except, and uncomment the other run(drug_channel) line
        try:
            run(drug_channel)
        except Exception,e:  # NOTE(review): Python 2-only 'except' syntax
            print e
            print "Failed to run {} + {}!".format(drug_channel[0],drug_channel[1])
# run multiple MCMCs in parallel
elif (args.num_cores>1):
    import multiprocessing as mp
    # Leave at least one core free for the rest of the machine.
    num_cores = min(args.num_cores, mp.cpu_count()-1)
    pool = mp.Pool(processes=num_cores)
    # .get() with a huge timeout (instead of a bare .get()) keeps the main
    # process responsive to Ctrl-C while the workers run.
    pool.map_async(run,drugs_channels).get(9999999)
    pool.close()
    pool.join()
|
normal
|
{
"blob_id": "2f6baf4de40224f5a3d00ded35e751184ab59d0d",
"index": 9201,
"step-1": "import doseresponse as dr\nimport numpy as np\nimport scipy.stats as st\n\nimport numpy.random as npr\nimport argparse\nimport itertools as it\n\n# get rid of for real version\nimport pandas as pd\nimport os\n\nseed = 1\nnpr.seed(seed)\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"-s\", \"--samples\", type=int, help=\"number of Hill and pIC50 samples for use in AP model\",default=500)\nparser.add_argument(\"-a\", \"--all\", action='store_true', help='construct posterior predictive CDFs for Hill and pIC50 for all drugs and channels', default=False)\nparser.add_argument(\"--num-cores\", type=int, help=\"number of cores to parallelise drug/channel combinations\",default=1)\nparser.add_argument(\"-np\", \"--no-plots\", action='store_true', help=\"don't make any plots, just save posterior predictive samples\", default=False)\nparser.add_argument(\"-tu\", \"--top-up\", action='store_true', help=\"to use with --all, run on all drugs who don't already have MCMC files\", default=False)\nparser.add_argument(\"-sy\", \"--synthetic\", action='store_true', help=\"use synthetic data (only one drug/channel combination exists currently\", default=False)\nparser.add_argument(\"-Ne\", \"--num_expts\", type=int, help=\"how many experiments to fit to\", default=0)\nparser.add_argument(\"--data-file\", type=str, help=\"csv file from which to read in data, in same format as provided crumb_data.csv\")\n\nargs = parser.parse_args()\n\ndr.setup(args.data_file)\n\ndrugs_to_run, channels_to_run = dr.list_drug_channel_options(args.all)\n\ndef construct_posterior_predictive_cdfs(alphas,betas,mus,ss):\n num_x_pts = 501\n hill_min = 0.\n hill_max = 4.\n pic50_min = -2.\n pic50_max = 12.\n hill_x_range = np.linspace(hill_min,hill_max,num_x_pts)\n pic50_x_range = np.linspace(pic50_min,pic50_max,num_x_pts)\n num_iterations = len(alphas) # assuming burn already discarded\n hill_pdf_sum = np.zeros(num_x_pts)\n hill_cdf_sum = np.zeros(num_x_pts)\n pic50_pdf_sum = 
np.zeros(num_x_pts)\n pic50_cdf_sum = np.zeros(num_x_pts)\n fisk = st.fisk.cdf\n fisk_pdf = st.fisk.pdf\n logistic = st.logistic.cdf\n logistic_pdf = st.logistic.pdf\n for i in xrange(num_iterations):\n hill_cdf_sum += fisk(hill_x_range,c=betas[i],scale=alphas[i],loc=0)\n hill_pdf_sum += fisk_pdf(hill_x_range,c=betas[i],scale=alphas[i],loc=0)\n pic50_cdf_sum += logistic(pic50_x_range,mus[i],ss[i])\n pic50_pdf_sum += logistic_pdf(pic50_x_range,mus[i],ss[i])\n hill_cdf_sum /= num_iterations\n pic50_cdf_sum /= num_iterations\n hill_pdf_sum /= num_iterations\n pic50_pdf_sum /= num_iterations\n return hill_x_range, hill_cdf_sum, pic50_x_range, pic50_cdf_sum, hill_pdf_sum, pic50_pdf_sum\n\ndef run(drug_channel):\n\n drug, channel = drug_channel\n \n print \"\\n\\n{} + {}\\n\\n\".format(drug,channel)\n \n num_expts, experiment_numbers, experiments = dr.load_crumb_data(drug,channel)\n if (0 < args.num_expts < num_expts):\n num_expts = args.num_expts\n save_samples_for_APs = False\n else:\n print \"Fitting to all experiments\\n\"\n save_samples_for_APs = True\n \n \n drug, channel, output_dir, chain_dir, figs_dir, chain_file = dr.hierarchical_output_dirs_and_chain_file(drug,channel,num_expts)\n \n\n try:\n mcmc = np.loadtxt(chain_file,usecols=range(4))\n except IOError:\n print \"tried loading\", chain_file\n print \"No MCMC file found for {} + {}\\n\".format(drug,channel)\n return None\n total_iterations = mcmc.shape[0]\n burn = total_iterations/4\n mcmc = mcmc[burn:,:]\n \n \n\n hill_x_range, hill_cdf_sum, pic50_x_range, pic50_cdf_sum, hill_pdf_sum, pic50_pdf_sum = construct_posterior_predictive_cdfs(mcmc[:,0],mcmc[:,1],mcmc[:,2],mcmc[:,3])\n \n if (not args.no_plots):\n import matplotlib\n matplotlib.use('Agg')\n import matplotlib.pyplot as plt \n labels = [\"Hill\",\"pIC50\"]\n fig = plt.figure(figsize=(8,4))\n ax1 = fig.add_subplot(121)\n ax1.plot(hill_x_range,hill_cdf_sum)\n ax1.set_xlim(hill_x_range[0],hill_x_range[-1])\n ax1.set_ylim(0,1)\n 
ax1.set_xlabel(\"Hill\")\n ax1.set_ylabel(\"Cumulative distribution\")\n ax1.grid()\n ax2 = fig.add_subplot(122,sharey=ax1)\n ax2.plot(pic50_x_range,pic50_cdf_sum)\n ax2.set_xlim(pic50_x_range[0],pic50_x_range[-1])\n ax2.set_xlabel(\"pIC50\")\n ax2.grid()\n plt.setp(ax2.get_yticklabels(), visible=False)\n fig.tight_layout()\n fig.savefig(figs_dir+\"{}_{}_posterior_predictive_cdfs.png\".format(drug,channel))\n plt.close()\n xs = [hill_x_range,pic50_x_range]\n ys = [hill_pdf_sum,pic50_pdf_sum]\n labels = ['$Hill$','$pIC50$']\n file_labels = ['hill','pic50']\n for i in xrange(2):\n fig = plt.figure(figsize=(5,4))\n ax = fig.add_subplot(111)\n ax.plot(xs[i],ys[i],color='blue')\n ax.grid()\n ax.set_xlabel(labels[i])\n ax.set_ylabel('Probability density')\n ax.set_title('{} posterior predictive'.format(labels[i][1:-1]))\n fig.tight_layout()\n fig.savefig(figs_dir+\"{}_{}_{}_posterior_predictive.png\".format(drug,channel,file_labels[i]))\n plt.close()\n\n hill_cdf_file, pic50_cdf_file = dr.hierarchical_posterior_predictive_cdf_files(drug,channel,num_expts)\n\n np.savetxt(hill_cdf_file,np.vstack((hill_x_range, hill_cdf_sum)).T)\n np.savetxt(pic50_cdf_file,np.vstack((pic50_x_range, pic50_cdf_sum)).T)\n\n\n hill_uniform_samples = npr.rand(args.samples)\n pic50_uniform_samples = npr.rand(args.samples)\n\n hill_interpolated_inverse_cdf_samples = np.interp(hill_uniform_samples,hill_cdf_sum,hill_x_range)\n pic50_interpolated_inverse_cdf_samples = np.interp(pic50_uniform_samples,pic50_cdf_sum,pic50_x_range)\n\n # save a number of MCMC samples for use in AP models\n # we currently have it set to 500\n # in theory, the more samples, the better the AP histograms will look!\n if save_samples_for_APs:\n samples_file = dr.hierarchical_hill_and_pic50_samples_for_AP_file(drug,channel)\n with open(samples_file,'w') as outfile:\n outfile.write('# {} samples of (Hill,pIC50) drawn from their posterior predictive distributions, as defined by MCMC samples\\n'.format(args.samples))\n 
np.savetxt(outfile,np.vstack((hill_interpolated_inverse_cdf_samples,pic50_interpolated_inverse_cdf_samples)).T)\n\n\n print \"\\n{} + {} done!\\n\".format(drug,channel)\n return None\n \ndrugs_channels = it.product(drugs_to_run,channels_to_run)\nif (args.num_cores<=1) or (len(drugs_to_run)==1):\n for drug_channel in drugs_channels:\n #run(drug_channel)\n \n # try/except is good when running multiple MCMCs and leaving them overnight,say\n # if one or more crash then the others will survive!\n # however, if you need more \"control\", comment out the try/except, and uncomment the other run(drug_channel) line\n try:\n run(drug_channel)\n except Exception,e:\n print e\n print \"Failed to run {} + {}!\".format(drug_channel[0],drug_channel[1])\n# run multiple MCMCs in parallel\nelif (args.num_cores>1):\n import multiprocessing as mp\n num_cores = min(args.num_cores, mp.cpu_count()-1)\n pool = mp.Pool(processes=num_cores)\n pool.map_async(run,drugs_channels).get(9999999)\n pool.close()\n pool.join()\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from django.contrib import admin
from django.contrib.staticfiles.urls import static # book Ch 11.1
from django.urls import path, include
from . import settings_common, settings_dev # book Ch 11.1
import debug_toolbar
# Project URL routing: admin, the login_test_app, allauth account views,
# and the django-debug-toolbar panel.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('login_test_app.urls')),
    path('accounts/', include('allauth.urls')), # allauth default URLs: book p.218
    path('__debug__/', include(debug_toolbar.urls)),
]
# Append the routes returned by django.contrib.staticfiles.urls.static(),
# called with MEDIA_URL/MEDIA_ROOT, so the development server serves media files.
urlpatterns +=static(settings_common.MEDIA_URL, document_root=settings_dev.MEDIA_ROOT)
|
normal
|
{
"blob_id": "ce626afa7c0fd2e190afd92b57a0ebebf19f9e9b",
"index": 6842,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev\n .MEDIA_ROOT)\n",
"step-3": "<mask token>\nurlpatterns = [path('admin/', admin.site.urls), path('', include(\n 'login_test_app.urls')), path('accounts/', include('allauth.urls')),\n path('__debug__/', include(debug_toolbar.urls))]\nurlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev\n .MEDIA_ROOT)\n",
"step-4": "from django.contrib import admin\nfrom django.contrib.staticfiles.urls import static\nfrom django.urls import path, include\nfrom . import settings_common, settings_dev\nimport debug_toolbar\nurlpatterns = [path('admin/', admin.site.urls), path('', include(\n 'login_test_app.urls')), path('accounts/', include('allauth.urls')),\n path('__debug__/', include(debug_toolbar.urls))]\nurlpatterns += static(settings_common.MEDIA_URL, document_root=settings_dev\n .MEDIA_ROOT)\n",
"step-5": "from django.contrib import admin\nfrom django.contrib.staticfiles.urls import static # 本Ch11.1\nfrom django.urls import path, include\n\nfrom . import settings_common, settings_dev # 本Ch11.1\nimport debug_toolbar\n\nurlpatterns = [\n path('admin/', admin.site.urls),\n path('', include('login_test_app.urls')),\n path('accounts/', include('allauth.urls')), # allauthデフォルトURL:本P218\n path('__debug__/', include(debug_toolbar.urls)),\n\n]\n\n# 開発サーバーでMEDIA_ROOT,MEDIA_URLを渡したdjango.contrib.staticfiles.urls.static関数から\n# 返されたルーティングを追加する\nurlpatterns +=static(settings_common.MEDIA_URL, document_root=settings_dev.MEDIA_ROOT)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
from django.apps import AppConfig
class AcademyConfig(AppConfig):
    """Django AppConfig for the 'academy' application."""
    name = 'academy'
    # Human-readable app name shown in the Django admin (Russian for "Academy").
    verbose_name = u"Академия"
|
normal
|
{
"blob_id": "619d2df45d0823930484f030a9a78e71ec718cb7",
"index": 6661,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AcademyConfig(AppConfig):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AcademyConfig(AppConfig):\n name = 'academy'\n verbose_name = u'Академия'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass AcademyConfig(AppConfig):\n name = 'academy'\n verbose_name = u'Академия'\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom django.apps import AppConfig\n\n\nclass AcademyConfig(AppConfig):\n\tname = 'academy'\n\tverbose_name = u\"Академия\"\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python3
import os
import netifaces
# nicList = netifaces.interfaces()
# Put every non-loopback interface into promiscuous mode, then launch the
# packet-capture entry point.  subprocess with an argument list (instead of
# os.system + string concatenation) means interface names are never
# interpreted by a shell, closing a command-injection hole.
import subprocess

NICList = [i for i in netifaces.interfaces() if i != "lo"]

for nic in NICList:
    subprocess.run(["sudo", "ifconfig", nic, "promisc"])
subprocess.run(["sudo", "python", "./src/top.py"])
|
normal
|
{
"blob_id": "b38d23a7de3c805ddde4ed2d236e3c6e7bb5e2d0",
"index": 118,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in NICList:\n os.system('sudo ifconfig ' + i + ' promisc')\nos.system('sudo python ./src/top.py')\n",
"step-3": "<mask token>\nNICList = [i for i in netifaces.interfaces() if i != 'lo']\nfor i in NICList:\n os.system('sudo ifconfig ' + i + ' promisc')\nos.system('sudo python ./src/top.py')\n",
"step-4": "import os\nimport netifaces\nNICList = [i for i in netifaces.interfaces() if i != 'lo']\nfor i in NICList:\n os.system('sudo ifconfig ' + i + ' promisc')\nos.system('sudo python ./src/top.py')\n",
"step-5": "#!/usr/bin/python3\nimport os\nimport netifaces\n\n# nicList = netifaces.interfaces()\nNICList = [i for i in netifaces.interfaces() if i != \"lo\"]\n\nfor i in NICList:\n os.system(\"sudo ifconfig \" + i + \" promisc\")\nos.system(\"sudo python ./src/top.py\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
# Show basic OS/environment info, then create a 'testdir' under the cwd.
print(os.name)
# print(os.environ)  # dumping the whole environment is noisy; query one key
print(os.environ.get('PATH'))
print(os.path.abspath('.'))
# Bug fix: previously the joined path was discarded and os.mkdir() was called
# on the current directory itself, which always raises FileExistsError.
testdir = os.path.join(os.path.abspath('.'), 'testdir')
os.makedirs(testdir, exist_ok=True)  # exist_ok makes re-running the script safe
|
normal
|
{
"blob_id": "fd059ae6e5eb3f7dc18dff6f9ed206002cea5fb2",
"index": 9788,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(os.name)\nprint(os.environ.get('PATH'))\nprint(os.path.abspath('.'))\nos.path.join(os.path.abspath('.'), 'testdir')\nos.mkdir(os.path.abspath('.'))\n",
"step-3": "import os\nprint(os.name)\nprint(os.environ.get('PATH'))\nprint(os.path.abspath('.'))\nos.path.join(os.path.abspath('.'), 'testdir')\nos.mkdir(os.path.abspath('.'))\n",
"step-4": "import os\nprint(os.name)\n#print(os.environ)\nprint(os.environ.get('PATH'))\nprint(os.path.abspath('.'))\nos.path.join(os.path.abspath('.'),'testdir')\nos.mkdir(os.path.abspath('.'))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""Woma objects for dealing with HTTP.
Request and Response inherit from webob's Request and Response objects, so see
http://docs.webob.org/en/latest/ for full documentation. The only things
documented here are the customizations.
"""
from webob import Request as BaseRequest
from webob import Response as BaseResponse
class Client(object):
    """Thin test client that drives a WSGI app and returns its responses."""

    def __init__(self, app):
        self.app = app

    def request(self, path, method, body=None):
        """Send one request to the wrapped app and return the response.

        Falls back to the root path and an empty body when they are omitted.
        """
        blank = BaseRequest.blank(path if path else '/')
        blank.method = method
        blank.text = '' if not body else body
        return blank.get_response(self.app)

    def get(self, path=None):
        """Issue a GET request."""
        return self.request(path, 'GET')

    def post(self, path=None, body=None):
        """Issue a POST request with an optional body."""
        return self.request(path, 'POST', body)

    def put(self, path=None, body=None):
        """Issue a PUT request with an optional body."""
        return self.request(path, 'PUT', body)
class Request(BaseRequest):
    """webob.Request extended with router-related conveniences."""

    @property
    def kwargs(self):
        """The 'router.kwargs' entry from the WSGI environ, or {} when absent."""
        environ = self.environ
        return environ.get('router.kwargs', {})
class Response(BaseResponse):
    """webob.Response whose defaults can be derived from an incoming Request."""

    @classmethod
    def for_request(cls, request):
        """Build a 200 Response mirroring the request's content type and charset.

        >>> request = Request({})
        >>> request.headers['Content-Type'] = 'text/html; charset=latin1'

        >>> response = Response.for_request(request)
        >>> response.content_type
        'text/html'
        >>> response.charset
        'latin1'

        """
        content_type = request.content_type or 'text/plain'
        charset = request.charset or 'UTF-8'
        return cls(status_code=200, content_type=content_type, charset=charset)

    def write(self, text):
        """An alias for `response.text = text`.

        >>> response = Response()
        >>> response.write('some text')
        >>> response.text
        'some text'

        """
        self.text = text
normal
|
{
"blob_id": "ca11e9cf0bcfcbd714c45b5c95bd2c2044b65909",
"index": 384,
"step-1": "<mask token>\n\n\nclass Client(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Request(BaseRequest):\n \"\"\"A webob.Request with additional properties.\"\"\"\n\n @property\n def kwargs(self):\n \"\"\"Returns 'router.kwargs' from environ if present, or {} otherwise.\"\"\"\n return self.environ.get('router.kwargs', {})\n\n\nclass Response(BaseResponse):\n \"\"\"A webob.Response that can be initialized with defaults from request.\"\"\"\n\n @classmethod\n def for_request(cls, request):\n \"\"\"Initialize a Response with defaults based on the request.\n\n >>> request = Request({})\n >>> request.headers['Content-Type'] = 'text/html; charset=latin1'\n\n >>> response = Response.for_request(request)\n >>> response.content_type\n 'text/html'\n >>> response.charset\n 'latin1'\n\n \"\"\"\n return cls(status_code=200, content_type=request.content_type or\n 'text/plain', charset=request.charset or 'UTF-8')\n\n def write(self, text):\n \"\"\"An alias for `response.text = text`.\n\n >>> response = Response()\n >>> response.write('some text')\n >>> response.text\n 'some text'\n\n \"\"\"\n self.text = text\n",
"step-2": "<mask token>\n\n\nclass Client(object):\n <mask token>\n\n def __init__(self, app):\n self.app = app\n <mask token>\n\n def get(self, path=None):\n return self.request(path, 'GET')\n <mask token>\n\n def put(self, path=None, body=None):\n return self.request(path, 'PUT', body)\n\n\nclass Request(BaseRequest):\n \"\"\"A webob.Request with additional properties.\"\"\"\n\n @property\n def kwargs(self):\n \"\"\"Returns 'router.kwargs' from environ if present, or {} otherwise.\"\"\"\n return self.environ.get('router.kwargs', {})\n\n\nclass Response(BaseResponse):\n \"\"\"A webob.Response that can be initialized with defaults from request.\"\"\"\n\n @classmethod\n def for_request(cls, request):\n \"\"\"Initialize a Response with defaults based on the request.\n\n >>> request = Request({})\n >>> request.headers['Content-Type'] = 'text/html; charset=latin1'\n\n >>> response = Response.for_request(request)\n >>> response.content_type\n 'text/html'\n >>> response.charset\n 'latin1'\n\n \"\"\"\n return cls(status_code=200, content_type=request.content_type or\n 'text/plain', charset=request.charset or 'UTF-8')\n\n def write(self, text):\n \"\"\"An alias for `response.text = text`.\n\n >>> response = Response()\n >>> response.write('some text')\n >>> response.text\n 'some text'\n\n \"\"\"\n self.text = text\n",
"step-3": "<mask token>\n\n\nclass Client(object):\n <mask token>\n\n def __init__(self, app):\n self.app = app\n\n def request(self, path, method, body=None):\n path = path or '/'\n request = BaseRequest.blank(path)\n request.method = method\n request.text = body or ''\n return request.get_response(self.app)\n\n def get(self, path=None):\n return self.request(path, 'GET')\n <mask token>\n\n def put(self, path=None, body=None):\n return self.request(path, 'PUT', body)\n\n\nclass Request(BaseRequest):\n \"\"\"A webob.Request with additional properties.\"\"\"\n\n @property\n def kwargs(self):\n \"\"\"Returns 'router.kwargs' from environ if present, or {} otherwise.\"\"\"\n return self.environ.get('router.kwargs', {})\n\n\nclass Response(BaseResponse):\n \"\"\"A webob.Response that can be initialized with defaults from request.\"\"\"\n\n @classmethod\n def for_request(cls, request):\n \"\"\"Initialize a Response with defaults based on the request.\n\n >>> request = Request({})\n >>> request.headers['Content-Type'] = 'text/html; charset=latin1'\n\n >>> response = Response.for_request(request)\n >>> response.content_type\n 'text/html'\n >>> response.charset\n 'latin1'\n\n \"\"\"\n return cls(status_code=200, content_type=request.content_type or\n 'text/plain', charset=request.charset or 'UTF-8')\n\n def write(self, text):\n \"\"\"An alias for `response.text = text`.\n\n >>> response = Response()\n >>> response.write('some text')\n >>> response.text\n 'some text'\n\n \"\"\"\n self.text = text\n",
"step-4": "<mask token>\n\n\nclass Client(object):\n \"\"\"Make requests to a wsgi app and return the response.\"\"\"\n\n def __init__(self, app):\n self.app = app\n\n def request(self, path, method, body=None):\n path = path or '/'\n request = BaseRequest.blank(path)\n request.method = method\n request.text = body or ''\n return request.get_response(self.app)\n\n def get(self, path=None):\n return self.request(path, 'GET')\n\n def post(self, path=None, body=None):\n return self.request(path, 'POST', body)\n\n def put(self, path=None, body=None):\n return self.request(path, 'PUT', body)\n\n\nclass Request(BaseRequest):\n \"\"\"A webob.Request with additional properties.\"\"\"\n\n @property\n def kwargs(self):\n \"\"\"Returns 'router.kwargs' from environ if present, or {} otherwise.\"\"\"\n return self.environ.get('router.kwargs', {})\n\n\nclass Response(BaseResponse):\n \"\"\"A webob.Response that can be initialized with defaults from request.\"\"\"\n\n @classmethod\n def for_request(cls, request):\n \"\"\"Initialize a Response with defaults based on the request.\n\n >>> request = Request({})\n >>> request.headers['Content-Type'] = 'text/html; charset=latin1'\n\n >>> response = Response.for_request(request)\n >>> response.content_type\n 'text/html'\n >>> response.charset\n 'latin1'\n\n \"\"\"\n return cls(status_code=200, content_type=request.content_type or\n 'text/plain', charset=request.charset or 'UTF-8')\n\n def write(self, text):\n \"\"\"An alias for `response.text = text`.\n\n >>> response = Response()\n >>> response.write('some text')\n >>> response.text\n 'some text'\n\n \"\"\"\n self.text = text\n",
"step-5": "\"\"\"Woma objects for dealing with HTTP.\n\nRequest and Response inherit from webob's Request and Response objects, so see\nhttp://docs.webob.org/en/latest/ for full documentation. The only things\ndocumented here are the customizations.\n\n\"\"\"\nfrom webob import Request as BaseRequest\nfrom webob import Response as BaseResponse\n\n\nclass Client(object):\n \"\"\"Make requests to a wsgi app and return the response.\"\"\"\n\n def __init__(self, app):\n self.app = app\n\n def request(self, path, method, body=None):\n path = path or '/'\n request = BaseRequest.blank(path)\n request.method = method\n request.text = body or ''\n return request.get_response(self.app)\n\n def get(self, path=None):\n return self.request(path, 'GET')\n\n def post(self, path=None, body=None):\n return self.request(path, 'POST', body)\n\n def put(self, path=None, body=None):\n return self.request(path, 'PUT', body)\n\n\nclass Request(BaseRequest):\n \"\"\"A webob.Request with additional properties.\"\"\"\n\n @property\n def kwargs(self):\n \"\"\"Returns 'router.kwargs' from environ if present, or {} otherwise.\"\"\"\n return self.environ.get('router.kwargs', {})\n\n\nclass Response(BaseResponse):\n \"\"\"A webob.Response that can be initialized with defaults from request.\"\"\"\n\n @classmethod\n def for_request(cls, request):\n \"\"\"Initialize a Response with defaults based on the request.\n\n >>> request = Request({})\n >>> request.headers['Content-Type'] = 'text/html; charset=latin1'\n\n >>> response = Response.for_request(request)\n >>> response.content_type\n 'text/html'\n >>> response.charset\n 'latin1'\n\n \"\"\"\n return cls(\n status_code=200,\n content_type=request.content_type or 'text/plain',\n charset=request.charset or 'UTF-8')\n\n def write(self, text):\n \"\"\"An alias for `response.text = text`.\n\n >>> response = Response()\n >>> response.write('some text')\n >>> response.text\n 'some text'\n\n \"\"\"\n self.text = text\n",
"step-ids": [
8,
11,
12,
14,
16
]
}
|
[
8,
11,
12,
14,
16
] |
"""
Django settings for gamelibrary project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECURITY WARNING: don't run with debug turned on in production!
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'admin_honeypot',
'bootstrap3',
'el_pagination',
'compressor',
# 'pipeline',
'accounts',
'bot',
'home',
'pages',
'serve_media',
'events',
'gallery',
'groups',
'django_rq',
'surveys',
]
MIDDLEWARE_CLASSES = [
# 'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# 'django.middleware.cache.FetchFromCacheMiddleware',
'csp.middleware.CSPMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'templates/error_pages')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'config.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOCALE_PATHS = [
os.path.join(BASE_DIR, 'static/locale/'),
]
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'assets/')
# Redirect to here after Login
LOGIN_REDIRECT_URL = '/home'
TELEGRAM_TOKEN = os.environ.get('GROUPSOME_TELEGRAM_TOKEN')
TELEGRAM_WEBHOOK_SECRET = os.environ.get('GROUPSOME_TELEGRAM_WEBHOOK_SECRET')
TELEGRAM_BOT_USERNAME = "groupsomebot"
# Media root directory
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_SERVE_USING_NGINX = False
# Needed for Endless Scrolling
# TEMPLATE_CONTEXT_PROCESSORS += (
# 'django.core.context_processors.request',
# )
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
'pipeline.finders.PipelineFinder',
)
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
PIPELINE = {
'PIPELINE_ENABLED': True,
'COMPILERS': (
'pipeline.compilers.stylus.StylusCompiler',
),
'STYLESHEETS': {
'main': {
'source_filenames': (
'style/main.styl',
),
'output_filename': 'style/main.css',
}
},
'STYLUS_ARGUMENTS': '-c',
}
CSP_STYLE_SRC = ("'self'", "'unsafe-inline'", "fonts.googleapis.com")
CSP_FONT_SRC = ("'self'", "fonts.gstatic.com")
|
normal
|
{
"blob_id": "b42414b7d8ed80d8794ab7c49dfde1e5df0721f1",
"index": 1318,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nALLOWED_HOSTS = []\nINSTALLED_APPS = ['django.contrib.admin', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles',\n 'admin_honeypot', 'bootstrap3', 'el_pagination', 'compressor',\n 'accounts', 'bot', 'home', 'pages', 'serve_media', 'events', 'gallery',\n 'groups', 'django_rq', 'surveys']\nMIDDLEWARE_CLASSES = ['django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.locale.LocaleMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'csp.middleware.CSPMiddleware']\nROOT_URLCONF = 'config.urls'\nTEMPLATES = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR,\n 'templates/error_pages')], 'APP_DIRS': True, 'OPTIONS': {\n 'context_processors': ['django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages']}}]\nWSGI_APPLICATION = 'config.wsgi.application'\nAUTH_PASSWORD_VALIDATORS = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = 
True\nLOCALE_PATHS = [os.path.join(BASE_DIR, 'static/locale/')]\nSTATIC_URL = '/static/'\nSTATIC_ROOT = os.path.join(BASE_DIR, 'assets/')\nLOGIN_REDIRECT_URL = '/home'\nTELEGRAM_TOKEN = os.environ.get('GROUPSOME_TELEGRAM_TOKEN')\nTELEGRAM_WEBHOOK_SECRET = os.environ.get('GROUPSOME_TELEGRAM_WEBHOOK_SECRET')\nTELEGRAM_BOT_USERNAME = 'groupsomebot'\nMEDIA_ROOT = os.path.join(BASE_DIR, 'media')\nMEDIA_SERVE_USING_NGINX = False\nSTATICFILES_FINDERS = (\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n 'compressor.finders.CompressorFinder', 'pipeline.finders.PipelineFinder')\nSTATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'\nPIPELINE = {'PIPELINE_ENABLED': True, 'COMPILERS': (\n 'pipeline.compilers.stylus.StylusCompiler',), 'STYLESHEETS': {'main': {\n 'source_filenames': ('style/main.styl',), 'output_filename':\n 'style/main.css'}}, 'STYLUS_ARGUMENTS': '-c'}\nCSP_STYLE_SRC = \"'self'\", \"'unsafe-inline'\", 'fonts.googleapis.com'\nCSP_FONT_SRC = \"'self'\", 'fonts.gstatic.com'\n",
"step-3": "<mask token>\nimport os\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nALLOWED_HOSTS = []\nINSTALLED_APPS = ['django.contrib.admin', 'django.contrib.auth',\n 'django.contrib.contenttypes', 'django.contrib.sessions',\n 'django.contrib.messages', 'django.contrib.staticfiles',\n 'admin_honeypot', 'bootstrap3', 'el_pagination', 'compressor',\n 'accounts', 'bot', 'home', 'pages', 'serve_media', 'events', 'gallery',\n 'groups', 'django_rq', 'surveys']\nMIDDLEWARE_CLASSES = ['django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.locale.LocaleMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'csp.middleware.CSPMiddleware']\nROOT_URLCONF = 'config.urls'\nTEMPLATES = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR,\n 'templates/error_pages')], 'APP_DIRS': True, 'OPTIONS': {\n 'context_processors': ['django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages']}}]\nWSGI_APPLICATION = 'config.wsgi.application'\nAUTH_PASSWORD_VALIDATORS = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ 
= True\nLOCALE_PATHS = [os.path.join(BASE_DIR, 'static/locale/')]\nSTATIC_URL = '/static/'\nSTATIC_ROOT = os.path.join(BASE_DIR, 'assets/')\nLOGIN_REDIRECT_URL = '/home'\nTELEGRAM_TOKEN = os.environ.get('GROUPSOME_TELEGRAM_TOKEN')\nTELEGRAM_WEBHOOK_SECRET = os.environ.get('GROUPSOME_TELEGRAM_WEBHOOK_SECRET')\nTELEGRAM_BOT_USERNAME = 'groupsomebot'\nMEDIA_ROOT = os.path.join(BASE_DIR, 'media')\nMEDIA_SERVE_USING_NGINX = False\nSTATICFILES_FINDERS = (\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n 'compressor.finders.CompressorFinder', 'pipeline.finders.PipelineFinder')\nSTATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'\nPIPELINE = {'PIPELINE_ENABLED': True, 'COMPILERS': (\n 'pipeline.compilers.stylus.StylusCompiler',), 'STYLESHEETS': {'main': {\n 'source_filenames': ('style/main.styl',), 'output_filename':\n 'style/main.css'}}, 'STYLUS_ARGUMENTS': '-c'}\nCSP_STYLE_SRC = \"'self'\", \"'unsafe-inline'\", 'fonts.googleapis.com'\nCSP_FONT_SRC = \"'self'\", 'fonts.gstatic.com'\n",
"step-4": "\"\"\"\nDjango settings for gamelibrary project.\n\nGenerated by 'django-admin startproject' using Django 1.9.5.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/1.9/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/1.9/ref/settings/\n\"\"\"\n\nimport os\n# from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\n\n\n# SECURITY WARNING: don't run with debug turned on in production!\n\nALLOWED_HOSTS = []\n\n\n# Application definition\n\nINSTALLED_APPS = [\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'admin_honeypot',\n 'bootstrap3',\n 'el_pagination',\n 'compressor',\n # 'pipeline',\n 'accounts',\n 'bot',\n 'home',\n 'pages',\n 'serve_media',\n 'events',\n 'gallery',\n 'groups',\n 'django_rq',\n 'surveys',\n]\n\nMIDDLEWARE_CLASSES = [\n # 'django.middleware.cache.UpdateCacheMiddleware',\n 'django.middleware.security.SecurityMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.locale.LocaleMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n # 'django.middleware.cache.FetchFromCacheMiddleware',\n 'csp.middleware.CSPMiddleware',\n]\n\nROOT_URLCONF 
= 'config.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'templates/error_pages')],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'config.wsgi.application'\n\n\n# Password validation\n# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n\n\n# Internationalization\n# https://docs.djangoproject.com/en/1.9/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\nLOCALE_PATHS = [\n os.path.join(BASE_DIR, 'static/locale/'),\n]\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/1.9/howto/static-files/\n\nSTATIC_URL = '/static/'\n\nSTATIC_ROOT = os.path.join(BASE_DIR, 'assets/')\n\n# Redirect to here after Login\nLOGIN_REDIRECT_URL = '/home'\n\nTELEGRAM_TOKEN = os.environ.get('GROUPSOME_TELEGRAM_TOKEN')\nTELEGRAM_WEBHOOK_SECRET = os.environ.get('GROUPSOME_TELEGRAM_WEBHOOK_SECRET')\nTELEGRAM_BOT_USERNAME = \"groupsomebot\"\n\n# Media root directory\nMEDIA_ROOT = os.path.join(BASE_DIR, 'media')\nMEDIA_SERVE_USING_NGINX = False\n\n# Needed for Endless Scrolling\n# TEMPLATE_CONTEXT_PROCESSORS += (\n# 'django.core.context_processors.request',\n# )\n\nSTATICFILES_FINDERS = (\n 
'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n 'compressor.finders.CompressorFinder',\n 'pipeline.finders.PipelineFinder',\n)\n\nSTATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'\n\nPIPELINE = {\n 'PIPELINE_ENABLED': True,\n 'COMPILERS': (\n 'pipeline.compilers.stylus.StylusCompiler',\n ),\n 'STYLESHEETS': {\n 'main': {\n 'source_filenames': (\n 'style/main.styl',\n ),\n 'output_filename': 'style/main.css',\n }\n },\n 'STYLUS_ARGUMENTS': '-c',\n}\n\nCSP_STYLE_SRC = (\"'self'\", \"'unsafe-inline'\", \"fonts.googleapis.com\")\nCSP_FONT_SRC = (\"'self'\", \"fonts.gstatic.com\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/python
#coding:utf-8
"""Extract field 6 of matching log lines and count duplicated values."""
import glob, os
from collections import Counter
#from collections import OrderedDict

# Let the shell pipeline do the filtering: the interesting column is written
# to tmp.txt, so the pipeline's own stdout (and hence `aa`) is empty.
aa = os.popen("grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt").read().strip()

# Second '-'-separated component of every extracted line (may keep its
# trailing newline when the line holds only one '-', as in the original).
res = []
with open('/opt/csvt01/logs/tmp.txt') as tmp_file:
    for line in tmp_file:
        res.append(line.split('-')[1])

# Values seen more than once, mapped to their occurrence count.
# Counter turns the original O(n**2) res.count() loop into one O(n) pass;
# with open() replaces the removed-in-Python-3 file() builtin and
# guarantees the handle is closed even on error.
a = {value: count for value, count in Counter(res).items() if count > 1}
def fun(s):
    """Return (key, value) pairs of mapping *s* sorted by value, highest first.

    ``dict.items()`` replaces the Python-2-only ``iteritems()`` so the
    helper also runs under Python 3; sort semantics are unchanged.
    """
    return sorted(s.items(), key=lambda kv: kv[1], reverse=True)
# Emit the duplicated values, most frequent first (counts are dropped).
d = fun(a)
for i in d:
    # Parenthesised single-argument print behaves identically on Python 2
    # (grouping parens) and Python 3 (function call).
    print(i[0])
|
normal
|
{
"blob_id": "fc8f3be408f4d21de2ae18776cd60177c82bea77",
"index": 2068,
"step-1": "#!/usr/bin/python \n#coding:utf-8 \nimport glob, os\n#from collections import OrderedDict\naa = os.popen(\"grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt\").read().strip()\n#os.system(\"grep -E 'register|cp' all.log |grep -v 'bohan' | awk '{ print $6 }' > /opt/csvt01/logs/tmp.txt\")\n#bb = aa.split('-')[1]\n\nres = []\n\n\nfileName = file('/opt/csvt01/logs/tmp.txt')\n\nwhile True:\n line = fileName.readline()\n if len(line) ==0:break\n a = line.split('-')[1]\n res.append(a)\n \nfileName.close()\n#print res\n\na = {}\nfor i in res:\n if res.count(i)>1:\n a[i] = res.count(i)\n#print (a)\n\n\n\ndef fun(s):\n d = sorted(s.iteritems(),key=lambda t:t[1],reverse=True)\n return d\n \nd = fun(a)\n\nfor i in d:\n print i[0]\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Article(models.Model):
    """An article with a visibility flag and an optional hidden section."""
    title = models.CharField(max_length=200)
    author = models.CharField(max_length=100, default='admin')
    # NOTE(review): max_length on a TextField is enforced only by form
    # widgets, not at the database level.
    content = models.TextField(max_length=5000)
    hide_content = models.TextField(max_length=1000, default='', blank=True)
    # auto_now_add: stamped once when the row is inserted.
    create_time = models.DateTimeField(auto_now_add=True)
    visible = models.BooleanField(default=True)
    description = models.CharField(max_length=200, default='', blank=True)
    # comment = models.ForeignKey(Comment, related_name='article_comment')
    cover = models.CharField(max_length=100, default='/')
    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.title
class Comment(models.Model):
    """A comment attached to an Article, keyed by a user-id string."""
    # on_delete made explicit: CASCADE is the implicit pre-Django-2.0
    # default (behaviour unchanged) and the argument is mandatory from
    # Django 2.0 onwards.
    article = models.ForeignKey(Article, on_delete=models.CASCADE,
                                related_name='article_comment')
    content = models.TextField(max_length=1000, default='')
    # auto_now_add: stamped once when the row is inserted.
    create_time = models.DateTimeField(auto_now_add=True)
    user_id = models.CharField(max_length=100, blank=False)

    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.content
class ZoneSubject(models.Model):
    """An image entry with an optional short title."""
    title = models.CharField(max_length=20, default='', blank=True)
    image_url = models.CharField(max_length=500)
    # default=timezone.now (callable, not called) stamps each row at save
    # time; unlike auto_now_add it can still be overridden explicitly.
    create_time = models.DateTimeField(default=timezone.now)
    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.image_url
class Search_record(models.Model):
    """A titled URL with a cumulative search-hit counter.

    NOTE(review): the class name breaks PascalCase (SearchRecord would be
    conventional); renaming requires a migration and caller updates, so it
    is left unchanged here.
    """
    title = models.CharField(max_length=100)
    url = models.CharField(max_length=100)
    # NOTE(review): blank=True without null=True on an IntegerField lets
    # forms omit the value while the column stays NOT NULL -- confirm
    # callers always supply searchCount before saving.
    searchCount = models.IntegerField(blank=True)
    zhihu_type = models.IntegerField(blank=True, null=True)
    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.title
class DoubanTopic(models.Model):
    """A topic entry with author details and a visibility flag."""
    group_id = models.CharField(max_length=10)
    url = models.CharField(max_length=100)
    title = models.CharField(max_length=100)
    # NOTE(review): stored as text, unlike the DateTimeField used by other
    # models in this module -- presumably the upstream value is already a
    # formatted string; confirm before sorting or filtering on it.
    create_time = models.CharField(max_length=20)
    author_name = models.CharField(max_length=50)
    author_url = models.CharField(max_length=200)
    user_img_small = models.CharField(max_length=200)
    visible = models.BooleanField(default=True)
    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.title
class ZhihuSubject(models.Model):
    """A subject grouping ZhihuQuestion rows (reverse relation:
    'subject_question' on ZhihuQuestion)."""
    title = models.CharField(max_length=200)
    url = models.CharField(max_length=100)
    # Integer discriminator; its meaning is not evident from this module.
    zhihu_type = models.IntegerField()
    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.title
class ZhihuQuestion(models.Model):
    """A question/answer entry belonging to a ZhihuSubject."""
    # on_delete made explicit: CASCADE is the implicit pre-Django-2.0
    # default (behaviour unchanged) and the argument is mandatory from
    # Django 2.0 onwards.
    subject = models.ForeignKey(ZhihuSubject, on_delete=models.CASCADE,
                                related_name='subject_question')
    answer_url = models.CharField(max_length=200)
    author = models.CharField(max_length=100)
    author_url = models.CharField(max_length=200, null=True)
    title = models.CharField(max_length=100, default='')

    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.title
class ZhihuImage(models.Model):
    """An image URL belonging to a ZhihuQuestion."""
    # on_delete made explicit: CASCADE is the implicit pre-Django-2.0
    # default (behaviour unchanged) and the argument is mandatory from
    # Django 2.0 onwards.
    question = models.ForeignKey(ZhihuQuestion, on_delete=models.CASCADE,
                                 related_name='question_image')
    origin_url = models.CharField(max_length=200)

    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.origin_url
class DoubanImage(models.Model):
    """An image URL belonging to a DoubanTopic."""
    # on_delete made explicit: CASCADE is the implicit pre-Django-2.0
    # default (behaviour unchanged) and the argument is mandatory from
    # Django 2.0 onwards.
    topic = models.ForeignKey(DoubanTopic, on_delete=models.CASCADE,
                              related_name='topic_image')
    origin_url = models.CharField(max_length=200)
    # presumably a mirrored copy's URL, empty until populated -- TODO confirm
    cos_url = models.CharField(max_length=200, default='', blank=True)
    type = models.IntegerField(default=0)

    def __unicode__(self):
        # Python 2 human-readable representation.
        return self.origin_url
|
normal
|
{
"blob_id": "0bc53130a4248178f4c3fabbae7d2546f0d5b8fd",
"index": 5996,
"step-1": "<mask token>\n\n\nclass Search_record(models.Model):\n title = models.CharField(max_length=100)\n url = models.CharField(max_length=100)\n searchCount = models.IntegerField(blank=True)\n zhihu_type = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass DoubanTopic(models.Model):\n group_id = models.CharField(max_length=10)\n url = models.CharField(max_length=100)\n title = models.CharField(max_length=100)\n create_time = models.CharField(max_length=20)\n author_name = models.CharField(max_length=50)\n author_url = models.CharField(max_length=200)\n user_img_small = models.CharField(max_length=200)\n visible = models.BooleanField(default=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuSubject(models.Model):\n title = models.CharField(max_length=200)\n url = models.CharField(max_length=100)\n zhihu_type = models.IntegerField()\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuQuestion(models.Model):\n subject = models.ForeignKey(ZhihuSubject, related_name='subject_question')\n answer_url = models.CharField(max_length=200)\n author = models.CharField(max_length=100)\n author_url = models.CharField(max_length=200, null=True)\n title = models.CharField(max_length=100, default='')\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuImage(models.Model):\n question = models.ForeignKey(ZhihuQuestion, related_name='question_image')\n origin_url = models.CharField(max_length=200)\n\n def __unicode__(self):\n return self.origin_url\n\n\nclass DoubanImage(models.Model):\n topic = models.ForeignKey(DoubanTopic, related_name='topic_image')\n origin_url = models.CharField(max_length=200)\n cos_url = models.CharField(max_length=200, default='', blank=True)\n type = models.IntegerField(default=0)\n\n def __unicode__(self):\n return self.origin_url\n",
"step-2": "<mask token>\n\n\nclass Comment(models.Model):\n article = models.ForeignKey(Article, related_name='article_comment')\n content = models.TextField(max_length=1000, default='')\n create_time = models.DateTimeField(auto_now_add=True)\n user_id = models.CharField(max_length=100, blank=False)\n\n def __unicode__(self):\n return self.content\n\n\nclass ZoneSubject(models.Model):\n title = models.CharField(max_length=20, default='', blank=True)\n image_url = models.CharField(max_length=500)\n create_time = models.DateTimeField(default=timezone.now)\n\n def __unicode__(self):\n return self.image_url\n\n\nclass Search_record(models.Model):\n title = models.CharField(max_length=100)\n url = models.CharField(max_length=100)\n searchCount = models.IntegerField(blank=True)\n zhihu_type = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass DoubanTopic(models.Model):\n group_id = models.CharField(max_length=10)\n url = models.CharField(max_length=100)\n title = models.CharField(max_length=100)\n create_time = models.CharField(max_length=20)\n author_name = models.CharField(max_length=50)\n author_url = models.CharField(max_length=200)\n user_img_small = models.CharField(max_length=200)\n visible = models.BooleanField(default=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuSubject(models.Model):\n title = models.CharField(max_length=200)\n url = models.CharField(max_length=100)\n zhihu_type = models.IntegerField()\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuQuestion(models.Model):\n subject = models.ForeignKey(ZhihuSubject, related_name='subject_question')\n answer_url = models.CharField(max_length=200)\n author = models.CharField(max_length=100)\n author_url = models.CharField(max_length=200, null=True)\n title = models.CharField(max_length=100, default='')\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuImage(models.Model):\n question = 
models.ForeignKey(ZhihuQuestion, related_name='question_image')\n origin_url = models.CharField(max_length=200)\n\n def __unicode__(self):\n return self.origin_url\n\n\nclass DoubanImage(models.Model):\n topic = models.ForeignKey(DoubanTopic, related_name='topic_image')\n origin_url = models.CharField(max_length=200)\n cos_url = models.CharField(max_length=200, default='', blank=True)\n type = models.IntegerField(default=0)\n\n def __unicode__(self):\n return self.origin_url\n",
"step-3": "<mask token>\n\n\nclass Article(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Comment(models.Model):\n article = models.ForeignKey(Article, related_name='article_comment')\n content = models.TextField(max_length=1000, default='')\n create_time = models.DateTimeField(auto_now_add=True)\n user_id = models.CharField(max_length=100, blank=False)\n\n def __unicode__(self):\n return self.content\n\n\nclass ZoneSubject(models.Model):\n title = models.CharField(max_length=20, default='', blank=True)\n image_url = models.CharField(max_length=500)\n create_time = models.DateTimeField(default=timezone.now)\n\n def __unicode__(self):\n return self.image_url\n\n\nclass Search_record(models.Model):\n title = models.CharField(max_length=100)\n url = models.CharField(max_length=100)\n searchCount = models.IntegerField(blank=True)\n zhihu_type = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass DoubanTopic(models.Model):\n group_id = models.CharField(max_length=10)\n url = models.CharField(max_length=100)\n title = models.CharField(max_length=100)\n create_time = models.CharField(max_length=20)\n author_name = models.CharField(max_length=50)\n author_url = models.CharField(max_length=200)\n user_img_small = models.CharField(max_length=200)\n visible = models.BooleanField(default=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuSubject(models.Model):\n title = models.CharField(max_length=200)\n url = models.CharField(max_length=100)\n zhihu_type = models.IntegerField()\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuQuestion(models.Model):\n subject = models.ForeignKey(ZhihuSubject, related_name='subject_question')\n answer_url = models.CharField(max_length=200)\n author = models.CharField(max_length=100)\n author_url = models.CharField(max_length=200, null=True)\n title = 
models.CharField(max_length=100, default='')\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuImage(models.Model):\n question = models.ForeignKey(ZhihuQuestion, related_name='question_image')\n origin_url = models.CharField(max_length=200)\n\n def __unicode__(self):\n return self.origin_url\n\n\nclass DoubanImage(models.Model):\n topic = models.ForeignKey(DoubanTopic, related_name='topic_image')\n origin_url = models.CharField(max_length=200)\n cos_url = models.CharField(max_length=200, default='', blank=True)\n type = models.IntegerField(default=0)\n\n def __unicode__(self):\n return self.origin_url\n",
"step-4": "<mask token>\n\n\nclass Article(models.Model):\n title = models.CharField(max_length=200)\n author = models.CharField(max_length=100, default='admin')\n content = models.TextField(max_length=5000)\n hide_content = models.TextField(max_length=1000, default='', blank=True)\n create_time = models.DateTimeField(auto_now_add=True)\n visible = models.BooleanField(default=True)\n description = models.CharField(max_length=200, default='', blank=True)\n cover = models.CharField(max_length=100, default='/')\n\n def __unicode__(self):\n return self.title\n\n\nclass Comment(models.Model):\n article = models.ForeignKey(Article, related_name='article_comment')\n content = models.TextField(max_length=1000, default='')\n create_time = models.DateTimeField(auto_now_add=True)\n user_id = models.CharField(max_length=100, blank=False)\n\n def __unicode__(self):\n return self.content\n\n\nclass ZoneSubject(models.Model):\n title = models.CharField(max_length=20, default='', blank=True)\n image_url = models.CharField(max_length=500)\n create_time = models.DateTimeField(default=timezone.now)\n\n def __unicode__(self):\n return self.image_url\n\n\nclass Search_record(models.Model):\n title = models.CharField(max_length=100)\n url = models.CharField(max_length=100)\n searchCount = models.IntegerField(blank=True)\n zhihu_type = models.IntegerField(blank=True, null=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass DoubanTopic(models.Model):\n group_id = models.CharField(max_length=10)\n url = models.CharField(max_length=100)\n title = models.CharField(max_length=100)\n create_time = models.CharField(max_length=20)\n author_name = models.CharField(max_length=50)\n author_url = models.CharField(max_length=200)\n user_img_small = models.CharField(max_length=200)\n visible = models.BooleanField(default=True)\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuSubject(models.Model):\n title = models.CharField(max_length=200)\n url = 
models.CharField(max_length=100)\n zhihu_type = models.IntegerField()\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuQuestion(models.Model):\n subject = models.ForeignKey(ZhihuSubject, related_name='subject_question')\n answer_url = models.CharField(max_length=200)\n author = models.CharField(max_length=100)\n author_url = models.CharField(max_length=200, null=True)\n title = models.CharField(max_length=100, default='')\n\n def __unicode__(self):\n return self.title\n\n\nclass ZhihuImage(models.Model):\n question = models.ForeignKey(ZhihuQuestion, related_name='question_image')\n origin_url = models.CharField(max_length=200)\n\n def __unicode__(self):\n return self.origin_url\n\n\nclass DoubanImage(models.Model):\n topic = models.ForeignKey(DoubanTopic, related_name='topic_image')\n origin_url = models.CharField(max_length=200)\n cos_url = models.CharField(max_length=200, default='', blank=True)\n type = models.IntegerField(default=0)\n\n def __unicode__(self):\n return self.origin_url\n",
"step-5": "from __future__ import unicode_literals\nfrom django.db import models\nfrom django.utils import timezone\n\n# Create your models here.\n\n\n\nclass Article(models.Model):\n title = models.CharField(max_length=200)\n author = models.CharField(max_length=100, default='admin')\n content = models.TextField(max_length=5000)\n hide_content = models.TextField(max_length=1000, default='', blank=True)\n create_time = models.DateTimeField(auto_now_add=True)\n visible = models.BooleanField(default=True)\n description = models.CharField(max_length=200, default='', blank=True)\n # comment = models.ForeignKey(Comment, related_name='article_comment')\n cover = models.CharField(max_length=100, default='/')\n def __unicode__(self):\n return self.title\n\nclass Comment(models.Model):\n article = models.ForeignKey(Article, related_name='article_comment')\n content = models.TextField(max_length=1000, default='')\n create_time = models.DateTimeField(auto_now_add=True)\n user_id = models.CharField(max_length=100, blank=False)\n def __unicode__(self):\n return self.content\n\n\nclass ZoneSubject(models.Model):\n title = models.CharField(max_length=20, default='', blank=True)\n image_url = models.CharField(max_length=500)\n create_time = models.DateTimeField(default=timezone.now)\n def __unicode__(self):\n return self.image_url\n\n\nclass Search_record(models.Model):\n title = models.CharField(max_length=100)\n url = models.CharField(max_length=100)\n searchCount = models.IntegerField(blank=True)\n zhihu_type = models.IntegerField(blank=True, null=True)\n def __unicode__(self):\n return self.title\n\nclass DoubanTopic(models.Model):\n group_id = models.CharField(max_length=10)\n url = models.CharField(max_length=100)\n title = models.CharField(max_length=100)\n create_time = models.CharField(max_length=20)\n author_name = models.CharField(max_length=50)\n author_url = models.CharField(max_length=200)\n user_img_small = models.CharField(max_length=200)\n visible = 
models.BooleanField(default=True)\n def __unicode__(self):\n return self.title\n\nclass ZhihuSubject(models.Model):\n title = models.CharField(max_length=200)\n url = models.CharField(max_length=100)\n zhihu_type = models.IntegerField()\n def __unicode__(self):\n return self.title\n\nclass ZhihuQuestion(models.Model):\n subject = models.ForeignKey(ZhihuSubject, related_name='subject_question')\n answer_url = models.CharField(max_length=200)\n author = models.CharField(max_length=100)\n author_url = models.CharField(max_length=200,null=True)\n title = models.CharField(max_length=100, default='')\n def __unicode__(self):\n return self.title\n\nclass ZhihuImage(models.Model):\n question = models.ForeignKey(ZhihuQuestion, related_name='question_image')\n origin_url = models.CharField(max_length=200)\n def __unicode__(self):\n return self.origin_url\n\n\nclass DoubanImage(models.Model):\n topic = models.ForeignKey(DoubanTopic, related_name='topic_image')\n origin_url = models.CharField(max_length=200)\n cos_url = models.CharField(max_length=200, default='',blank=True)\n type = models.IntegerField(default=0)\n def __unicode__(self):\n return self.origin_url\n\n\n\n\n",
"step-ids": [
18,
24,
25,
27,
29
]
}
|
[
18,
24,
25,
27,
29
] |
"""
Create all figures and Excel files that combine data from all embryos in a given genetic background
Copyright (C) 2017 Ahmet Ay, Dong Mai, Soo Bin Kwon, Ha Vu
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys, shared, os
from subprocess import call
DEFAULT_NUM_BIN = 5
def _run_step(description, command):
	"""Print *description*, run *command* as a subprocess, and exit(1) on failure.

	Any non-zero exit code counts as failure (the original code compared the
	return code to 1 only, silently ignoring other failure codes such as 2).
	"""
	print(description)
	if call(command) != 0:
		exit(1)

def main():
	"""Combine per-embryo slice data and run every downstream analysis script.

	Command-line arguments:
	  -ne/--number-of-embryos <int>  number of embryo slice files (required)
	  -nb/--number-of-bins <int>     number of bins (default: DEFAULT_NUM_BIN)
	  -d/--output-directory <dir>    output directory (required)
	  -i/--input-files <files...>    one slices.xls path per embryo (required;
	                                 must appear after -ne so the file count
	                                 is known when -i is parsed)

	Exits with status 1 on invalid arguments, missing input files, or when any
	analysis sub-script fails.
	"""
	args = sys.argv[1:]
	num_args = len(args)
	req_args = [False]*3  # seen -ne, -d, -i respectively
	num_bins = DEFAULT_NUM_BIN
	if num_args >= 6:
		i = 0
		while i < num_args - 1:
			option = args[i]
			value = args[i+1]
			if (option == '-ne' or option == '--number-of-embryos') and shared.isInt(value):
				num_embryos = int(value)
				req_args[0] = True
				i += 2
			elif (option == '-nb' or option == '--number-of-bins') and shared.isInt(value):
				num_bins = int(value)
				i += 2
			elif option == '-d' or option == '--output-directory':
				directory = value
				req_args[1] = True
				i += 2
			# NOTE(review): the (num_args-7)==num_embryos check means -nb is
			# effectively mandatory despite having a default — kept as-is.
			elif req_args[0] and (option == '-i' or option == '--input-files') and ((num_args - 7) == num_embryos):
				slice_files = args[i+1:i+1+num_embryos]
				for f in slice_files:
					if not os.path.isfile(f):
						print("combine_embryos.py: File "+f+" does not exist.")
						exit(1)
				req_args[2] = True
				# Skip the '-i' flag itself plus all of its file arguments.
				# (Bug fix: the original 'i += num_embryos' left i pointing at
				# the last file, breaking parsing whenever -i was not last.)
				i += num_embryos + 1
			else:
				usage()
		for arg in req_args:
			if not arg:
				usage()
	else:
		usage()
	shared.ensureDir(directory)
	# Argument tail shared by most sub-scripts: embryo count, slice files, out dir.
	embryo_args = [str(num_embryos)] + slice_files + [directory]

	### Spatial amplitude ###
	_run_step("Plotting spatial amplitude...", ["python", "plot_spatial_amplitude.py"] + embryo_args)
	# (compare_spatial_amplitude.py can run after plot_spatial_amplitude.py is run for all genetic backgrounds)

	'''### Burst size and frequency ### -- disabled MATLAB-dependent stage
	# 1. create_burst_data.py
	print("Creating data for estimate_burst_parameters.m...")
	command = ["python","create_burst_data.py",str(num_embryos)] + slice_files + [directory]
	if 1==call(command):
		exit(1)
	# 2. estimate_burst_parameters.m
	print("Running estimate_burst_parameters.m on MATLAB...")
	command = ['/Applications/MATLAB_R2016a.app/bin/matlab','-nodesktop','-nosplash','-nodisplay','-r','estimate_burst_parameters(\''+directory+'/burst_data.xls\',\''+directory+'\')']
	if 1==call(command): # this will automatically open and run MATLAB
		exit(1)
	# 3. plot_estimated_burst_parameters.py using the output from estimate_burst_parameters.m
	print("Plotting estimated burst size and frequencies...")
	command = ["python","plot_estimated_burst_parameters.py",directory+"/burst_result.xls",directory]
	if 1==call(command):
		exit(1)'''
	# (compare_burst_parameters.py can run after plot_estimated_burst_parameters.py is run for all genetic backgrounds)

	# Fano factor (to demonstrate burstiness)
	_run_step("Plotting fano factor...", ["python", "plot_fano_factor.py"] + embryo_args)
	# (compare_fano_factor.py can run after plot_fano_factor.py is run for all genetic backgrounds)

	### Noise ###
	# Intrinsic and extrinsic noise (takes the bin count as an extra argument)
	_run_step("Plotting intrinsic and extrinsic noise...",
		["python", "plot_noise.py", str(num_embryos), str(num_bins)] + slice_files + [directory])
	# (compare_noise.py can run after plot_noise.py is run for all genetic backgrounds)

	### Scatter plot of her1 and her7 for all bins ####
	_run_step("Plotting scatter plots of her1 vs her7 mRNAs in all bins ...",
		["python", "plot_scatter_her1_her7.py", directory + "/combined_slices.xls", str(num_bins), directory])

	# Spatial noise (coefficient of variation squared across space)
	_run_step("Plotting spatial noise (coefficient of variation squared across space)...",
		["python", "plot_CVsquared.py"] + embryo_args)
	# (compare_grouped_CVsquared.py and compare_CV_squared.py can run after plot_CVsquared.py is run for all genetic backgrounds)

	### Raw data Excel files ###
	_run_step("Creating Excel files for RNA expression levels...",
		["python", "create_raw_expression_excel.py"] + embryo_args)
	_run_step("Creating Excel files for spacial noise...",
		["python", "create_raw_spacial_noise_excel.py"] + embryo_args)
	_run_step("Creating Excel files for noise...",
		["python", "create_raw_noise_excel.py"] + embryo_args)
def usage():
	"""Print command-line usage information and terminate with exit status 1."""
	print("combine_embryos.py: Invalid command-line arguments.")
	print("Format: combine_embryos.py -ne <number of embryos> -nb <number of bins> -d <output directory> -i <first embryo's slice.xls> <second embryo's slice.xls> ... <last embryo's slice.xls>")
	# Single string literal instead of a backslash continuation: the original
	# continuation leaked a literal tab character into the printed example.
	print("Example: python combine_embryos.py -ne 20 -d ../wildtypefulldataset/output -nb 5 -i ../wildtypefulldataset/output/embryo1/slices.xls ../wildtypefulldataset/output/embryo2/slices.xls .... ../wildtypefulldataset/output/embryo20/slices.xls")
	exit(1)
main()
|
normal
|
{
"blob_id": "86928f4358e4999a5cec8bfad1fe055c9a2778d1",
"index": 6230,
"step-1": "<mask token>\n\n\ndef main():\n args = sys.argv[1:]\n num_args = len(args)\n req_args = [False] * 3\n num_bins = DEFAULT_NUM_BIN\n if num_args >= 6:\n i = 0\n while i < num_args - 1:\n option = args[i]\n value = args[i + 1]\n if (option == '-ne' or option == '--number-of-embryos'\n ) and shared.isInt(value):\n num_embryos = int(value)\n req_args[0] = True\n i += 2\n elif (option == '-nb' or option == '--number-of-bins'\n ) and shared.isInt(value):\n num_bins = int(value)\n i += 2\n elif option == '-d' or option == '--output-directory':\n directory = value\n req_args[1] = True\n i += 2\n elif req_args[0] and (option == '-i' or option == '--input-files'\n ) and num_args - 7 == num_embryos:\n slice_files = args[i + 1:i + 1 + num_embryos]\n for f in slice_files:\n if not os.path.isfile(f):\n print('combine_embryos.py: File ' + f +\n ' does not exist.')\n exit(1)\n req_args[2] = True\n i += num_embryos\n else:\n usage()\n for arg in req_args:\n if not arg:\n usage()\n else:\n usage()\n shared.ensureDir(directory)\n print('Plotting spatial amplitude...')\n command = ['python', 'plot_spatial_amplitude.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n \"\"\"### Burst size and frequency ###\n\t# 1. create_burst_data.py\n\tprint(\"Creating data for estimate_burst_parameters.m...\")\n\tcommand = [\"python\",\"create_burst_data.py\",str(num_embryos)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\n\t# 2. estimate_burst_parameters.m \n\tprint(\"Running estimate_burst_parameters.m on MATLAB...\")\n\tcommand = ['/Applications/MATLAB_R2016a.app/bin/matlab','-nodesktop','-nosplash','-nodisplay','-r','estimate_burst_parameters(''+directory+'/burst_data.xls',''+directory+'')']\n\tif 1==call(command): # this will automatically open and run MATLAB\n\t\texit(1)\n\t\n\t# 3. 
plot_estimated_burst_parameters.py using the output from estimate_burst_parameters.m \t\n\tprint(\"Plotting estimated burst size and frequencies...\")\n\tcommand = [\"python\",\"plot_estimated_burst_parameters.py\",directory+\"/burst_result.xls\",directory]\n\tif 1==call(command):\n\t\texit(1)\"\"\"\n command = ['python', 'plot_fano_factor.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Plotting fano factor...')\n if 1 == call(command):\n exit(1)\n print('Plotting intrinsic and extrinsic noise...')\n command = ['python', 'plot_noise.py', str(num_embryos), str(num_bins)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n print('Plotting scatter plots of her1 vs her7 mRNAs in all bins ...')\n command = ['python', 'plot_scatter_her1_her7.py', directory +\n '/combined_slices.xls', str(num_bins), directory]\n if 1 == call(command):\n exit(1)\n print(\n 'Plotting spatial noise (coefficient of variation squared across space)...'\n )\n command = ['python', 'plot_CVsquared.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_expression_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for RNA expression levels...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_spacial_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for spacial noise...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for noise...')\n if 1 == call(command):\n exit(1)\n\n\ndef usage():\n print('combine_embryos.py: Invalid command-line arguments.')\n print(\n \"Format: combine_embryos.py -ne <number of embryos> -nb <number of bins> -d <output directory> -i <first embryo's slice.xls> <second embryo's slice.xls> ... 
<last embryo's slice.xls>\"\n )\n print(\n 'Example: python combine_embryos.py -ne 20 -d ../wildtypefulldataset/output -nb 5 -i ../wildtypefulldataset/output/embryo1/slices.xls \\t../wildtypefulldataset/output/embryo2/slices.xls .... ../wildtypefulldataset/output/embryo20/slices.xls'\n )\n exit(1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n args = sys.argv[1:]\n num_args = len(args)\n req_args = [False] * 3\n num_bins = DEFAULT_NUM_BIN\n if num_args >= 6:\n i = 0\n while i < num_args - 1:\n option = args[i]\n value = args[i + 1]\n if (option == '-ne' or option == '--number-of-embryos'\n ) and shared.isInt(value):\n num_embryos = int(value)\n req_args[0] = True\n i += 2\n elif (option == '-nb' or option == '--number-of-bins'\n ) and shared.isInt(value):\n num_bins = int(value)\n i += 2\n elif option == '-d' or option == '--output-directory':\n directory = value\n req_args[1] = True\n i += 2\n elif req_args[0] and (option == '-i' or option == '--input-files'\n ) and num_args - 7 == num_embryos:\n slice_files = args[i + 1:i + 1 + num_embryos]\n for f in slice_files:\n if not os.path.isfile(f):\n print('combine_embryos.py: File ' + f +\n ' does not exist.')\n exit(1)\n req_args[2] = True\n i += num_embryos\n else:\n usage()\n for arg in req_args:\n if not arg:\n usage()\n else:\n usage()\n shared.ensureDir(directory)\n print('Plotting spatial amplitude...')\n command = ['python', 'plot_spatial_amplitude.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n \"\"\"### Burst size and frequency ###\n\t# 1. create_burst_data.py\n\tprint(\"Creating data for estimate_burst_parameters.m...\")\n\tcommand = [\"python\",\"create_burst_data.py\",str(num_embryos)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\n\t# 2. estimate_burst_parameters.m \n\tprint(\"Running estimate_burst_parameters.m on MATLAB...\")\n\tcommand = ['/Applications/MATLAB_R2016a.app/bin/matlab','-nodesktop','-nosplash','-nodisplay','-r','estimate_burst_parameters(''+directory+'/burst_data.xls',''+directory+'')']\n\tif 1==call(command): # this will automatically open and run MATLAB\n\t\texit(1)\n\t\n\t# 3. 
plot_estimated_burst_parameters.py using the output from estimate_burst_parameters.m \t\n\tprint(\"Plotting estimated burst size and frequencies...\")\n\tcommand = [\"python\",\"plot_estimated_burst_parameters.py\",directory+\"/burst_result.xls\",directory]\n\tif 1==call(command):\n\t\texit(1)\"\"\"\n command = ['python', 'plot_fano_factor.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Plotting fano factor...')\n if 1 == call(command):\n exit(1)\n print('Plotting intrinsic and extrinsic noise...')\n command = ['python', 'plot_noise.py', str(num_embryos), str(num_bins)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n print('Plotting scatter plots of her1 vs her7 mRNAs in all bins ...')\n command = ['python', 'plot_scatter_her1_her7.py', directory +\n '/combined_slices.xls', str(num_bins), directory]\n if 1 == call(command):\n exit(1)\n print(\n 'Plotting spatial noise (coefficient of variation squared across space)...'\n )\n command = ['python', 'plot_CVsquared.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_expression_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for RNA expression levels...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_spacial_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for spacial noise...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for noise...')\n if 1 == call(command):\n exit(1)\n\n\ndef usage():\n print('combine_embryos.py: Invalid command-line arguments.')\n print(\n \"Format: combine_embryos.py -ne <number of embryos> -nb <number of bins> -d <output directory> -i <first embryo's slice.xls> <second embryo's slice.xls> ... 
<last embryo's slice.xls>\"\n )\n print(\n 'Example: python combine_embryos.py -ne 20 -d ../wildtypefulldataset/output -nb 5 -i ../wildtypefulldataset/output/embryo1/slices.xls \\t../wildtypefulldataset/output/embryo2/slices.xls .... ../wildtypefulldataset/output/embryo20/slices.xls'\n )\n exit(1)\n\n\nmain()\n",
"step-3": "<mask token>\nDEFAULT_NUM_BIN = 5\n\n\ndef main():\n args = sys.argv[1:]\n num_args = len(args)\n req_args = [False] * 3\n num_bins = DEFAULT_NUM_BIN\n if num_args >= 6:\n i = 0\n while i < num_args - 1:\n option = args[i]\n value = args[i + 1]\n if (option == '-ne' or option == '--number-of-embryos'\n ) and shared.isInt(value):\n num_embryos = int(value)\n req_args[0] = True\n i += 2\n elif (option == '-nb' or option == '--number-of-bins'\n ) and shared.isInt(value):\n num_bins = int(value)\n i += 2\n elif option == '-d' or option == '--output-directory':\n directory = value\n req_args[1] = True\n i += 2\n elif req_args[0] and (option == '-i' or option == '--input-files'\n ) and num_args - 7 == num_embryos:\n slice_files = args[i + 1:i + 1 + num_embryos]\n for f in slice_files:\n if not os.path.isfile(f):\n print('combine_embryos.py: File ' + f +\n ' does not exist.')\n exit(1)\n req_args[2] = True\n i += num_embryos\n else:\n usage()\n for arg in req_args:\n if not arg:\n usage()\n else:\n usage()\n shared.ensureDir(directory)\n print('Plotting spatial amplitude...')\n command = ['python', 'plot_spatial_amplitude.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n \"\"\"### Burst size and frequency ###\n\t# 1. create_burst_data.py\n\tprint(\"Creating data for estimate_burst_parameters.m...\")\n\tcommand = [\"python\",\"create_burst_data.py\",str(num_embryos)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\n\t# 2. estimate_burst_parameters.m \n\tprint(\"Running estimate_burst_parameters.m on MATLAB...\")\n\tcommand = ['/Applications/MATLAB_R2016a.app/bin/matlab','-nodesktop','-nosplash','-nodisplay','-r','estimate_burst_parameters(''+directory+'/burst_data.xls',''+directory+'')']\n\tif 1==call(command): # this will automatically open and run MATLAB\n\t\texit(1)\n\t\n\t# 3. 
plot_estimated_burst_parameters.py using the output from estimate_burst_parameters.m \t\n\tprint(\"Plotting estimated burst size and frequencies...\")\n\tcommand = [\"python\",\"plot_estimated_burst_parameters.py\",directory+\"/burst_result.xls\",directory]\n\tif 1==call(command):\n\t\texit(1)\"\"\"\n command = ['python', 'plot_fano_factor.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Plotting fano factor...')\n if 1 == call(command):\n exit(1)\n print('Plotting intrinsic and extrinsic noise...')\n command = ['python', 'plot_noise.py', str(num_embryos), str(num_bins)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n print('Plotting scatter plots of her1 vs her7 mRNAs in all bins ...')\n command = ['python', 'plot_scatter_her1_her7.py', directory +\n '/combined_slices.xls', str(num_bins), directory]\n if 1 == call(command):\n exit(1)\n print(\n 'Plotting spatial noise (coefficient of variation squared across space)...'\n )\n command = ['python', 'plot_CVsquared.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_expression_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for RNA expression levels...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_spacial_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for spacial noise...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for noise...')\n if 1 == call(command):\n exit(1)\n\n\ndef usage():\n print('combine_embryos.py: Invalid command-line arguments.')\n print(\n \"Format: combine_embryos.py -ne <number of embryos> -nb <number of bins> -d <output directory> -i <first embryo's slice.xls> <second embryo's slice.xls> ... 
<last embryo's slice.xls>\"\n )\n print(\n 'Example: python combine_embryos.py -ne 20 -d ../wildtypefulldataset/output -nb 5 -i ../wildtypefulldataset/output/embryo1/slices.xls \\t../wildtypefulldataset/output/embryo2/slices.xls .... ../wildtypefulldataset/output/embryo20/slices.xls'\n )\n exit(1)\n\n\nmain()\n",
"step-4": "<mask token>\nimport sys, shared, os\nfrom subprocess import call\nDEFAULT_NUM_BIN = 5\n\n\ndef main():\n args = sys.argv[1:]\n num_args = len(args)\n req_args = [False] * 3\n num_bins = DEFAULT_NUM_BIN\n if num_args >= 6:\n i = 0\n while i < num_args - 1:\n option = args[i]\n value = args[i + 1]\n if (option == '-ne' or option == '--number-of-embryos'\n ) and shared.isInt(value):\n num_embryos = int(value)\n req_args[0] = True\n i += 2\n elif (option == '-nb' or option == '--number-of-bins'\n ) and shared.isInt(value):\n num_bins = int(value)\n i += 2\n elif option == '-d' or option == '--output-directory':\n directory = value\n req_args[1] = True\n i += 2\n elif req_args[0] and (option == '-i' or option == '--input-files'\n ) and num_args - 7 == num_embryos:\n slice_files = args[i + 1:i + 1 + num_embryos]\n for f in slice_files:\n if not os.path.isfile(f):\n print('combine_embryos.py: File ' + f +\n ' does not exist.')\n exit(1)\n req_args[2] = True\n i += num_embryos\n else:\n usage()\n for arg in req_args:\n if not arg:\n usage()\n else:\n usage()\n shared.ensureDir(directory)\n print('Plotting spatial amplitude...')\n command = ['python', 'plot_spatial_amplitude.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n \"\"\"### Burst size and frequency ###\n\t# 1. create_burst_data.py\n\tprint(\"Creating data for estimate_burst_parameters.m...\")\n\tcommand = [\"python\",\"create_burst_data.py\",str(num_embryos)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\n\t# 2. estimate_burst_parameters.m \n\tprint(\"Running estimate_burst_parameters.m on MATLAB...\")\n\tcommand = ['/Applications/MATLAB_R2016a.app/bin/matlab','-nodesktop','-nosplash','-nodisplay','-r','estimate_burst_parameters(''+directory+'/burst_data.xls',''+directory+'')']\n\tif 1==call(command): # this will automatically open and run MATLAB\n\t\texit(1)\n\t\n\t# 3. 
plot_estimated_burst_parameters.py using the output from estimate_burst_parameters.m \t\n\tprint(\"Plotting estimated burst size and frequencies...\")\n\tcommand = [\"python\",\"plot_estimated_burst_parameters.py\",directory+\"/burst_result.xls\",directory]\n\tif 1==call(command):\n\t\texit(1)\"\"\"\n command = ['python', 'plot_fano_factor.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Plotting fano factor...')\n if 1 == call(command):\n exit(1)\n print('Plotting intrinsic and extrinsic noise...')\n command = ['python', 'plot_noise.py', str(num_embryos), str(num_bins)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n print('Plotting scatter plots of her1 vs her7 mRNAs in all bins ...')\n command = ['python', 'plot_scatter_her1_her7.py', directory +\n '/combined_slices.xls', str(num_bins), directory]\n if 1 == call(command):\n exit(1)\n print(\n 'Plotting spatial noise (coefficient of variation squared across space)...'\n )\n command = ['python', 'plot_CVsquared.py', str(num_embryos)\n ] + slice_files + [directory]\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_expression_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for RNA expression levels...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_spacial_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for spacial noise...')\n if 1 == call(command):\n exit(1)\n command = ['python', 'create_raw_noise_excel.py', str(num_embryos)\n ] + slice_files + [directory]\n print('Creating Excel files for noise...')\n if 1 == call(command):\n exit(1)\n\n\ndef usage():\n print('combine_embryos.py: Invalid command-line arguments.')\n print(\n \"Format: combine_embryos.py -ne <number of embryos> -nb <number of bins> -d <output directory> -i <first embryo's slice.xls> <second embryo's slice.xls> ... 
<last embryo's slice.xls>\"\n )\n print(\n 'Example: python combine_embryos.py -ne 20 -d ../wildtypefulldataset/output -nb 5 -i ../wildtypefulldataset/output/embryo1/slices.xls \\t../wildtypefulldataset/output/embryo2/slices.xls .... ../wildtypefulldataset/output/embryo20/slices.xls'\n )\n exit(1)\n\n\nmain()\n",
"step-5": "\"\"\"\nCreate all figures and Excel files that combine data from all embryos in a given genetic background\nCopyright (C) 2017 Ahmet Ay, Dong Mai, Soo Bin Kwon, Ha Vu\n\nThis program is free software: you can redistribute it and/or modify\nit under the terms of the GNU General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nThis program is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License\nalong with this program. If not, see <http://www.gnu.org/licenses/>.\n\"\"\"\nimport sys, shared, os\nfrom subprocess import call\n\nDEFAULT_NUM_BIN = 5\n\ndef main():\n\targs = sys.argv[1:]\n\tnum_args = len(args)\n\treq_args = [False]*3\n\t\n\tnum_bins = DEFAULT_NUM_BIN\n\tif num_args >= 6:\n\t\ti = 0\n\t\twhile i<num_args-1:\n\t\t\toption = args[i]\n\t\t\tvalue = args[i+1]\t\t\t\n\t\t\tif (option == '-ne' or option == '--number-of-embryos') and shared.isInt(value):\n\t\t\t\tnum_embryos = int(value)\n\t\t\t\treq_args[0] = True\n\t\t\t\ti+=2\n\t\t\telif (option == '-nb' or option == '--number-of-bins') and shared.isInt(value):\n\t\t\t\tnum_bins = int(value)\n\t\t\t\ti+=2\n\t\t\telif option == '-d' or option == '--output-directory':\n\t\t\t\tdirectory = value\n\t\t\t\treq_args[1] = True\n\t\t\t\ti+=2\n\t\t\telif req_args[0] and (option == '-i' or option == '--input-files') and ((num_args-7)==num_embryos):\n\t\t\t\tslice_files = args[i+1:i+1+num_embryos]\n\t\t\t\tfor f in slice_files:\n\t\t\t\t\tif not os.path.isfile(f):\n\t\t\t\t\t\tprint(\"combine_embryos.py: File \"+f+\" does not exist.\")\n\t\t\t\t\t\texit(1)\n\t\t\t\treq_args[2] = True\t\n\t\t\t\ti+=num_embryos\n\t\t\telse:\n\t\t\t\tusage()\n\t\tfor arg in req_args:\n\t\t\tif not 
arg:\n\t\t\t\tusage()\n\telse:\n\t\tusage()\n\n\tshared.ensureDir(directory)\n\n\t### Spatial amplitude ###\n\tprint(\"Plotting spatial amplitude...\")\n\tcommand = [\"python\",\"plot_spatial_amplitude.py\",str(num_embryos)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\n\t# (compare_spatial_amplitude.py can run after plot_spatial_amplitude.py is run for all genetic backgrounds)\t\t\n\n\t'''### Burst size and frequency ###\n\t# 1. create_burst_data.py\n\tprint(\"Creating data for estimate_burst_parameters.m...\")\n\tcommand = [\"python\",\"create_burst_data.py\",str(num_embryos)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\n\t# 2. estimate_burst_parameters.m \n\tprint(\"Running estimate_burst_parameters.m on MATLAB...\")\n\tcommand = ['/Applications/MATLAB_R2016a.app/bin/matlab','-nodesktop','-nosplash','-nodisplay','-r','estimate_burst_parameters(\\''+directory+'/burst_data.xls\\',\\''+directory+'\\')']\n\tif 1==call(command): # this will automatically open and run MATLAB\n\t\texit(1)\n\t\n\t# 3. 
plot_estimated_burst_parameters.py using the output from estimate_burst_parameters.m \t\n\tprint(\"Plotting estimated burst size and frequencies...\")\n\tcommand = [\"python\",\"plot_estimated_burst_parameters.py\",directory+\"/burst_result.xls\",directory]\n\tif 1==call(command):\n\t\texit(1)'''\t\n\t# (compare_burst_parameters.py can run after plot_estimated_burst_parameters.py is run for all genetic backgrounds)\n\t\n\t# Fano factor (to demonstrate burstiness) \n\tcommand = [\"python\",\"plot_fano_factor.py\",str(num_embryos)] + slice_files + [directory]\n\tprint(\"Plotting fano factor...\")\n\tif 1==call(command):\n\t\texit(1)\n\t# (compare_fano_factor.py can run after plot_fano_factor.py is run for all genetic backgrounds)\n\n\t### Noise ###\n\t# Intrinsic and extrinsic noise\n\tprint(\"Plotting intrinsic and extrinsic noise...\")\n\tcommand = [\"python\",\"plot_noise.py\",str(num_embryos), str(num_bins)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\t\n\t# (compare_noise.py can run after plot_noise.py is run for all genetic backgrounds)\t\n\t\n\t### Scatter plot of her1 and her7 for all bins ####\n\tprint(\"Plotting scatter plots of her1 vs her7 mRNAs in all bins ...\")\t\n\tcommand = [\"python\", \"plot_scatter_her1_her7.py\", directory + \"/combined_slices.xls\", str(num_bins), directory]\n\tif 1 == call(command):\n\t\texit(1)\n\t\n\t# Spatial noise (coefficient of variation squared across space)\t\n\tprint(\"Plotting spatial noise (coefficient of variation squared across space)...\")\n\tcommand = [\"python\",\"plot_CVsquared.py\",str(num_embryos)] + slice_files + [directory]\n\tif 1==call(command):\n\t\texit(1)\t\n\t\n\t# (compare_grouped_CVsquared.py and compare_CV_squared.py can run after plot_CVsquared.py is run for all genetic backgrounds)\t\t\n\t### Raw data Excel files ###\n\tcommand = [\"python\",\"create_raw_expression_excel.py\",str(num_embryos)] + slice_files + [directory]\n\tprint(\"Creating Excel files for RNA expression 
levels...\")\n\tif 1==call(command):\n\t\texit(1)\t\n\n\tcommand = [\"python\",\"create_raw_spacial_noise_excel.py\",str(num_embryos)] + slice_files + [directory]\n\tprint(\"Creating Excel files for spacial noise...\")\n\tif 1==call(command):\n\t\texit(1)\n\t\n\tcommand = [\"python\",\"create_raw_noise_excel.py\",str(num_embryos)] + slice_files + [directory]\n\tprint(\"Creating Excel files for noise...\")\n\tif 1==call(command):\n\t\texit(1)\n\ndef usage():\n\tprint(\"combine_embryos.py: Invalid command-line arguments.\")\n\tprint(\"Format: combine_embryos.py -ne <number of embryos> -nb <number of bins> -d <output directory> -i <first embryo's slice.xls> <second embryo's slice.xls> ... <last embryo's slice.xls>\")\n\tprint(\"Example: python combine_embryos.py -ne 20 -d ../wildtypefulldataset/output -nb 5 -i ../wildtypefulldataset/output/embryo1/slices.xls \\\n\t../wildtypefulldataset/output/embryo2/slices.xls .... ../wildtypefulldataset/output/embryo20/slices.xls\")\n\texit(1)\n\t\nmain()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import numpy as np
def shufflelists(lists):
li = np.random.permutation(len(lists[0])
lo = []
for i in range(len(li)):
|
normal
|
{
"blob_id": "fc01c6fb812fe78ca04496494d68fcc90ae706f5",
"index": 3605,
"step-1": "import numpy as np\n\ndef shufflelists(lists):\n li = np.random.permutation(len(lists[0])\n lo = []\n for i in range(len(li)):\n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
def merge_sort(mlist):
if len(mlist) <= 1:
return mlist
mid = int(len(mlist) / 2)
# 使用递归将数组二分分解
left = merge_sort(mlist[:mid])
right = merge_sort(mlist[mid:])
return merge(left, right) # 将每次分解出来的数组各自排序,合并成一个大数组
def merge(left, right):
"""
合并操作,将两个有序数组left[]和right[]合并成一个大的有序数组
:param left:
:param right:
:return:
"""
l, r = 0, 0 # left与right数组的下标指针
result = []
while l < len(left) and r < len(right):
# 排序
if left[l] < right[r]:
result.append(left[l])
l += 1
else:
result.append(right[r])
r += 1
result += left[l:]
result += right[r:]
return result
if __name__ == '__main__':
mlist = merge_sort([4, 5, 6, 7, 3, 2, 6, 9, 8])
print(mlist)
|
normal
|
{
"blob_id": "a6192e39d86005882d0bde040a99f364bf701c3b",
"index": 1266,
"step-1": "<mask token>\n",
"step-2": "def merge_sort(mlist):\n if len(mlist) <= 1:\n return mlist\n mid = int(len(mlist) / 2)\n left = merge_sort(mlist[:mid])\n right = merge_sort(mlist[mid:])\n return merge(left, right)\n\n\n<mask token>\n",
"step-3": "def merge_sort(mlist):\n if len(mlist) <= 1:\n return mlist\n mid = int(len(mlist) / 2)\n left = merge_sort(mlist[:mid])\n right = merge_sort(mlist[mid:])\n return merge(left, right)\n\n\ndef merge(left, right):\n \"\"\"\n 合并操作,将两个有序数组left[]和right[]合并成一个大的有序数组\n :param left:\n :param right:\n :return:\n \"\"\"\n l, r = 0, 0\n result = []\n while l < len(left) and r < len(right):\n if left[l] < right[r]:\n result.append(left[l])\n l += 1\n else:\n result.append(right[r])\n r += 1\n result += left[l:]\n result += right[r:]\n return result\n\n\n<mask token>\n",
"step-4": "def merge_sort(mlist):\n if len(mlist) <= 1:\n return mlist\n mid = int(len(mlist) / 2)\n left = merge_sort(mlist[:mid])\n right = merge_sort(mlist[mid:])\n return merge(left, right)\n\n\ndef merge(left, right):\n \"\"\"\n 合并操作,将两个有序数组left[]和right[]合并成一个大的有序数组\n :param left:\n :param right:\n :return:\n \"\"\"\n l, r = 0, 0\n result = []\n while l < len(left) and r < len(right):\n if left[l] < right[r]:\n result.append(left[l])\n l += 1\n else:\n result.append(right[r])\n r += 1\n result += left[l:]\n result += right[r:]\n return result\n\n\nif __name__ == '__main__':\n mlist = merge_sort([4, 5, 6, 7, 3, 2, 6, 9, 8])\n print(mlist)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\r\n\r\ndef merge_sort(mlist):\r\n if len(mlist) <= 1:\r\n return mlist\r\n mid = int(len(mlist) / 2)\r\n # 使用递归将数组二分分解\r\n left = merge_sort(mlist[:mid])\r\n right = merge_sort(mlist[mid:])\r\n return merge(left, right) # 将每次分解出来的数组各自排序,合并成一个大数组\r\n\r\n\r\ndef merge(left, right):\r\n \"\"\"\r\n 合并操作,将两个有序数组left[]和right[]合并成一个大的有序数组\r\n :param left:\r\n :param right:\r\n :return:\r\n \"\"\"\r\n l, r = 0, 0 # left与right数组的下标指针\r\n result = []\r\n while l < len(left) and r < len(right):\r\n # 排序\r\n if left[l] < right[r]:\r\n result.append(left[l])\r\n l += 1\r\n else:\r\n result.append(right[r])\r\n r += 1\r\n result += left[l:]\r\n result += right[r:]\r\n return result\r\n\r\n\r\nif __name__ == '__main__':\r\n mlist = merge_sort([4, 5, 6, 7, 3, 2, 6, 9, 8])\r\n print(mlist)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def usage_list(self):
print('Available modules')
print('=================')
for module in sorted(self.list()):
if ('module' not in self.mods[module]):
self.import_module(module)
if (not self.mods[module]['module'].__doc__):
continue
text = self.mods[module]['module'].__doc__.strip('\n ')
text = text.split('\n')
if (len(text) > 2):
if text[1].startswith('='):
text[1] = ('=' * (14 + len(text[1])))
text = '\n'.join(text)
print(('\n%-12s: %s' % (module, text)))
|
normal
|
{
"blob_id": "d0eb6ea2e816ac59ae93684edb38ff3a49909633",
"index": 762,
"step-1": "<mask token>\n",
"step-2": "def usage_list(self):\n print('Available modules')\n print('=================')\n for module in sorted(self.list()):\n if 'module' not in self.mods[module]:\n self.import_module(module)\n if not self.mods[module]['module'].__doc__:\n continue\n text = self.mods[module]['module'].__doc__.strip('\\n ')\n text = text.split('\\n')\n if len(text) > 2:\n if text[1].startswith('='):\n text[1] = '=' * (14 + len(text[1]))\n text = '\\n'.join(text)\n print('\\n%-12s: %s' % (module, text))\n",
"step-3": "def usage_list(self):\n print('Available modules')\n print('=================')\n for module in sorted(self.list()):\n if ('module' not in self.mods[module]):\n self.import_module(module)\n if (not self.mods[module]['module'].__doc__):\n continue\n text = self.mods[module]['module'].__doc__.strip('\\n ')\n text = text.split('\\n')\n if (len(text) > 2):\n if text[1].startswith('='):\n text[1] = ('=' * (14 + len(text[1])))\n text = '\\n'.join(text)\n print(('\\n%-12s: %s' % (module, text)))",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import sys
sys.path.append("..") # Adds higher directory to python modules path.
from utils import npm_decorator
# num_node = 3
@ npm_decorator(3)
def scenario():
"""
1. Check each peer's genesis block
2. Generate new blocks on each peer
2.1. 2 blocks on peer #1
2.2. 4 blocks on peer #2
2.3. 2 blocks on peer #3
3. Connect peers
3.1. peer #1 with #2 (1->2)
3.2. peer #1 with #3 (1->(2 and 3))
4. Generate new blocks
4.1. 3 blocks on peer #1
4.2. 5 blocks on peer #3
5. Stop all peers
"""
LOCAL_HOST = "http://127.0.0.1"
# import functions
from . import genesis_block
from . import create_block
from . import connect_peer
from . import stop_server
from . import block_crosscheck
total_cnt = 0
pass_cnt = 0
# 1. Check each peer's genesis block
try:
assert genesis_block.check(LOCAL_HOST, 3001)
assert genesis_block.check(LOCAL_HOST, 3002)
assert genesis_block.check(LOCAL_HOST, 3003)
print("pass", end=' ')
pass_cnt += 1
except:
print("FAIL", end=' ')
finally:
print("test1/genesis_block")
total_cnt += 1
# 2. Generate new blocks
# 2.1. 2 blocks on peer #1
# 2.2. 4 blocks on peer #2
# 2.3. 2 blocks on peer #3
try:
assert create_block.addBlocks(LOCAL_HOST, 3001, num=2)
assert create_block.check(LOCAL_HOST, 3001, num=2)
assert create_block.addBlocks(LOCAL_HOST, 3002, num=4)
assert create_block.check(LOCAL_HOST, 3002, num=4)
assert create_block.addBlocks(LOCAL_HOST, 3003, num=2)
assert create_block.check(LOCAL_HOST, 3003, num=2)
print("pass", end=' ')
pass_cnt += 1
except:
print("FAIL", end=' ')
finally:
print("test1/create_block")
total_cnt += 1
# 3. Connect peers
# 3.1. peer #1 with #2 (1->2)
# 3.2. peer #1 with #3 (1->(2 and 3))
try:
assert connect_peer.connectPeer(LOCAL_HOST, 3001, "ws://127.0.0.1:6002")
assert connect_peer.connectPeer(LOCAL_HOST, 3001, "ws://127.0.0.1:6003")
print("pass", end=' ')
pass_cnt += 1
except:
print("FAIL", end=' ')
finally:
print("test1/connect_peer")
total_cnt += 1
# 4. Generate new blocks
# 4.1. 3 blocks on peer #1
# 4.2. 5 blocks on peer #3
try:
isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3001, num=3)
assert isPass
assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=3)
assert block_crosscheck.check(LOCAL_HOST, 3003, newBlocks, num=3)
isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3003, num=5)
assert isPass
assert block_crosscheck.check(LOCAL_HOST, 3001, newBlocks, num=5)
assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=5)
print("pass", end=' ')
pass_cnt += 1
except:
print("FAIL", end=' ')
finally:
print("test1/block_crosscheck")
total_cnt += 1
# 5. Stop all peers
try:
assert stop_server.stopServer(LOCAL_HOST, 3001)
assert stop_server.stopServer(LOCAL_HOST, 3002)
assert stop_server.stopServer(LOCAL_HOST, 3003)
print("pass", end=' ')
pass_cnt += 1
except:
print("FAIL", end=' ')
finally:
print("test1/stop_server")
total_cnt += 1
# return pass_cnt_per_test and total_cnt_per_test
return pass_cnt, total_cnt
|
normal
|
{
"blob_id": "91cf1f4cf34ac9723be4863e81149c703adca27a",
"index": 3583,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@npm_decorator(3)\ndef scenario():\n \"\"\"\n 1. Check each peer's genesis block\n 2. Generate new blocks on each peer\n 2.1. 2 blocks on peer #1\n 2.2. 4 blocks on peer #2\n 2.3. 2 blocks on peer #3\n 3. Connect peers\n 3.1. peer #1 with #2 (1->2)\n 3.2. peer #1 with #3 (1->(2 and 3))\n 4. Generate new blocks\n 4.1. 3 blocks on peer #1\n 4.2. 5 blocks on peer #3\n 5. Stop all peers\n \"\"\"\n LOCAL_HOST = 'http://127.0.0.1'\n from . import genesis_block\n from . import create_block\n from . import connect_peer\n from . import stop_server\n from . import block_crosscheck\n total_cnt = 0\n pass_cnt = 0\n try:\n assert genesis_block.check(LOCAL_HOST, 3001)\n assert genesis_block.check(LOCAL_HOST, 3002)\n assert genesis_block.check(LOCAL_HOST, 3003)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/genesis_block')\n total_cnt += 1\n try:\n assert create_block.addBlocks(LOCAL_HOST, 3001, num=2)\n assert create_block.check(LOCAL_HOST, 3001, num=2)\n assert create_block.addBlocks(LOCAL_HOST, 3002, num=4)\n assert create_block.check(LOCAL_HOST, 3002, num=4)\n assert create_block.addBlocks(LOCAL_HOST, 3003, num=2)\n assert create_block.check(LOCAL_HOST, 3003, num=2)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/create_block')\n total_cnt += 1\n try:\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, 'ws://127.0.0.1:6002'\n )\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, 'ws://127.0.0.1:6003'\n )\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/connect_peer')\n total_cnt += 1\n try:\n isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3001, num=3)\n assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=3)\n assert block_crosscheck.check(LOCAL_HOST, 3003, newBlocks, num=3)\n isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3003, num=5)\n 
assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3001, newBlocks, num=5)\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=5)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/block_crosscheck')\n total_cnt += 1\n try:\n assert stop_server.stopServer(LOCAL_HOST, 3001)\n assert stop_server.stopServer(LOCAL_HOST, 3002)\n assert stop_server.stopServer(LOCAL_HOST, 3003)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/stop_server')\n total_cnt += 1\n return pass_cnt, total_cnt\n",
"step-3": "<mask token>\nsys.path.append('..')\n<mask token>\n\n\n@npm_decorator(3)\ndef scenario():\n \"\"\"\n 1. Check each peer's genesis block\n 2. Generate new blocks on each peer\n 2.1. 2 blocks on peer #1\n 2.2. 4 blocks on peer #2\n 2.3. 2 blocks on peer #3\n 3. Connect peers\n 3.1. peer #1 with #2 (1->2)\n 3.2. peer #1 with #3 (1->(2 and 3))\n 4. Generate new blocks\n 4.1. 3 blocks on peer #1\n 4.2. 5 blocks on peer #3\n 5. Stop all peers\n \"\"\"\n LOCAL_HOST = 'http://127.0.0.1'\n from . import genesis_block\n from . import create_block\n from . import connect_peer\n from . import stop_server\n from . import block_crosscheck\n total_cnt = 0\n pass_cnt = 0\n try:\n assert genesis_block.check(LOCAL_HOST, 3001)\n assert genesis_block.check(LOCAL_HOST, 3002)\n assert genesis_block.check(LOCAL_HOST, 3003)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/genesis_block')\n total_cnt += 1\n try:\n assert create_block.addBlocks(LOCAL_HOST, 3001, num=2)\n assert create_block.check(LOCAL_HOST, 3001, num=2)\n assert create_block.addBlocks(LOCAL_HOST, 3002, num=4)\n assert create_block.check(LOCAL_HOST, 3002, num=4)\n assert create_block.addBlocks(LOCAL_HOST, 3003, num=2)\n assert create_block.check(LOCAL_HOST, 3003, num=2)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/create_block')\n total_cnt += 1\n try:\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, 'ws://127.0.0.1:6002'\n )\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, 'ws://127.0.0.1:6003'\n )\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/connect_peer')\n total_cnt += 1\n try:\n isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3001, num=3)\n assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=3)\n assert block_crosscheck.check(LOCAL_HOST, 3003, newBlocks, num=3)\n isPass, newBlocks = 
block_crosscheck.addBlocks(LOCAL_HOST, 3003, num=5)\n assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3001, newBlocks, num=5)\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=5)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/block_crosscheck')\n total_cnt += 1\n try:\n assert stop_server.stopServer(LOCAL_HOST, 3001)\n assert stop_server.stopServer(LOCAL_HOST, 3002)\n assert stop_server.stopServer(LOCAL_HOST, 3003)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/stop_server')\n total_cnt += 1\n return pass_cnt, total_cnt\n",
"step-4": "import sys\nsys.path.append('..')\nfrom utils import npm_decorator\n\n\n@npm_decorator(3)\ndef scenario():\n \"\"\"\n 1. Check each peer's genesis block\n 2. Generate new blocks on each peer\n 2.1. 2 blocks on peer #1\n 2.2. 4 blocks on peer #2\n 2.3. 2 blocks on peer #3\n 3. Connect peers\n 3.1. peer #1 with #2 (1->2)\n 3.2. peer #1 with #3 (1->(2 and 3))\n 4. Generate new blocks\n 4.1. 3 blocks on peer #1\n 4.2. 5 blocks on peer #3\n 5. Stop all peers\n \"\"\"\n LOCAL_HOST = 'http://127.0.0.1'\n from . import genesis_block\n from . import create_block\n from . import connect_peer\n from . import stop_server\n from . import block_crosscheck\n total_cnt = 0\n pass_cnt = 0\n try:\n assert genesis_block.check(LOCAL_HOST, 3001)\n assert genesis_block.check(LOCAL_HOST, 3002)\n assert genesis_block.check(LOCAL_HOST, 3003)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/genesis_block')\n total_cnt += 1\n try:\n assert create_block.addBlocks(LOCAL_HOST, 3001, num=2)\n assert create_block.check(LOCAL_HOST, 3001, num=2)\n assert create_block.addBlocks(LOCAL_HOST, 3002, num=4)\n assert create_block.check(LOCAL_HOST, 3002, num=4)\n assert create_block.addBlocks(LOCAL_HOST, 3003, num=2)\n assert create_block.check(LOCAL_HOST, 3003, num=2)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/create_block')\n total_cnt += 1\n try:\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, 'ws://127.0.0.1:6002'\n )\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, 'ws://127.0.0.1:6003'\n )\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/connect_peer')\n total_cnt += 1\n try:\n isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3001, num=3)\n assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=3)\n assert block_crosscheck.check(LOCAL_HOST, 3003, newBlocks, num=3)\n isPass, newBlocks = 
block_crosscheck.addBlocks(LOCAL_HOST, 3003, num=5)\n assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3001, newBlocks, num=5)\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=5)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/block_crosscheck')\n total_cnt += 1\n try:\n assert stop_server.stopServer(LOCAL_HOST, 3001)\n assert stop_server.stopServer(LOCAL_HOST, 3002)\n assert stop_server.stopServer(LOCAL_HOST, 3003)\n print('pass', end=' ')\n pass_cnt += 1\n except:\n print('FAIL', end=' ')\n finally:\n print('test1/stop_server')\n total_cnt += 1\n return pass_cnt, total_cnt\n",
"step-5": "import sys\nsys.path.append(\"..\") # Adds higher directory to python modules path.\n\nfrom utils import npm_decorator\n\n\n# num_node = 3\n@ npm_decorator(3)\ndef scenario():\n \"\"\"\n 1. Check each peer's genesis block\n 2. Generate new blocks on each peer\n 2.1. 2 blocks on peer #1\n 2.2. 4 blocks on peer #2\n 2.3. 2 blocks on peer #3\n 3. Connect peers\n 3.1. peer #1 with #2 (1->2)\n 3.2. peer #1 with #3 (1->(2 and 3))\n 4. Generate new blocks\n 4.1. 3 blocks on peer #1\n 4.2. 5 blocks on peer #3\n 5. Stop all peers\n \"\"\"\n LOCAL_HOST = \"http://127.0.0.1\"\n\n # import functions\n from . import genesis_block\n from . import create_block\n from . import connect_peer\n from . import stop_server\n from . import block_crosscheck\n\n total_cnt = 0\n pass_cnt = 0\n\n # 1. Check each peer's genesis block\n try:\n assert genesis_block.check(LOCAL_HOST, 3001)\n assert genesis_block.check(LOCAL_HOST, 3002)\n assert genesis_block.check(LOCAL_HOST, 3003)\n\n print(\"pass\", end=' ')\n pass_cnt += 1\n\n except:\n print(\"FAIL\", end=' ')\n\n finally:\n print(\"test1/genesis_block\")\n total_cnt += 1\n\n # 2. Generate new blocks\n # 2.1. 2 blocks on peer #1\n # 2.2. 4 blocks on peer #2\n # 2.3. 2 blocks on peer #3\n try:\n assert create_block.addBlocks(LOCAL_HOST, 3001, num=2)\n assert create_block.check(LOCAL_HOST, 3001, num=2)\n\n assert create_block.addBlocks(LOCAL_HOST, 3002, num=4)\n assert create_block.check(LOCAL_HOST, 3002, num=4)\n\n assert create_block.addBlocks(LOCAL_HOST, 3003, num=2)\n assert create_block.check(LOCAL_HOST, 3003, num=2)\n\n print(\"pass\", end=' ')\n pass_cnt += 1\n\n except:\n print(\"FAIL\", end=' ')\n\n finally:\n print(\"test1/create_block\")\n total_cnt += 1\n\n # 3. Connect peers\n # 3.1. peer #1 with #2 (1->2)\n # 3.2. 
peer #1 with #3 (1->(2 and 3))\n try:\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, \"ws://127.0.0.1:6002\")\n assert connect_peer.connectPeer(LOCAL_HOST, 3001, \"ws://127.0.0.1:6003\")\n\n print(\"pass\", end=' ')\n pass_cnt += 1\n\n except:\n print(\"FAIL\", end=' ')\n\n finally:\n print(\"test1/connect_peer\")\n total_cnt += 1\n\n # 4. Generate new blocks\n # 4.1. 3 blocks on peer #1\n # 4.2. 5 blocks on peer #3\n try:\n isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3001, num=3)\n assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=3)\n assert block_crosscheck.check(LOCAL_HOST, 3003, newBlocks, num=3)\n\n isPass, newBlocks = block_crosscheck.addBlocks(LOCAL_HOST, 3003, num=5)\n assert isPass\n assert block_crosscheck.check(LOCAL_HOST, 3001, newBlocks, num=5)\n assert block_crosscheck.check(LOCAL_HOST, 3002, newBlocks, num=5)\n\n print(\"pass\", end=' ')\n pass_cnt += 1\n\n except:\n print(\"FAIL\", end=' ')\n\n finally:\n print(\"test1/block_crosscheck\")\n total_cnt += 1\n\n # 5. Stop all peers\n try:\n assert stop_server.stopServer(LOCAL_HOST, 3001)\n assert stop_server.stopServer(LOCAL_HOST, 3002)\n assert stop_server.stopServer(LOCAL_HOST, 3003)\n\n print(\"pass\", end=' ')\n pass_cnt += 1\n\n except:\n print(\"FAIL\", end=' ')\n\n finally:\n print(\"test1/stop_server\")\n total_cnt += 1\n\n # return pass_cnt_per_test and total_cnt_per_test\n return pass_cnt, total_cnt\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def _make_key(*args, **kwargs):
all_args = [str(arg) for arg in args]
all_args += [str(arg) + '=' + str(value) for arg, value in kwargs.items()]
return '|'.join(all_args)
class DoubleLinked:
def __init__(self, prv, nxt, key):
self.prv = prv
self.nxt = nxt
self.key = key
class CacheEntry:
def __init__(self, value, position):
self.value = value
self.position = position
class LRUCache:
def __init__(self, get_from_origin, max_size=1024):
if max_size == 0:
raise NotImplementedError()
if max_size < 0:
raise ValueError()
# keep separate size counter, to save going over the list
self.size = 0
self.max_size = max_size
# the function to call
self._get_from_origin = get_from_origin
# the values to cache
self._cache = {}
self._most_recent = None
self._least_recent = None
@property
def full(self):
return self.size == self.max_size
def get(self, *args, **kwargs):
if not args and not kwargs:
raise ValueError()
key = _make_key(*args, **kwargs)
if key in self._cache:
return self._hit(key)
return self._miss(key, *args, **kwargs)
def _hit(self, key):
self._bump_cached(key)
return self._cache[key].value
def _miss(self, key, *args, **kwargs):
value = self._get_from_origin(*args, **kwargs)
if not self._most_recent:
self._bump_init(key)
else:
self._bump_new(key)
self._set(key, value)
return value
def _bump_init(self, key):
self._most_recent = DoubleLinked(nxt=None, prv=None, key=key)
self._least_recent = self._most_recent
self.size = 1
def _bump_new(self, key):
self._bump(key)
# remove oldest entry
# this is the entire reason for the linked list business
if self.full:
old_last = self._least_recent
new_last = old_last.prv
new_last.nxt = None
self._least_recent = new_last
self._remove(old_last.key)
else:
self.size += 1
def _bump_cached(self, key):
self._bump(key)
self._remove_old_position(key)
def _remove_old_position(self, key):
old_position = self._cache[key].position
if not old_position.prv:
return # we are already the most recent
old_position.prv.nxt = old_position.nxt
if old_position.nxt: # if we're not the last
old_position.nxt.prv = old_position.prv
else:
self._least_recent = old_position.prv
self._cache[key].position = self._most_recent
def _bump(self, key):
old_first = self._most_recent
new_first = DoubleLinked(nxt=old_first, prv=None, key=key)
old_first.prv = new_first
self._most_recent = new_first
def _set(self, key, value):
self._cache[key] = CacheEntry(value, self._most_recent)
def _remove(self, key):
del self._cache[key]
def __repr__(self):
if not self._most_recent:
return '[ | ]'
current = self._most_recent
keys = [current.key]
while current.nxt:
current = current.nxt
keys.append(current.key)
return '[ ' + (' | '.join(keys)) + ' ]'
def __len__(self):
return self.size
class cache: # pylint: disable=invalid-name
def __init__(self, max_size):
assert isinstance(max_size, int)
self.max_size = max_size
def __call__(self, func):
lru = LRUCache(func, max_size=self.max_size)
def cached_f(*args, **kwargs):
return lru.get(*args, **kwargs)
return cached_f
|
normal
|
{
"blob_id": "9c251e0224979877b9ce244e4871fd4c403abb8e",
"index": 1583,
"step-1": "<mask token>\n\n\nclass LRUCache:\n\n def __init__(self, get_from_origin, max_size=1024):\n if max_size == 0:\n raise NotImplementedError()\n if max_size < 0:\n raise ValueError()\n self.size = 0\n self.max_size = max_size\n self._get_from_origin = get_from_origin\n self._cache = {}\n self._most_recent = None\n self._least_recent = None\n <mask token>\n <mask token>\n\n def _hit(self, key):\n self._bump_cached(key)\n return self._cache[key].value\n\n def _miss(self, key, *args, **kwargs):\n value = self._get_from_origin(*args, **kwargs)\n if not self._most_recent:\n self._bump_init(key)\n else:\n self._bump_new(key)\n self._set(key, value)\n return value\n <mask token>\n\n def _bump_new(self, key):\n self._bump(key)\n if self.full:\n old_last = self._least_recent\n new_last = old_last.prv\n new_last.nxt = None\n self._least_recent = new_last\n self._remove(old_last.key)\n else:\n self.size += 1\n\n def _bump_cached(self, key):\n self._bump(key)\n self._remove_old_position(key)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __repr__(self):\n if not self._most_recent:\n return '[ | ]'\n current = self._most_recent\n keys = [current.key]\n while current.nxt:\n current = current.nxt\n keys.append(current.key)\n return '[ ' + ' | '.join(keys) + ' ]'\n <mask token>\n\n\nclass cache:\n\n def __init__(self, max_size):\n assert isinstance(max_size, int)\n self.max_size = max_size\n\n def __call__(self, func):\n lru = LRUCache(func, max_size=self.max_size)\n\n def cached_f(*args, **kwargs):\n return lru.get(*args, **kwargs)\n return cached_f\n",
"step-2": "<mask token>\n\n\nclass LRUCache:\n\n def __init__(self, get_from_origin, max_size=1024):\n if max_size == 0:\n raise NotImplementedError()\n if max_size < 0:\n raise ValueError()\n self.size = 0\n self.max_size = max_size\n self._get_from_origin = get_from_origin\n self._cache = {}\n self._most_recent = None\n self._least_recent = None\n <mask token>\n <mask token>\n\n def _hit(self, key):\n self._bump_cached(key)\n return self._cache[key].value\n\n def _miss(self, key, *args, **kwargs):\n value = self._get_from_origin(*args, **kwargs)\n if not self._most_recent:\n self._bump_init(key)\n else:\n self._bump_new(key)\n self._set(key, value)\n return value\n\n def _bump_init(self, key):\n self._most_recent = DoubleLinked(nxt=None, prv=None, key=key)\n self._least_recent = self._most_recent\n self.size = 1\n\n def _bump_new(self, key):\n self._bump(key)\n if self.full:\n old_last = self._least_recent\n new_last = old_last.prv\n new_last.nxt = None\n self._least_recent = new_last\n self._remove(old_last.key)\n else:\n self.size += 1\n\n def _bump_cached(self, key):\n self._bump(key)\n self._remove_old_position(key)\n <mask token>\n\n def _bump(self, key):\n old_first = self._most_recent\n new_first = DoubleLinked(nxt=old_first, prv=None, key=key)\n old_first.prv = new_first\n self._most_recent = new_first\n\n def _set(self, key, value):\n self._cache[key] = CacheEntry(value, self._most_recent)\n\n def _remove(self, key):\n del self._cache[key]\n\n def __repr__(self):\n if not self._most_recent:\n return '[ | ]'\n current = self._most_recent\n keys = [current.key]\n while current.nxt:\n current = current.nxt\n keys.append(current.key)\n return '[ ' + ' | '.join(keys) + ' ]'\n <mask token>\n\n\nclass cache:\n\n def __init__(self, max_size):\n assert isinstance(max_size, int)\n self.max_size = max_size\n\n def __call__(self, func):\n lru = LRUCache(func, max_size=self.max_size)\n\n def cached_f(*args, **kwargs):\n return lru.get(*args, **kwargs)\n return 
cached_f\n",
"step-3": "<mask token>\n\n\nclass LRUCache:\n\n def __init__(self, get_from_origin, max_size=1024):\n if max_size == 0:\n raise NotImplementedError()\n if max_size < 0:\n raise ValueError()\n self.size = 0\n self.max_size = max_size\n self._get_from_origin = get_from_origin\n self._cache = {}\n self._most_recent = None\n self._least_recent = None\n\n @property\n def full(self):\n return self.size == self.max_size\n <mask token>\n\n def _hit(self, key):\n self._bump_cached(key)\n return self._cache[key].value\n\n def _miss(self, key, *args, **kwargs):\n value = self._get_from_origin(*args, **kwargs)\n if not self._most_recent:\n self._bump_init(key)\n else:\n self._bump_new(key)\n self._set(key, value)\n return value\n\n def _bump_init(self, key):\n self._most_recent = DoubleLinked(nxt=None, prv=None, key=key)\n self._least_recent = self._most_recent\n self.size = 1\n\n def _bump_new(self, key):\n self._bump(key)\n if self.full:\n old_last = self._least_recent\n new_last = old_last.prv\n new_last.nxt = None\n self._least_recent = new_last\n self._remove(old_last.key)\n else:\n self.size += 1\n\n def _bump_cached(self, key):\n self._bump(key)\n self._remove_old_position(key)\n <mask token>\n\n def _bump(self, key):\n old_first = self._most_recent\n new_first = DoubleLinked(nxt=old_first, prv=None, key=key)\n old_first.prv = new_first\n self._most_recent = new_first\n\n def _set(self, key, value):\n self._cache[key] = CacheEntry(value, self._most_recent)\n\n def _remove(self, key):\n del self._cache[key]\n\n def __repr__(self):\n if not self._most_recent:\n return '[ | ]'\n current = self._most_recent\n keys = [current.key]\n while current.nxt:\n current = current.nxt\n keys.append(current.key)\n return '[ ' + ' | '.join(keys) + ' ]'\n\n def __len__(self):\n return self.size\n\n\nclass cache:\n\n def __init__(self, max_size):\n assert isinstance(max_size, int)\n self.max_size = max_size\n\n def __call__(self, func):\n lru = LRUCache(func, max_size=self.max_size)\n\n 
def cached_f(*args, **kwargs):\n return lru.get(*args, **kwargs)\n return cached_f\n",
"step-4": "def _make_key(*args, **kwargs):\n all_args = [str(arg) for arg in args]\n all_args += [(str(arg) + '=' + str(value)) for arg, value in kwargs.items()\n ]\n return '|'.join(all_args)\n\n\nclass DoubleLinked:\n\n def __init__(self, prv, nxt, key):\n self.prv = prv\n self.nxt = nxt\n self.key = key\n\n\nclass CacheEntry:\n\n def __init__(self, value, position):\n self.value = value\n self.position = position\n\n\nclass LRUCache:\n\n def __init__(self, get_from_origin, max_size=1024):\n if max_size == 0:\n raise NotImplementedError()\n if max_size < 0:\n raise ValueError()\n self.size = 0\n self.max_size = max_size\n self._get_from_origin = get_from_origin\n self._cache = {}\n self._most_recent = None\n self._least_recent = None\n\n @property\n def full(self):\n return self.size == self.max_size\n\n def get(self, *args, **kwargs):\n if not args and not kwargs:\n raise ValueError()\n key = _make_key(*args, **kwargs)\n if key in self._cache:\n return self._hit(key)\n return self._miss(key, *args, **kwargs)\n\n def _hit(self, key):\n self._bump_cached(key)\n return self._cache[key].value\n\n def _miss(self, key, *args, **kwargs):\n value = self._get_from_origin(*args, **kwargs)\n if not self._most_recent:\n self._bump_init(key)\n else:\n self._bump_new(key)\n self._set(key, value)\n return value\n\n def _bump_init(self, key):\n self._most_recent = DoubleLinked(nxt=None, prv=None, key=key)\n self._least_recent = self._most_recent\n self.size = 1\n\n def _bump_new(self, key):\n self._bump(key)\n if self.full:\n old_last = self._least_recent\n new_last = old_last.prv\n new_last.nxt = None\n self._least_recent = new_last\n self._remove(old_last.key)\n else:\n self.size += 1\n\n def _bump_cached(self, key):\n self._bump(key)\n self._remove_old_position(key)\n\n def _remove_old_position(self, key):\n old_position = self._cache[key].position\n if not old_position.prv:\n return\n old_position.prv.nxt = old_position.nxt\n if old_position.nxt:\n old_position.nxt.prv = 
old_position.prv\n else:\n self._least_recent = old_position.prv\n self._cache[key].position = self._most_recent\n\n def _bump(self, key):\n old_first = self._most_recent\n new_first = DoubleLinked(nxt=old_first, prv=None, key=key)\n old_first.prv = new_first\n self._most_recent = new_first\n\n def _set(self, key, value):\n self._cache[key] = CacheEntry(value, self._most_recent)\n\n def _remove(self, key):\n del self._cache[key]\n\n def __repr__(self):\n if not self._most_recent:\n return '[ | ]'\n current = self._most_recent\n keys = [current.key]\n while current.nxt:\n current = current.nxt\n keys.append(current.key)\n return '[ ' + ' | '.join(keys) + ' ]'\n\n def __len__(self):\n return self.size\n\n\nclass cache:\n\n def __init__(self, max_size):\n assert isinstance(max_size, int)\n self.max_size = max_size\n\n def __call__(self, func):\n lru = LRUCache(func, max_size=self.max_size)\n\n def cached_f(*args, **kwargs):\n return lru.get(*args, **kwargs)\n return cached_f\n",
"step-5": "def _make_key(*args, **kwargs):\n all_args = [str(arg) for arg in args]\n all_args += [str(arg) + '=' + str(value) for arg, value in kwargs.items()]\n return '|'.join(all_args)\n\n\nclass DoubleLinked:\n def __init__(self, prv, nxt, key):\n self.prv = prv\n self.nxt = nxt\n self.key = key\n\n\nclass CacheEntry:\n def __init__(self, value, position):\n self.value = value\n self.position = position\n\n\nclass LRUCache:\n def __init__(self, get_from_origin, max_size=1024):\n if max_size == 0:\n raise NotImplementedError()\n if max_size < 0:\n raise ValueError()\n\n # keep separate size counter, to save going over the list\n self.size = 0\n self.max_size = max_size\n # the function to call\n self._get_from_origin = get_from_origin\n\n # the values to cache\n self._cache = {}\n self._most_recent = None\n self._least_recent = None\n\n @property\n def full(self):\n return self.size == self.max_size\n\n def get(self, *args, **kwargs):\n if not args and not kwargs:\n raise ValueError()\n\n key = _make_key(*args, **kwargs)\n if key in self._cache:\n return self._hit(key)\n return self._miss(key, *args, **kwargs)\n\n def _hit(self, key):\n self._bump_cached(key)\n return self._cache[key].value\n\n def _miss(self, key, *args, **kwargs):\n value = self._get_from_origin(*args, **kwargs)\n\n if not self._most_recent:\n self._bump_init(key)\n else:\n self._bump_new(key)\n\n self._set(key, value)\n\n return value\n\n def _bump_init(self, key):\n self._most_recent = DoubleLinked(nxt=None, prv=None, key=key)\n self._least_recent = self._most_recent\n self.size = 1\n\n def _bump_new(self, key):\n self._bump(key)\n\n # remove oldest entry\n # this is the entire reason for the linked list business\n if self.full:\n old_last = self._least_recent\n new_last = old_last.prv\n new_last.nxt = None\n self._least_recent = new_last\n self._remove(old_last.key)\n else:\n self.size += 1\n\n def _bump_cached(self, key):\n self._bump(key)\n self._remove_old_position(key)\n\n def 
_remove_old_position(self, key):\n old_position = self._cache[key].position\n\n if not old_position.prv:\n return # we are already the most recent\n\n old_position.prv.nxt = old_position.nxt\n\n if old_position.nxt: # if we're not the last\n old_position.nxt.prv = old_position.prv\n else:\n self._least_recent = old_position.prv\n\n self._cache[key].position = self._most_recent\n\n def _bump(self, key):\n old_first = self._most_recent\n new_first = DoubleLinked(nxt=old_first, prv=None, key=key)\n old_first.prv = new_first\n self._most_recent = new_first\n\n def _set(self, key, value):\n self._cache[key] = CacheEntry(value, self._most_recent)\n\n def _remove(self, key):\n del self._cache[key]\n\n def __repr__(self):\n if not self._most_recent:\n return '[ | ]'\n current = self._most_recent\n keys = [current.key]\n while current.nxt:\n current = current.nxt\n keys.append(current.key)\n return '[ ' + (' | '.join(keys)) + ' ]'\n\n def __len__(self):\n return self.size\n\n\nclass cache: # pylint: disable=invalid-name\n def __init__(self, max_size):\n assert isinstance(max_size, int)\n self.max_size = max_size\n\n def __call__(self, func):\n lru = LRUCache(func, max_size=self.max_size)\n\n def cached_f(*args, **kwargs):\n return lru.get(*args, **kwargs)\n return cached_f\n",
"step-ids": [
10,
14,
16,
23,
24
]
}
|
[
10,
14,
16,
23,
24
] |
class HashTableEntry:
    """A single key/value node in a singly linked collision chain."""

    def __init__(self, key, value):
        # New nodes always start as chain tails.
        self.next = None
        self.value = value
        self.key = key
class HashTable:
    """
    A hash table with `capacity` buckets that accepts string keys.

    Collisions are handled with linked-list chaining, and the table
    automatically resizes to keep its load factor between 0.2 and 0.7.
    """

    def __init__(self, capacity):
        self.capacity = capacity
        self.storage = [None] * capacity
        # Live entry count, tracked so the load factor is O(1) to compute.
        self.numberOfItems = 0

    def fnv1(self, key):
        """
        FNV-1 64-bit hash function.

        NOTE(review): the running hash is never masked to 64 bits, so for
        long keys the Python int grows past 2**64.  Harmless for bucket
        selection (a modulo is applied later), but not a true 64-bit FNV-1.
        """
        hash = 0xcbf29ce484222325  # FNV-1 64-bit offset basis
        for n in key.encode():
            hash = hash ^ n
            hash = hash * 0x100000001b3  # FNV-1 64-bit prime
        return hash

    def djb2(self, key):
        """DJB2 hash function (hash * 33 + byte), unmasked."""
        hash = 5381
        for n in key.encode():
            hash = hash * 33 + n
        return hash

    def hash_index(self, key):
        """Map an arbitrary string key to a bucket index in [0, capacity)."""
        return self.djb2(key) % self.capacity

    def put(self, key, value):
        """
        Store `value` under `key`, overwriting any existing entry.

        Hash collisions are handled with linked-list chaining.
        """
        hi = self.hash_index(key)
        if self.storage[hi]:
            current = self.storage[hi]
            # Walk the chain until the key is found or the tail is reached.
            while current.next and current.key != key:
                current = current.next
            if current.key == key:
                current.value = value  # existing key: overwrite in place
            else:
                current.next = HashTableEntry(key, value)  # append at tail
                self.numberOfItems += 1
        else:
            self.storage[hi] = HashTableEntry(key, value)
            self.numberOfItems += 1
        self.calculateLoad()

    def delete(self, key):
        """
        Remove the value stored under `key`.

        Prints a warning if the key is not found.
        """
        hi = self.hash_index(key)
        current = self.storage[hi]
        prev = self.storage[hi]
        while current and current.key != key:
            prev = current
            current = current.next
        if current and current.key == key:
            if current == self.storage[hi]:
                # Key is the head of the chain: promote its successor.
                self.storage[hi] = current.next
            else:
                prev.next = current.next
            self.numberOfItems -= 1
        else:
            print("WARNING: no key")
        self.calculateLoad()

    def get(self, key):
        """
        Return the value stored under `key`, or None if it is absent.

        Bug fix: the previous version returned the value of the last node
        of a non-empty bucket even when the requested key was not present
        (and a single-node bucket returned its value regardless of key).
        """
        current = self.storage[self.hash_index(key)]
        while current:
            if current.key == key:
                return current.value
            current = current.next
        return None

    def resize(self, factor=2):
        """
        Scale the bucket count by `factor` (never below 1) and rehash
        every key/value pair into the new buckets.

        The lower bound prevents repeated shrinking from reaching a
        capacity of 0, which would make `hash_index` divide by zero.
        """
        self.capacity = max(1, round(self.capacity * factor))
        newarr = [None] * self.capacity
        for bucket in self.storage:
            node = bucket
            while node:
                hi = self.hash_index(node.key)
                if newarr[hi]:
                    current = newarr[hi]
                    while current.next:
                        current = current.next
                    current.next = HashTableEntry(node.key, node.value)
                else:
                    newarr[hi] = HashTableEntry(node.key, node.value)
                node = node.next
        self.storage = newarr

    def calculateLoad(self):
        """Grow or shrink the table when the load factor leaves [0.2, 0.7]."""
        load = self.numberOfItems / len(self.storage)
        if load > 0.7:
            self.resize(2)
        elif load < 0.2:
            self.resize(0.5)
if __name__ == "__main__":
    ht = HashTable(2)

    # Populate well past the initial capacity to exercise chaining and
    # automatic resizing.
    for key, value in [
        ("line_1", "111"),
        ("line_2", "222"),
        ("line_3", "333"),
        ("line_4", "sss"),
        ("line_5", "ddd"),
        ("line_6", "ggg"),
        ("line_7", "hhh"),
        ("line_12", "jjj"),
    ]:
        ht.put(key, value)

    print("")

    # Test resizing.
    old_capacity = len(ht.storage)
    ht.resize()
    new_capacity = len(ht.storage)
    print(f"\nResized from {old_capacity} to {new_capacity}.\n")

    def dump(table):
        """Print every stored value, prefixed with its bucket index."""
        for index, node in enumerate(table.storage):
            while node:
                print(index, node.value)
                node = node.next

    print("")
    dump(ht)
    print("")
    ht.delete("line_3")
    print("")
    dump(ht)
    print("")

    print("")
|
normal
|
{
"blob_id": "7e58fe636e6d835d7857a49900bbc127b52f63d9",
"index": 6112,
"step-1": "<mask token>\n\n\nclass HashTable:\n <mask token>\n\n def __init__(self, capacity):\n self.capacity = capacity\n self.storage = [None] * capacity\n self.numberOfItems = 0\n\n def fnv1(self, key):\n \"\"\"\n FNV-1 64-bit hash function\n\n Implement this, and/or DJB2.\n \"\"\"\n hash = 14695981039346656037\n for n in key.encode():\n hash = hash ^ n\n hash = hash * 1099511628211\n return hash\n\n def djb2(self, key):\n \"\"\"\n DJB2 32-bit hash function\n\n Implement this, and/or FNV-1.\n \"\"\"\n hash = 5381\n for n in key.encode():\n hash = hash * 33 + n\n return hash\n <mask token>\n\n def put(self, key, value):\n \"\"\"\n Store the value with the given key.\n\n Hash collisions should be handled with Linked List Chaining.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n if current.key == key:\n current.value = value\n else:\n current.next = HashTableEntry(key, value)\n self.numberOfItems += 1\n else:\n self.storage[hi] = HashTableEntry(key, value)\n self.numberOfItems += 1\n self.calculateLoad()\n <mask token>\n <mask token>\n\n def resize(self, factor=2):\n \"\"\"\n Doubles the capacity of the hash table and\n rehash all key/value pairs.\n\n Implement this.\n \"\"\"\n self.capacity = round(self.capacity * factor)\n newarr = [None] * self.capacity\n for i, v in enumerate(self.storage):\n while v:\n hi = self.hash_index(v.key)\n if newarr[hi]:\n current = newarr[hi]\n while current.next:\n current = current.next\n current.next = HashTableEntry(v.key, v.value)\n else:\n newarr[hi] = HashTableEntry(v.key, v.value)\n v = v.next\n self.storage = newarr\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass HashTable:\n \"\"\"\n A hash table that with `capacity` buckets\n that accepts string keys\n\n Implement this.\n \"\"\"\n\n def __init__(self, capacity):\n self.capacity = capacity\n self.storage = [None] * capacity\n self.numberOfItems = 0\n\n def fnv1(self, key):\n \"\"\"\n FNV-1 64-bit hash function\n\n Implement this, and/or DJB2.\n \"\"\"\n hash = 14695981039346656037\n for n in key.encode():\n hash = hash ^ n\n hash = hash * 1099511628211\n return hash\n\n def djb2(self, key):\n \"\"\"\n DJB2 32-bit hash function\n\n Implement this, and/or FNV-1.\n \"\"\"\n hash = 5381\n for n in key.encode():\n hash = hash * 33 + n\n return hash\n\n def hash_index(self, key):\n \"\"\"\n Take an arbitrary key and return a valid integer index\n between within the storage capacity of the hash table.\n \"\"\"\n return self.djb2(key) % self.capacity\n\n def put(self, key, value):\n \"\"\"\n Store the value with the given key.\n\n Hash collisions should be handled with Linked List Chaining.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n if current.key == key:\n current.value = value\n else:\n current.next = HashTableEntry(key, value)\n self.numberOfItems += 1\n else:\n self.storage[hi] = HashTableEntry(key, value)\n self.numberOfItems += 1\n self.calculateLoad()\n\n def delete(self, key):\n \"\"\"\n Remove the value stored with the given key.\n\n Print a warning if the key is not found.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n current = self.storage[hi]\n prev = self.storage[hi]\n while current and current.key != key:\n prev = current\n current = current.next\n if current and current.key == key:\n if current == self.storage[hi]:\n self.storage[hi] = current.next\n else:\n prev.next = current.next\n self.numberOfItems -= 1\n else:\n print('WARNING: no key')\n self.calculateLoad()\n\n def get(self, 
key):\n \"\"\"\n Retrieve the value stored with the given key.\n\n Returns None if the key is not found.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n if self.storage[hi].next:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n return current.value\n else:\n return self.storage[hi].value\n return None\n\n def resize(self, factor=2):\n \"\"\"\n Doubles the capacity of the hash table and\n rehash all key/value pairs.\n\n Implement this.\n \"\"\"\n self.capacity = round(self.capacity * factor)\n newarr = [None] * self.capacity\n for i, v in enumerate(self.storage):\n while v:\n hi = self.hash_index(v.key)\n if newarr[hi]:\n current = newarr[hi]\n while current.next:\n current = current.next\n current.next = HashTableEntry(v.key, v.value)\n else:\n newarr[hi] = HashTableEntry(v.key, v.value)\n v = v.next\n self.storage = newarr\n\n def calculateLoad(self):\n load = self.numberOfItems / len(self.storage)\n if load > 0.7:\n self.resize(2)\n elif load < 0.2:\n self.resize(0.5)\n pass\n\n\n<mask token>\n",
"step-3": "class HashTableEntry:\n <mask token>\n\n def __init__(self, key, value):\n self.key = key\n self.value = value\n self.next = None\n\n\nclass HashTable:\n \"\"\"\n A hash table that with `capacity` buckets\n that accepts string keys\n\n Implement this.\n \"\"\"\n\n def __init__(self, capacity):\n self.capacity = capacity\n self.storage = [None] * capacity\n self.numberOfItems = 0\n\n def fnv1(self, key):\n \"\"\"\n FNV-1 64-bit hash function\n\n Implement this, and/or DJB2.\n \"\"\"\n hash = 14695981039346656037\n for n in key.encode():\n hash = hash ^ n\n hash = hash * 1099511628211\n return hash\n\n def djb2(self, key):\n \"\"\"\n DJB2 32-bit hash function\n\n Implement this, and/or FNV-1.\n \"\"\"\n hash = 5381\n for n in key.encode():\n hash = hash * 33 + n\n return hash\n\n def hash_index(self, key):\n \"\"\"\n Take an arbitrary key and return a valid integer index\n between within the storage capacity of the hash table.\n \"\"\"\n return self.djb2(key) % self.capacity\n\n def put(self, key, value):\n \"\"\"\n Store the value with the given key.\n\n Hash collisions should be handled with Linked List Chaining.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n if current.key == key:\n current.value = value\n else:\n current.next = HashTableEntry(key, value)\n self.numberOfItems += 1\n else:\n self.storage[hi] = HashTableEntry(key, value)\n self.numberOfItems += 1\n self.calculateLoad()\n\n def delete(self, key):\n \"\"\"\n Remove the value stored with the given key.\n\n Print a warning if the key is not found.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n current = self.storage[hi]\n prev = self.storage[hi]\n while current and current.key != key:\n prev = current\n current = current.next\n if current and current.key == key:\n if current == self.storage[hi]:\n self.storage[hi] = current.next\n else:\n prev.next 
= current.next\n self.numberOfItems -= 1\n else:\n print('WARNING: no key')\n self.calculateLoad()\n\n def get(self, key):\n \"\"\"\n Retrieve the value stored with the given key.\n\n Returns None if the key is not found.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n if self.storage[hi].next:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n return current.value\n else:\n return self.storage[hi].value\n return None\n\n def resize(self, factor=2):\n \"\"\"\n Doubles the capacity of the hash table and\n rehash all key/value pairs.\n\n Implement this.\n \"\"\"\n self.capacity = round(self.capacity * factor)\n newarr = [None] * self.capacity\n for i, v in enumerate(self.storage):\n while v:\n hi = self.hash_index(v.key)\n if newarr[hi]:\n current = newarr[hi]\n while current.next:\n current = current.next\n current.next = HashTableEntry(v.key, v.value)\n else:\n newarr[hi] = HashTableEntry(v.key, v.value)\n v = v.next\n self.storage = newarr\n\n def calculateLoad(self):\n load = self.numberOfItems / len(self.storage)\n if load > 0.7:\n self.resize(2)\n elif load < 0.2:\n self.resize(0.5)\n pass\n\n\n<mask token>\n",
"step-4": "class HashTableEntry:\n \"\"\"\n Hash Table entry, as a linked list node.\n \"\"\"\n\n def __init__(self, key, value):\n self.key = key\n self.value = value\n self.next = None\n\n\nclass HashTable:\n \"\"\"\n A hash table that with `capacity` buckets\n that accepts string keys\n\n Implement this.\n \"\"\"\n\n def __init__(self, capacity):\n self.capacity = capacity\n self.storage = [None] * capacity\n self.numberOfItems = 0\n\n def fnv1(self, key):\n \"\"\"\n FNV-1 64-bit hash function\n\n Implement this, and/or DJB2.\n \"\"\"\n hash = 14695981039346656037\n for n in key.encode():\n hash = hash ^ n\n hash = hash * 1099511628211\n return hash\n\n def djb2(self, key):\n \"\"\"\n DJB2 32-bit hash function\n\n Implement this, and/or FNV-1.\n \"\"\"\n hash = 5381\n for n in key.encode():\n hash = hash * 33 + n\n return hash\n\n def hash_index(self, key):\n \"\"\"\n Take an arbitrary key and return a valid integer index\n between within the storage capacity of the hash table.\n \"\"\"\n return self.djb2(key) % self.capacity\n\n def put(self, key, value):\n \"\"\"\n Store the value with the given key.\n\n Hash collisions should be handled with Linked List Chaining.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n if current.key == key:\n current.value = value\n else:\n current.next = HashTableEntry(key, value)\n self.numberOfItems += 1\n else:\n self.storage[hi] = HashTableEntry(key, value)\n self.numberOfItems += 1\n self.calculateLoad()\n\n def delete(self, key):\n \"\"\"\n Remove the value stored with the given key.\n\n Print a warning if the key is not found.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n current = self.storage[hi]\n prev = self.storage[hi]\n while current and current.key != key:\n prev = current\n current = current.next\n if current and current.key == key:\n if current == self.storage[hi]:\n 
self.storage[hi] = current.next\n else:\n prev.next = current.next\n self.numberOfItems -= 1\n else:\n print('WARNING: no key')\n self.calculateLoad()\n\n def get(self, key):\n \"\"\"\n Retrieve the value stored with the given key.\n\n Returns None if the key is not found.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n if self.storage[hi].next:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n return current.value\n else:\n return self.storage[hi].value\n return None\n\n def resize(self, factor=2):\n \"\"\"\n Doubles the capacity of the hash table and\n rehash all key/value pairs.\n\n Implement this.\n \"\"\"\n self.capacity = round(self.capacity * factor)\n newarr = [None] * self.capacity\n for i, v in enumerate(self.storage):\n while v:\n hi = self.hash_index(v.key)\n if newarr[hi]:\n current = newarr[hi]\n while current.next:\n current = current.next\n current.next = HashTableEntry(v.key, v.value)\n else:\n newarr[hi] = HashTableEntry(v.key, v.value)\n v = v.next\n self.storage = newarr\n\n def calculateLoad(self):\n load = self.numberOfItems / len(self.storage)\n if load > 0.7:\n self.resize(2)\n elif load < 0.2:\n self.resize(0.5)\n pass\n\n\nif __name__ == '__main__':\n ht = HashTable(2)\n ht.put('line_1', '111')\n ht.put('line_2', '222')\n ht.put('line_3', '333')\n ht.put('line_4', 'sss')\n ht.put('line_5', 'ddd')\n ht.put('line_6', 'ggg')\n ht.put('line_7', 'hhh')\n ht.put('line_12', 'jjj')\n print('')\n old_capacity = len(ht.storage)\n ht.resize()\n new_capacity = len(ht.storage)\n print(f'\\nResized from {old_capacity} to {new_capacity}.\\n')\n print('')\n for i, v in enumerate(ht.storage):\n while v:\n print(i, v.value)\n v = v.next\n print('')\n ht.delete('line_3')\n print('')\n for i, v in enumerate(ht.storage):\n while v:\n print(i, v.value)\n v = v.next\n print('')\n print('')\n",
"step-5": "class HashTableEntry:\n \"\"\"\n Hash Table entry, as a linked list node.\n \"\"\"\n\n def __init__(self, key, value):\n self.key = key\n self.value = value\n self.next = None\n\n\nclass HashTable:\n \"\"\"\n A hash table that with `capacity` buckets\n that accepts string keys\n\n Implement this.\n \"\"\"\n\n def __init__(self, capacity):\n self.capacity = capacity\n self.storage = [None] * capacity\n self.numberOfItems = 0\n\n def fnv1(self, key):\n \"\"\"\n FNV-1 64-bit hash function\n\n Implement this, and/or DJB2.\n \"\"\"\n # hash = 0xff\n hash = 0xcbf29ce484222325\n for n in key.encode():\n # print(n)\n hash = hash ^ n\n hash = hash * 0x100000001b3\n\n # print(hash)\n return hash\n\n def djb2(self, key):\n \"\"\"\n DJB2 32-bit hash function\n\n Implement this, and/or FNV-1.\n \"\"\"\n\n hash = 5381\n for n in key.encode():\n # hash = ((hash << 5) + hash) + n\n hash = hash * 33 + n\n\n return hash\n # return hash & 0xFFFFFFFF\n\n def hash_index(self, key):\n \"\"\"\n Take an arbitrary key and return a valid integer index\n between within the storage capacity of the hash table.\n \"\"\"\n # return self.fnv1(key) % self.capacity\n return self.djb2(key) % self.capacity\n\n def put(self, key, value):\n \"\"\"\n Store the value with the given key.\n\n Hash collisions should be handled with Linked List Chaining.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if self.storage[hi]:\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n\n if current.key == key:\n current.value = value\n else:\n current.next = HashTableEntry(key, value)\n self.numberOfItems += 1\n else:\n self.storage[hi] = HashTableEntry(key, value)\n self.numberOfItems += 1\n\n self.calculateLoad()\n\n def delete(self, key):\n \"\"\"\n Remove the value stored with the given key.\n\n Print a warning if the key is not found.\n\n Implement this.\n \"\"\"\n\n hi = self.hash_index(key)\n\n # if that hi is empty ignore\n # if 
self.storage[hi] is None:\n # print(\"WARNING: no key\")\n # return\n\n current = self.storage[hi]\n prev = self.storage[hi]\n while current and current.key != key:\n prev = current\n current = current.next\n\n if (current and current.key == key):\n # if its the first link in the list\n if (current == self.storage[hi]):\n self.storage[hi] = current.next\n else:\n prev.next = current.next\n\n self.numberOfItems -= 1\n else:\n print(\"WARNING: no key\")\n\n self.calculateLoad()\n\n def get(self, key):\n \"\"\"\n Retrieve the value stored with the given key.\n\n Returns None if the key is not found.\n\n Implement this.\n \"\"\"\n hi = self.hash_index(key)\n if (self.storage[hi]):\n if(self.storage[hi].next):\n current = self.storage[hi]\n while current.next and current.key != key:\n current = current.next\n return current.value\n else:\n return self.storage[hi].value\n\n return None\n\n def resize(self, factor=2):\n \"\"\"\n Doubles the capacity of the hash table and\n rehash all key/value pairs.\n\n Implement this.\n \"\"\"\n self.capacity = round(self.capacity*factor)\n newarr = [None] * self.capacity\n\n for i, v in enumerate(self.storage):\n while v:\n hi = self.hash_index(v.key)\n if newarr[hi]:\n current = newarr[hi]\n while current.next:\n current = current.next\n\n current.next = HashTableEntry(v.key, v.value)\n else:\n newarr[hi] = HashTableEntry(v.key, v.value)\n\n v = v.next\n\n self.storage = newarr\n\n # Solution 2 - Much cleaner\n # newHashTable = HashTable(round(self.capacity*factor))\n # for i, v in enumerate(self.storage):\n # while v:\n # newHashTable.put(v.key, v.value)\n # v = v.next\n\n # self.capacity = newHashTable.capacity\n # self.storage = newHashTable.storage\n\n def calculateLoad(self):\n load = self.numberOfItems/len(self.storage)\n\n # print(\"Items:\\t\", ht.numberOfItems)\n # print(\"Storage:\", len(ht.storage))\n # print(\"LOAD:\\t\", load)\n\n # comment code bellow to pass tests\n if load > 0.7:\n self.resize(2)\n elif load < 0.2:\n 
self.resize(0.5)\n\n pass\n\n\nif __name__ == \"__main__\":\n ht = HashTable(2)\n\n ht.put(\"line_1\", \"111\")\n ht.put(\"line_2\", \"222\")\n ht.put(\"line_3\", \"333\")\n ht.put(\"line_4\", \"sss\")\n ht.put(\"line_5\", \"ddd\")\n ht.put(\"line_6\", \"ggg\")\n ht.put(\"line_7\", \"hhh\")\n ht.put(\"line_12\", \"jjj\")\n\n print(\"\")\n\n # Test storing beyond capacity\n # print(ht.get(\"line_1\"))\n # print(ht.get(\"line_2\"))\n # print(ht.get(\"line_3\"))\n # print(ht.get(\"line_4\"))\n # print(ht.get(\"line_5\"))\n # print(ht.get(\"line_6\"))\n # print(ht.get(\"line_7\"))\n\n # Test resizing\n old_capacity = len(ht.storage)\n ht.resize()\n new_capacity = len(ht.storage)\n\n print(f\"\\nResized from {old_capacity} to {new_capacity}.\\n\")\n\n # print(\"1: \", ht.storage[1].value)\n # print(\"1: \", ht.storage[1].next.value)\n\n # print(\"3: \", ht.storage[3].value)\n # print(\"3: \", ht.storage[3].next.value)\n # print(\"3: \", ht.storage[3].next.next.value)\n\n print(\"\")\n for i, v in enumerate(ht.storage):\n while v:\n print(i, v.value)\n v = v.next\n print(\"\")\n ht.delete(\"line_3\")\n print(\"\")\n for i, v in enumerate(ht.storage):\n while v:\n print(i, v.value)\n v = v.next\n print(\"\")\n\n # Test if data intact after resizing\n # print(ht.get(\"line_1\"))\n # print(ht.get(\"line_2\"))\n # print(ht.get(\"line_3\"))\n # print(ht.get(\"line_4\"))\n # print(ht.get(\"line_5\"))\n # print(ht.get(\"line_6\"))\n # print(ht.get(\"line_7\"))\n\n print(\"\")\n",
"step-ids": [
6,
11,
13,
15,
16
]
}
|
[
6,
11,
13,
15,
16
] |
import sys
import array
import random
import math
import gameduino2.prep
import zlib
import struct
import gameduino as GD
from eve import align4
from PIL import Image
import numpy as np
import wave
import common
# NOTE(review): GLOWR is assigned twice; the (128, 256) value is dead and
# immediately superseded by (160, 400) — confirm it is leftover from an
# earlier asset size before removing.
GLOWR = (128, 256)
GLOWR = (160, 400)

# The nightstrike example assets live outside this package, so extend the
# module search path before importing the generated asset module.
sys.path.append("/home/jamesb/git/gd2-asset/examples/nightstrike")
import night0
class Renderer(common.Branded):
    """Renders the spinning nightstrike missile sprite on an EVE display.

    Each draw() call clears the screen to dark blue, blits the missile
    bitmap at (640, 360), and advances the rotation angle by one unit
    per frame.
    """

    def __init__(self, eve):
        self.eve = eve  # EVE display-list handle shared by load()/draw()
        self.t = 0      # frame counter; also used as the sprite angle

    def load(self):
        """Stream the pre-built night0 asset blob into the EVE chip."""
        # Use a context manager so the file handle is released promptly;
        # the original left the handle open and relied on garbage
        # collection to close it.
        with open("/home/jamesb/git/gd2-asset/examples/nightstrike/night0.gd3",
                  "rb") as blob:
            self.eve.cc(blob.read())

    def draw(self):
        """Emit one frame of the display list."""
        eve = self.eve

        eve.VertexFormat(3)
        eve.ClearColorRGB(0, 0, 100)
        eve.Clear()

        eve.Begin(GD.BITMAPS)
        # Ignore the destination colour entirely; the sprite's alpha
        # decides what gets written.
        eve.BlendFunc(GD.SRC_ALPHA, 0)

        night0.missile_a.draw(eve, 640, 360, 2, angle=self.t)
        self.t += 1
|
normal
|
{
"blob_id": "2471daad5969da29a20417a099a3ecd92fa036b4",
"index": 6393,
"step-1": "<mask token>\n\n\nclass Renderer(common.Branded):\n\n def __init__(self, eve):\n self.eve = eve\n self.t = 0\n\n def load(self):\n eve = self.eve\n eve.cc(open(\n '/home/jamesb/git/gd2-asset/examples/nightstrike/night0.gd3',\n 'rb').read())\n\n def draw(self):\n eve = self.eve\n eve.VertexFormat(3)\n eve.ClearColorRGB(0, 0, 100)\n eve.Clear()\n eve.Begin(GD.BITMAPS)\n eve.BlendFunc(GD.SRC_ALPHA, 0)\n night0.missile_a.draw(eve, 640, 360, 2, angle=self.t)\n self.t += 1\n",
"step-2": "<mask token>\nsys.path.append('/home/jamesb/git/gd2-asset/examples/nightstrike')\n<mask token>\n\n\nclass Renderer(common.Branded):\n\n def __init__(self, eve):\n self.eve = eve\n self.t = 0\n\n def load(self):\n eve = self.eve\n eve.cc(open(\n '/home/jamesb/git/gd2-asset/examples/nightstrike/night0.gd3',\n 'rb').read())\n\n def draw(self):\n eve = self.eve\n eve.VertexFormat(3)\n eve.ClearColorRGB(0, 0, 100)\n eve.Clear()\n eve.Begin(GD.BITMAPS)\n eve.BlendFunc(GD.SRC_ALPHA, 0)\n night0.missile_a.draw(eve, 640, 360, 2, angle=self.t)\n self.t += 1\n",
"step-3": "<mask token>\nGLOWR = 128, 256\nGLOWR = 160, 400\nsys.path.append('/home/jamesb/git/gd2-asset/examples/nightstrike')\n<mask token>\n\n\nclass Renderer(common.Branded):\n\n def __init__(self, eve):\n self.eve = eve\n self.t = 0\n\n def load(self):\n eve = self.eve\n eve.cc(open(\n '/home/jamesb/git/gd2-asset/examples/nightstrike/night0.gd3',\n 'rb').read())\n\n def draw(self):\n eve = self.eve\n eve.VertexFormat(3)\n eve.ClearColorRGB(0, 0, 100)\n eve.Clear()\n eve.Begin(GD.BITMAPS)\n eve.BlendFunc(GD.SRC_ALPHA, 0)\n night0.missile_a.draw(eve, 640, 360, 2, angle=self.t)\n self.t += 1\n",
"step-4": "import sys\nimport array\nimport random\nimport math\nimport gameduino2.prep\nimport zlib\nimport struct\nimport gameduino as GD\nfrom eve import align4\nfrom PIL import Image\nimport numpy as np\nimport wave\nimport common\nGLOWR = 128, 256\nGLOWR = 160, 400\nsys.path.append('/home/jamesb/git/gd2-asset/examples/nightstrike')\nimport night0\n\n\nclass Renderer(common.Branded):\n\n def __init__(self, eve):\n self.eve = eve\n self.t = 0\n\n def load(self):\n eve = self.eve\n eve.cc(open(\n '/home/jamesb/git/gd2-asset/examples/nightstrike/night0.gd3',\n 'rb').read())\n\n def draw(self):\n eve = self.eve\n eve.VertexFormat(3)\n eve.ClearColorRGB(0, 0, 100)\n eve.Clear()\n eve.Begin(GD.BITMAPS)\n eve.BlendFunc(GD.SRC_ALPHA, 0)\n night0.missile_a.draw(eve, 640, 360, 2, angle=self.t)\n self.t += 1\n",
"step-5": "import sys\nimport array\nimport random\nimport math\nimport gameduino2.prep\nimport zlib\nimport struct\nimport gameduino as GD\nfrom eve import align4\n\nfrom PIL import Image\nimport numpy as np\nimport wave\nimport common\n\nGLOWR = (128, 256)\nGLOWR = (160, 400)\n\nsys.path.append(\"/home/jamesb/git/gd2-asset/examples/nightstrike\")\nimport night0\n\nclass Renderer(common.Branded):\n def __init__(self, eve):\n self.eve = eve\n self.t = 0\n\n def load(self):\n eve = self.eve\n\n eve.cc(open(\"/home/jamesb/git/gd2-asset/examples/nightstrike/night0.gd3\", \"rb\").read())\n\n def draw(self):\n eve = self.eve\n\n eve.VertexFormat(3)\n eve.ClearColorRGB(0, 0, 100)\n eve.Clear()\n\n eve.Begin(GD.BITMAPS)\n eve.BlendFunc(GD.SRC_ALPHA, 0)\n\n night0.missile_a.draw(eve, 640, 360, 2, angle = self.t)\n self.t += 1\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from django.http import request
from restapp.ExcelSheet import *
'''ApiHomeDict={}
class LoadDict():
e = ExcelSheetAll()
ApiHomeDict = e.apiHomeDict()
print ApiHomeDict
class ReturnApi:
def returnDict(self):
return ApiHomeDict'''
'''if "ApiDictionary" in request.session:
print 'Dictioanry is already stored in session'
else:
e=ExcelSheetAll()
ApiHomeDict=e.apiHomeDict()
request.session['ApiDictionary'] = ApiHomeDict
#print ApiHomeDict
print request.session['ApiDictionary']'''
|
normal
|
{
"blob_id": "ff924b803a875d3f6201baa2c1251a6c5b8cde61",
"index": 5903,
"step-1": "<mask token>\n",
"step-2": "from django.http import request\nfrom restapp.ExcelSheet import *\n<mask token>\n",
"step-3": "from django.http import request\r\nfrom restapp.ExcelSheet import *\r\n\r\n\r\n'''ApiHomeDict={}\r\nclass LoadDict():\r\n e = ExcelSheetAll()\r\n ApiHomeDict = e.apiHomeDict()\r\n print ApiHomeDict\r\nclass ReturnApi:\r\n def returnDict(self):\r\n return ApiHomeDict'''\r\n'''if \"ApiDictionary\" in request.session:\r\n print 'Dictioanry is already stored in session'\r\nelse:\r\n e=ExcelSheetAll()\r\n ApiHomeDict=e.apiHomeDict()\r\n request.session['ApiDictionary'] = ApiHomeDict\r\n #print ApiHomeDict\r\nprint request.session['ApiDictionary']'''\r\n\r\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from .base import BaseLevel
from map_objects import DefinedMap
from entity.monster import Daemon
from entity.weapons import Axe
class FinalLevel(BaseLevel):
    """The demon lair: a hand-authored map with the Daemon boss inside."""

    def __init__(self):
        """Load the lair layout, position the entrance and spawn the boss."""
        lair = DefinedMap('levels/demon_lair.xp')
        super().__init__(lair.width, lair.height)
        self.map = lair
        self.set_entrance(50, 29)
        # Boss placed at (8, 27); third argument is 10 — presumably a
        # power/level parameter, verify against entity.monster.Daemon.
        self.add_entity(Daemon(8, 27, 10))

    def add_player(self, player):
        """Register the player, then grant lair-specific sight and weapon."""
        super().add_player(player)
        self.player.fov = 100
        self.player.weapon = Axe()
|
normal
|
{
"blob_id": "7ba8f0bd962413f6ff825df27330447b11360f10",
"index": 6089,
"step-1": "<mask token>\n\n\nclass FinalLevel(BaseLevel):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass FinalLevel(BaseLevel):\n\n def __init__(self):\n lvl_map = DefinedMap('levels/demon_lair.xp')\n super().__init__(lvl_map.width, lvl_map.height)\n self.map = lvl_map\n self.set_entrance(50, 29)\n boss = Daemon(8, 27, 10)\n self.add_entity(boss)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass FinalLevel(BaseLevel):\n\n def __init__(self):\n lvl_map = DefinedMap('levels/demon_lair.xp')\n super().__init__(lvl_map.width, lvl_map.height)\n self.map = lvl_map\n self.set_entrance(50, 29)\n boss = Daemon(8, 27, 10)\n self.add_entity(boss)\n\n def add_player(self, player):\n super().add_player(player)\n self.player.fov = 100\n self.player.weapon = Axe()\n",
"step-4": "from .base import BaseLevel\nfrom map_objects import DefinedMap\nfrom entity.monster import Daemon\nfrom entity.weapons import Axe\n\n\nclass FinalLevel(BaseLevel):\n\n def __init__(self):\n lvl_map = DefinedMap('levels/demon_lair.xp')\n super().__init__(lvl_map.width, lvl_map.height)\n self.map = lvl_map\n self.set_entrance(50, 29)\n boss = Daemon(8, 27, 10)\n self.add_entity(boss)\n\n def add_player(self, player):\n super().add_player(player)\n self.player.fov = 100\n self.player.weapon = Axe()\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras.layers import Dense, Input
from keras.layers import Conv2D, Flatten, Lambda
from keras.layers import Reshape, Conv2DTranspose
from keras.models import Model
from keras.losses import mse, binary_crossentropy
from keras.utils import plot_model
from keras import backend as K
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Start from a clean backend graph/session so repeated runs (e.g. in a
# notebook) do not accumulate layers from previous model builds.
K.clear_session()

# Fixed NumPy seed for reproducibility of anything that draws from
# np.random at module scope.
np.random.seed(237)
# reparameterization trick
# instead of sampling from Q(z|X), sample eps = N(0,I)
# then z = z_mean + sqrt(var)*eps
def sampling(args):
    """Draw z ~ N(z_mean, exp(z_log_var)) via the reparameterization trick.

    Rather than sampling from Q(z|X) directly, sample eps ~ N(0, I) and
    shift/scale it, which keeps the operation differentiable.

    # Arguments
        args (tensor): mean and log of variance of Q(z|X)

    # Returns
        z (tensor): sampled latent vector
    """
    mean, log_var = args
    # batch size is dynamic, latent width is static
    sample_shape = (K.shape(mean)[0], K.int_shape(mean)[1])
    # random_normal defaults to mean=0, std=1
    noise = K.random_normal(shape=sample_shape)
    return mean + noise * K.exp(0.5 * log_var)
def process_data(data_path):
    """Load frames from *data_path*, split train/test, reshape to NHWC and
    scale pixel values into [0, 1].

    Returns (data, X_train, X_test, image_size) where image_size is the
    (height, width) of a single frame.
    """
    raw = np.load(data_path)

    train, test = train_test_split(raw, test_size=0.05, random_state=42)
    print('Shape train/test:', train.shape, test.shape)

    image_size = train.shape[1], train.shape[2]
    nhwc = [-1, image_size[0], image_size[1], 1]

    # Add the trailing channel axis and normalize to float32 in [0, 1].
    raw, train, test = (
        np.reshape(arr, nhwc).astype('float32') / 255
        for arr in (raw, train, test)
    )

    return raw, train, test, image_size
def construct_vae(image_size, kernel_size, latent_dim):
    """Build a convolutional VAE for single-channel images.

    Parameters:
        image_size: (height, width) of the input frames.
        kernel_size: convolution kernel size, e.g. (3, 3).
        latent_dim: dimensionality of the latent vector z.

    Returns:
        (vae, encoder, decoder) -- three Keras models sharing weights.
        Side effects: prints all three model summaries and writes
        vae_cnn_encoder.png, vae_cnn_decoder.png and vae_cnn.png.
    """
    # network parameters
    input_shape = (image_size[0], image_size[1], 1)

    # VAE model = encoder + decoder
    # build encoder model: three conv layers, the middle one downsamples 2x
    inputs = Input(shape=input_shape, name='encoder_input')
    x = inputs
    x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)
    x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu', strides=2, padding='same')(x)
    x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)

    # shape info needed to build decoder model
    shape = K.int_shape(x)

    # generate latent vector Q(z|X)
    x = Flatten()(x)
    x = Dense(16, activation='relu')(x)
    z_mean = Dense(latent_dim, name='z_mean')(x)
    z_log_var = Dense(latent_dim, name='z_log_var')(x)

    # use reparameterization trick to push the sampling out as input
    # note that "output_shape" isn't necessary with the TensorFlow backend
    z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean, z_log_var])

    # instantiate encoder model
    encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')
    encoder.summary()
    plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)

    # build decoder model: mirror of the encoder using transposed convs
    latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
    x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)
    x = Reshape((shape[1], shape[2], shape[3]))(x)

    x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)
    x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation='relu', strides=2, padding='same')(x)
    x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)

    outputs = Conv2DTranspose(filters=1,
                              kernel_size=kernel_size,
                              activation='sigmoid',
                              padding='same',
                              name='decoder_output')(x)

    # instantiate decoder model
    decoder = Model(latent_inputs, outputs, name='decoder')
    decoder.summary()
    plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)

    # instantiate VAE model: input -> encoder -> z (index 2) -> decoder
    outputs = decoder(encoder(inputs)[2])
    vae = Model(inputs, outputs, name='vae')

    # VAE loss = mse_loss or xent_loss + kl_loss
    # Per-pixel binary cross-entropy, scaled up to a per-image sum...
    reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(outputs))

    reconstruction_loss *= image_size[0] * image_size[1]
    # ...plus the KL divergence between Q(z|X) and the unit-Gaussian prior.
    kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
    kl_loss = K.sum(kl_loss, axis=-1)
    kl_loss *= -0.5
    vae_loss = K.mean(reconstruction_loss + kl_loss)
    # add_loss attaches the combined objective, so compile takes no loss arg
    vae.add_loss(vae_loss)
    vae.compile(optimizer='rmsprop')
    vae.summary()
    plot_model(vae, to_file='vae_cnn.png', show_shapes=True)

    return vae, encoder, decoder
if __name__ == '__main__':

    # Toggle between training a fresh model and loading saved weights.
    is_train = False
    data_file = '../data/out/moment_frames_5.npy'
    data, X_train, X_test, im_size = process_data(data_file)

    # Hyperparameters.
    kernel_size = (3, 3)
    latent_dim = 128
    batch_size = 128
    epochs = 10

    vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)

    if is_train:
        # The VAE objective was attached via add_loss(), so there are no
        # y targets: validation_data passes None for the labels.
        history = vae.fit(X_train,
                          epochs=epochs,
                          batch_size=batch_size,
                          validation_data=(X_test, None),
                          verbose=2)
        vae.save_weights('vae_cnn.h5')

        # summarize history for loss
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'test'], loc='upper left')
        plt.savefig('vae_train.jpeg')
        plt.show()

    else:
        vae.load_weights('vae_cnn.h5')

    # Transform the full dataset to its latent representation and persist
    # the mean vectors (encoder output index 0) for downstream analysis.
    encoded_data = encoder.predict(data, batch_size=batch_size)

    pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)

    print('Completed.')
|
normal
|
{
"blob_id": "88343b9c5cac3510e8cea75ac5b11f517ddc164b",
"index": 5943,
"step-1": "<mask token>\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\n<mask token>\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', 
name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n data = np.load(data_path)\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n image_size = X_train.shape[1], X_train.shape[2]\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n latent_inputs = 
Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\n<mask token>\n",
"step-3": "<mask token>\nK.clear_session()\nnp.random.seed(237)\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n data = np.load(data_path)\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n image_size = X_train.shape[1], X_train.shape[2]\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n 
latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\nif __name__ == '__main__':\n is_train = False\n data_file = '../data/out/moment_frames_5.npy'\n data, X_train, X_test, im_size = process_data(data_file)\n kernel_size = 3, 3\n latent_dim = 128\n batch_size = 128\n epochs = 10\n vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)\n if is_train:\n history = vae.fit(X_train, epochs=epochs, batch_size=batch_size,\n validation_data=(X_test, None), verbose=2)\n vae.save_weights('vae_cnn.h5')\n plt.plot(history.history['loss'])\n plt.plot(history.history['val_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 
'test'], loc='upper left')\n plt.savefig('vae_train.jpeg')\n plt.show()\n else:\n vae.load_weights('vae_cnn.h5')\n encoded_data = encoder.predict(data, batch_size=batch_size)\n pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)\n print('Completed.')\n",
"step-4": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom keras.layers import Dense, Input\nfrom keras.layers import Conv2D, Flatten, Lambda\nfrom keras.layers import Reshape, Conv2DTranspose\nfrom keras.models import Model\nfrom keras.losses import mse, binary_crossentropy\nfrom keras.utils import plot_model\nfrom keras import backend as K\nfrom sklearn.model_selection import train_test_split\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nK.clear_session()\nnp.random.seed(237)\n\n\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n data = np.load(data_path)\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n image_size = X_train.shape[1], X_train.shape[2]\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n input_shape = image_size[0], image_size[1], 1\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu',\n strides=2, padding='same')(x)\n x = 
Conv2D(filters=64, kernel_size=kernel_size, activation='relu',\n strides=1, padding='same')(x)\n shape = K.int_shape(x)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean,\n z_log_var])\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation=\n 'relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation=\n 'relu', strides=1, padding='same')(x)\n outputs = Conv2DTranspose(filters=1, kernel_size=kernel_size,\n activation='sigmoid', padding='same', name='decoder_output')(x)\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n outputs = decoder(encoder(inputs)[2])\n vae = Model(inputs, outputs, name='vae')\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(\n outputs))\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n return vae, encoder, decoder\n\n\nif __name__ == '__main__':\n is_train = False\n data_file = '../data/out/moment_frames_5.npy'\n data, X_train, 
X_test, im_size = process_data(data_file)\n kernel_size = 3, 3\n latent_dim = 128\n batch_size = 128\n epochs = 10\n vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)\n if is_train:\n history = vae.fit(X_train, epochs=epochs, batch_size=batch_size,\n validation_data=(X_test, None), verbose=2)\n vae.save_weights('vae_cnn.h5')\n plt.plot(history.history['loss'])\n plt.plot(history.history['val_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 'test'], loc='upper left')\n plt.savefig('vae_train.jpeg')\n plt.show()\n else:\n vae.load_weights('vae_cnn.h5')\n encoded_data = encoder.predict(data, batch_size=batch_size)\n pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)\n print('Completed.')\n",
"step-5": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom keras.layers import Dense, Input\nfrom keras.layers import Conv2D, Flatten, Lambda\nfrom keras.layers import Reshape, Conv2DTranspose\nfrom keras.models import Model\nfrom keras.losses import mse, binary_crossentropy\nfrom keras.utils import plot_model\nfrom keras import backend as K\n\nfrom sklearn.model_selection import train_test_split\n\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nK.clear_session()\n\nnp.random.seed(237)\n\n\n# reparameterization trick\n# instead of sampling from Q(z|X), sample eps = N(0,I)\n# then z = z_mean + sqrt(var)*eps\ndef sampling(args):\n \"\"\"Reparameterization trick by sampling fr an isotropic unit Gaussian.\n # Arguments\n args (tensor): mean and log of variance of Q(z|X)\n # Returns\n z (tensor): sampled latent vector\n \"\"\"\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon\n\n\ndef process_data(data_path):\n\n data = np.load(data_path)\n\n X_train, X_test = train_test_split(data, test_size=0.05, random_state=42)\n print('Shape train/test:', X_train.shape, X_test.shape)\n\n image_size = X_train.shape[1], X_train.shape[2]\n\n data = np.reshape(data, [-1, image_size[0], image_size[1], 1])\n X_train = np.reshape(X_train, [-1, image_size[0], image_size[1], 1])\n X_test = np.reshape(X_test, [-1, image_size[0], image_size[1], 1])\n\n data = data.astype('float32') / 255\n X_train = X_train.astype('float32') / 255\n X_test = X_test.astype('float32') / 255\n\n return data, X_train, X_test, image_size\n\n\ndef construct_vae(image_size, kernel_size, latent_dim):\n # network parameters\n input_shape = (image_size[0], image_size[1], 1)\n\n # VAE model = encoder + decoder\n # build encoder 
model\n inputs = Input(shape=input_shape, name='encoder_input')\n x = inputs\n x = Conv2D(filters=16, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n x = Conv2D(filters=32, kernel_size=kernel_size, activation='relu', strides=2, padding='same')(x)\n x = Conv2D(filters=64, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n\n # shape info needed to build decoder model\n shape = K.int_shape(x)\n\n # generate latent vector Q(z|X)\n x = Flatten()(x)\n x = Dense(16, activation='relu')(x)\n z_mean = Dense(latent_dim, name='z_mean')(x)\n z_log_var = Dense(latent_dim, name='z_log_var')(x)\n\n # use reparameterization trick to push the sampling out as input\n # note that \"output_shape\" isn't necessary with the TensorFlow backend\n z = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean, z_log_var])\n\n # instantiate encoder model\n encoder = Model(inputs, [z_mean, z_log_var, z], name='encoder')\n encoder.summary()\n plot_model(encoder, to_file='vae_cnn_encoder.png', show_shapes=True)\n\n # build decoder model\n latent_inputs = Input(shape=(latent_dim,), name='z_sampling')\n x = Dense(shape[1] * shape[2] * shape[3], activation='relu')(latent_inputs)\n x = Reshape((shape[1], shape[2], shape[3]))(x)\n\n x = Conv2DTranspose(filters=64, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n x = Conv2DTranspose(filters=32, kernel_size=kernel_size, activation='relu', strides=2, padding='same')(x)\n x = Conv2DTranspose(filters=16, kernel_size=kernel_size, activation='relu', strides=1, padding='same')(x)\n\n outputs = Conv2DTranspose(filters=1,\n kernel_size=kernel_size,\n activation='sigmoid',\n padding='same',\n name='decoder_output')(x)\n\n # instantiate decoder model\n decoder = Model(latent_inputs, outputs, name='decoder')\n decoder.summary()\n plot_model(decoder, to_file='vae_cnn_decoder.png', show_shapes=True)\n\n # instantiate VAE model\n outputs = decoder(encoder(inputs)[2])\n vae = 
Model(inputs, outputs, name='vae')\n\n # VAE loss = mse_loss or xent_loss + kl_loss\n reconstruction_loss = binary_crossentropy(K.flatten(inputs), K.flatten(outputs))\n\n reconstruction_loss *= image_size[0] * image_size[1]\n kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)\n kl_loss = K.sum(kl_loss, axis=-1)\n kl_loss *= -0.5\n vae_loss = K.mean(reconstruction_loss + kl_loss)\n vae.add_loss(vae_loss)\n vae.compile(optimizer='rmsprop')\n vae.summary()\n plot_model(vae, to_file='vae_cnn.png', show_shapes=True)\n\n return vae, encoder, decoder\n\n\nif __name__ == '__main__':\n\n is_train = False\n data_file = '../data/out/moment_frames_5.npy'\n data, X_train, X_test, im_size = process_data(data_file)\n\n kernel_size = (3, 3)\n latent_dim = 128\n batch_size = 128\n epochs = 10\n\n vae, encoder, decoder = construct_vae(im_size, kernel_size, latent_dim)\n\n if is_train:\n history = vae.fit(X_train,\n epochs=epochs,\n batch_size=batch_size,\n validation_data=(X_test, None),\n verbose=2)\n vae.save_weights('vae_cnn.h5')\n\n # summarize history for loss\n plt.plot(history.history['loss'])\n plt.plot(history.history['val_loss'])\n plt.title('model loss')\n plt.ylabel('loss')\n plt.xlabel('epoch')\n plt.legend(['train', 'test'], loc='upper left')\n plt.savefig('vae_train.jpeg')\n plt.show()\n\n else:\n vae.load_weights('vae_cnn.h5')\n\n # Transform to latent representation\n encoded_data = encoder.predict(data, batch_size=batch_size)\n\n pd.DataFrame(encoded_data[0]).to_csv('latest_rep_cnn.csv', index=None)\n\n print('Completed.')\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from typing import Set, Dict, Tuple
from flask import Flask, render_template, request
app = Flask(__name__)
# NOTE(review): hard-coded secret key — acceptable for a demo, but move it to
# an environment variable before any real deployment.
app.config['SECRET_KEY'] = 'top_secret'
# Constant definitions for the automaton: transition table, start state,
# and the set of accepting states.
RULE: Dict[Tuple[str, str], str] = {
    ('H', 'a'): 'S',
    ('H', 'b'): 'SE',
    ('S', 'b'): 'SE',
    ('SE', 'a'): 'SE',
    ('SE', 'b'): 'SE',
}
INITIAL_STATE: str = 'H'
FINAL_STATE: Set[str] = {'S', 'SE'}


# NOTE(review): annotated `-> str` but this is a generator — confirm intent.
def finite_automate(word: str) -> str:
    """Finite-state machine generator: yields a trace line per symbol of
    *word*, then a final verdict on language membership."""
    current = INITIAL_STATE
    for position, symbol in enumerate(word):
        yield f'{word[position:]} --> {current}'
        current = RULE.get((current, symbol))
        if not current:
            # No transition for (state, symbol): the word is rejected.
            break
    if current in FINAL_STATE:
        yield 'Цепочка принадлежит языку'
    else:
        yield 'Цепочка не принадлежит языку'
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render the main page; on POST, run the automaton over the posted word."""
    outcome = None
    if request.method == 'POST':
        outcome = finite_automate(request.form['word'])
    return render_template('index.html', res=outcome)
|
normal
|
{
"blob_id": "86ea1c46383b5a8790eb187163107f4100395ef3",
"index": 8962,
"step-1": "<mask token>\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-2": "<mask token>\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S', ('H', 'b'): 'SE', ('S',\n 'b'): 'SE', ('SE', 'a'): 'SE', ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-3": "<mask token>\napp = Flask(__name__)\napp.config['SECRET_KEY'] = 'top_secret'\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S', ('H', 'b'): 'SE', ('S',\n 'b'): 'SE', ('SE', 'a'): 'SE', ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-4": "from typing import Set, Dict, Tuple\nfrom flask import Flask, render_template, request\napp = Flask(__name__)\napp.config['SECRET_KEY'] = 'top_secret'\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S', ('H', 'b'): 'SE', ('S',\n 'b'): 'SE', ('SE', 'a'): 'SE', ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) ->str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n",
"step-5": "from typing import Set, Dict, Tuple\nfrom flask import Flask, render_template, request\n\napp = Flask(__name__)\napp.config['SECRET_KEY'] = 'top_secret'\n\n# Определение константных величин\nRULE: Dict[Tuple[str, str], str] = {('H', 'a'): 'S',\n ('H', 'b'): 'SE',\n ('S', 'b'): 'SE',\n ('SE', 'a'): 'SE',\n ('SE', 'b'): 'SE'}\nINITIAL_STATE: str = 'H'\nFINAL_STATE: Set[str] = {'S', 'SE'}\n\n\ndef finite_automate(word: str) -> str:\n \"\"\"Реализация конечного автомата для проверки символьных строк\"\"\"\n state: str = INITIAL_STATE\n for ind, char in enumerate(word):\n yield f'{word[ind:]} --> {state}'\n state = RULE.get((state, char))\n if not state:\n break\n\n if state in FINAL_STATE:\n yield 'Цепочка принадлежит языку'\n else:\n yield 'Цепочка не принадлежит языку'\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n res = None\n if request.method == 'POST':\n res = finite_automate(request.form['word'])\n return render_template('index.html', res=res)\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import random
from . import WaiterInterface
class RandomIPv4Waiter(WaiterInterface):
    """HostPortWaiter that yields randomly generated IPv4 addresses."""

    def __init__(self, options):
        # -1 means "no limit"; any non-negative value caps the number of yields.
        self.ports = options['ports']
        self.limit_generate = options.get('limit_generate', -1)

    def generator(self):
        """Yield (address, ports) tuples until the configured limit is spent."""
        while self.limit_generate != 0:
            address = generateRandomIPv4()
            yield address, self.ports
            if self.limit_generate != -1:
                self.limit_generate -= 1
def generateRandomIPv4():
"""
Helper method to generate a random ipv4 adress
"""
return ".".join(map(str, (random.randint(0, 255) for _ in range(4))))
|
normal
|
{
"blob_id": "bd3b1263d7d657fe2edd3c7198f63821a3d1d1e5",
"index": 319,
"step-1": "<mask token>\n\n\nclass RandomIPv4Waiter(WaiterInterface):\n <mask token>\n <mask token>\n\n def generator(self):\n while self.limit_generate != 0:\n randomIPv4 = generateRandomIPv4()\n yield randomIPv4, self.ports\n if self.limit_generate != -1:\n self.limit_generate -= 1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass RandomIPv4Waiter(WaiterInterface):\n \"\"\"\n HostPortWaiter which generates random ipv4 adresses\n \"\"\"\n\n def __init__(self, options):\n self.ports = options['ports']\n self.limit_generate = options.get('limit_generate', -1)\n\n def generator(self):\n while self.limit_generate != 0:\n randomIPv4 = generateRandomIPv4()\n yield randomIPv4, self.ports\n if self.limit_generate != -1:\n self.limit_generate -= 1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass RandomIPv4Waiter(WaiterInterface):\n \"\"\"\n HostPortWaiter which generates random ipv4 adresses\n \"\"\"\n\n def __init__(self, options):\n self.ports = options['ports']\n self.limit_generate = options.get('limit_generate', -1)\n\n def generator(self):\n while self.limit_generate != 0:\n randomIPv4 = generateRandomIPv4()\n yield randomIPv4, self.ports\n if self.limit_generate != -1:\n self.limit_generate -= 1\n\n\ndef generateRandomIPv4():\n \"\"\"\n Helper method to generate a random ipv4 adress\n \"\"\"\n return '.'.join(map(str, (random.randint(0, 255) for _ in range(4))))\n",
"step-4": "import random\nfrom . import WaiterInterface\n\n\nclass RandomIPv4Waiter(WaiterInterface):\n \"\"\"\n HostPortWaiter which generates random ipv4 adresses\n \"\"\"\n\n def __init__(self, options):\n self.ports = options['ports']\n self.limit_generate = options.get('limit_generate', -1)\n\n def generator(self):\n while self.limit_generate != 0:\n randomIPv4 = generateRandomIPv4()\n yield randomIPv4, self.ports\n if self.limit_generate != -1:\n self.limit_generate -= 1\n\n\ndef generateRandomIPv4():\n \"\"\"\n Helper method to generate a random ipv4 adress\n \"\"\"\n return '.'.join(map(str, (random.randint(0, 255) for _ in range(4))))\n",
"step-5": "import random\n\nfrom . import WaiterInterface\n\nclass RandomIPv4Waiter(WaiterInterface):\n \"\"\"\n HostPortWaiter which generates random ipv4 adresses\n \"\"\"\n def __init__(self, options):\n self.ports = options['ports']\n self.limit_generate = options.get('limit_generate', -1)\n\n def generator(self):\n while self.limit_generate != 0:\n randomIPv4 = generateRandomIPv4()\n yield (randomIPv4, self.ports)\n if self.limit_generate != -1:\n self.limit_generate -= 1\n\ndef generateRandomIPv4():\n \"\"\"\n Helper method to generate a random ipv4 adress\n \"\"\"\n return \".\".join(map(str, (random.randint(0, 255) for _ in range(4))))\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
import json
from bottle import request, response, route, get, run, default_app
# Expose the bottle default app under both names; WSGI servers conventionally
# look for a module-level "application".
app = application = default_app()
@route('/candidate/hired', method=['POST'])
def update_delete_handler():
    """Mark the candidate as hired and return the status as JSON."""
    response.content_type = 'application/json'
    payload = {"hired": True}
    return json.dumps(payload)
def main():
    """Run the development server on localhost:8080 (blocks until stopped)."""
    run(host='localhost', port=8080)
|
normal
|
{
"blob_id": "50e759ff24cdb8fbb5a98d9381afb13ebc1a74f1",
"index": 7317,
"step-1": "<mask token>\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-3": "<mask token>\napp = application = default_app()\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-4": "import json\nfrom bottle import request, response, route, get, run, default_app\napp = application = default_app()\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({'hired': True})\n\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-5": "import json\n\nfrom bottle import request, response, route, get, run, default_app\n\n\napp = application = default_app()\n\n\n@route('/candidate/hired', method=['POST'])\ndef update_delete_handler():\n response.content_type = 'application/json'\n return json.dumps({\"hired\": True})\n\ndef main():\n run(host='localhost', port=8080)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from getMerriamWebster import searchMerriamWebster
from searchWikipedia import searchWikipedia
from synonyms import searchSynonyms
class Scraping:
    """Builds candidate-word domains for crossword clues by scraping
    Wikipedia and synonym sources (Datamuse / Merriam-Webster)."""

    def __init__(self, clues, answers, gridIndex):
        # clues: {"across": {num: clue_text}, "down": {num: clue_text}}
        # answers / gridIndex: parallel 5x5 grids used only by cheat().
        self.clues = clues
        self.domains = {"across": {}, "down": {}}
        self.answers = answers
        self.gridIndex = gridIndex

    def setDomains(self):
        """Populate self.domains with scraped candidate text for every clue."""
        for down in self.clues["down"]:
            self.domains["down"][down] = self.search(self.clues["down"][down])
        for across in self.clues["across"]:
            self.domains["across"][across] = self.search(self.clues["across"][across])
        # ======================== CHEAT =============================
        # self.cheat()

    def getClueList(self, clue):
        """Wrap a single clue in a list (kept for interface compatibility)."""
        clueList = [clue]
        return clueList

    def search(self, clue):
        """Return a space-joined string of candidate words for *clue*.

        Collects results from Wikipedia and synonym lookups; a failure in
        any single source is logged and skipped (best-effort). Google and
        Merriam-Webster paths existed previously but are disabled.
        """
        domain = set()
        wiki_set = set()
        synonym_set = set()
        toSearch = clue
        print("Wikipedia search for:", toSearch)
        try:
            wiki_set = wiki_set | self.getWiki(toSearch)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed; still best-effort per source.
            print("An exception occurred")
        print("Synonym search from Datamuse and Merriam-Webster for:", toSearch)
        try:
            synonym_set = synonym_set | self.getSynonyms(toSearch)
        except Exception:
            print("An exception occurred")
        domain = domain.union(wiki_set, synonym_set)
        return ' '.join(str(e) for e in domain)

    def getGoogle(self, toSearch):
        # Stub: Google scraping is not implemented; search() does not call it.
        return "toSearch"

    def getWiki(self, toSearch):
        """Candidate words scraped from Wikipedia for *toSearch*."""
        return searchWikipedia(toSearch)

    def getMerriam(self, toSearch):
        """Candidate words from the Merriam-Webster lookup for *toSearch*."""
        return searchMerriamWebster(toSearch)

    def getSynonyms(self, toSearch):
        """Synonyms for *toSearch*, biased by the full across/down clue sets."""
        return searchSynonyms(toSearch, self.clues["across"], self.clues["down"])

    def cheat(self):
        """Append the known answers (from self.answers via self.gridIndex)
        to each clue's existing domain entry — debugging aid for a 5x5 grid.

        NOTE: assumes self.domains already holds an entry for every clue
        number it touches (normally populated by setDomains()).
        """
        for across in self.clues["across"]:
            for row in range(0, 5):
                for col in range(0, 5):
                    if self.gridIndex[row][col] == across:
                        answer = ""
                        for colIn in range(0, 5):
                            if self.answers[row][colIn] != "-":
                                answer = answer + self.answers[row][colIn]
                        self.domains["across"][across] = self.domains["across"][across] + " " + answer
        for down in self.clues["down"]:
            for row in range(0, 5):
                for col in range(0, 5):
                    if self.gridIndex[row][col] == down:
                        answer = ""
                        for rowIn in range(0, 5):
                            if self.answers[rowIn][col] != "-":
                                answer = answer + self.answers[rowIn][col]
                        self.domains["down"][down] = self.domains["down"][down] + " " + answer
"""
scraping = Scraping()
scraping.setDomains()
print(scraping.domains)
"""
|
normal
|
{
"blob_id": "138abb40fda0f19b4a74a294d5cd0dd326dc59ce",
"index": 7722,
"step-1": "<mask token>\n\n\nclass Scraping:\n\n def __init__(self, clues, answers, gridIndex):\n self.clues = clues\n self.domains = {'across': {}, 'down': {}}\n self.answers = answers\n self.gridIndex = gridIndex\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def getSynonyms(self, toSearch):\n return searchSynonyms(toSearch, self.clues['across'], self.clues[\n 'down'])\n\n def cheat(self):\n for across in self.clues['across']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == across:\n answer = ''\n for colIn in range(0, 5):\n if self.answers[row][colIn] != '-':\n answer = answer + self.answers[row][colIn]\n self.domains['across'][across] = self.domains['across'\n ][across] + ' ' + answer\n for down in self.clues['down']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == down:\n answer = ''\n for rowIn in range(0, 5):\n if self.answers[rowIn][col] != '-':\n answer = answer + self.answers[rowIn][col]\n self.domains['down'][down] = self.domains['down'][down\n ] + ' ' + answer\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Scraping:\n\n def __init__(self, clues, answers, gridIndex):\n self.clues = clues\n self.domains = {'across': {}, 'down': {}}\n self.answers = answers\n self.gridIndex = gridIndex\n <mask token>\n <mask token>\n\n def search(self, clue):\n domain = set()\n wiki_set = set()\n synonym_set = set()\n toSearch = clue\n \"\"\"\n print(\"Google search for:\", toSearch)\n try:\n domain = domain + self.getGoogle(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\"\n print('Wikipedia search for:', toSearch)\n try:\n wiki_set = wiki_set | self.getWiki(toSearch)\n except:\n print('An exception occurred')\n print('Synonym search from Datamuse and Merriam-Webster for:', toSearch\n )\n try:\n synonym_set = synonym_set | self.getSynonyms(toSearch)\n except:\n print('An exception occurred')\n \"\"\"\n print(\"Merriam Webster search for:\", toSearch)\n try:\n merriam_set = merriam_set | self.getMerriam(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\"\n domain = domain.union(wiki_set, synonym_set)\n return ' '.join(str(e) for e in domain)\n <mask token>\n\n def getWiki(self, toSearch):\n return searchWikipedia(toSearch)\n <mask token>\n\n def getSynonyms(self, toSearch):\n return searchSynonyms(toSearch, self.clues['across'], self.clues[\n 'down'])\n\n def cheat(self):\n for across in self.clues['across']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == across:\n answer = ''\n for colIn in range(0, 5):\n if self.answers[row][colIn] != '-':\n answer = answer + self.answers[row][colIn]\n self.domains['across'][across] = self.domains['across'\n ][across] + ' ' + answer\n for down in self.clues['down']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == down:\n answer = ''\n for rowIn in range(0, 5):\n if self.answers[rowIn][col] != '-':\n answer = answer + self.answers[rowIn][col]\n self.domains['down'][down] = self.domains['down'][down\n ] + ' ' + 
answer\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Scraping:\n\n def __init__(self, clues, answers, gridIndex):\n self.clues = clues\n self.domains = {'across': {}, 'down': {}}\n self.answers = answers\n self.gridIndex = gridIndex\n\n def setDomains(self):\n for down in self.clues['down']:\n self.domains['down'][down] = self.search(self.clues['down'][down])\n for across in self.clues['across']:\n self.domains['across'][across] = self.search(self.clues[\n 'across'][across])\n <mask token>\n\n def search(self, clue):\n domain = set()\n wiki_set = set()\n synonym_set = set()\n toSearch = clue\n \"\"\"\n print(\"Google search for:\", toSearch)\n try:\n domain = domain + self.getGoogle(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\"\n print('Wikipedia search for:', toSearch)\n try:\n wiki_set = wiki_set | self.getWiki(toSearch)\n except:\n print('An exception occurred')\n print('Synonym search from Datamuse and Merriam-Webster for:', toSearch\n )\n try:\n synonym_set = synonym_set | self.getSynonyms(toSearch)\n except:\n print('An exception occurred')\n \"\"\"\n print(\"Merriam Webster search for:\", toSearch)\n try:\n merriam_set = merriam_set | self.getMerriam(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\"\n domain = domain.union(wiki_set, synonym_set)\n return ' '.join(str(e) for e in domain)\n\n def getGoogle(self, toSearch):\n return 'toSearch'\n\n def getWiki(self, toSearch):\n return searchWikipedia(toSearch)\n\n def getMerriam(self, toSearch):\n return searchMerriamWebster(toSearch)\n\n def getSynonyms(self, toSearch):\n return searchSynonyms(toSearch, self.clues['across'], self.clues[\n 'down'])\n\n def cheat(self):\n for across in self.clues['across']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == across:\n answer = ''\n for colIn in range(0, 5):\n if self.answers[row][colIn] != '-':\n answer = answer + self.answers[row][colIn]\n self.domains['across'][across] = self.domains['across'\n ][across] + ' ' + 
answer\n for down in self.clues['down']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == down:\n answer = ''\n for rowIn in range(0, 5):\n if self.answers[rowIn][col] != '-':\n answer = answer + self.answers[rowIn][col]\n self.domains['down'][down] = self.domains['down'][down\n ] + ' ' + answer\n\n\n<mask token>\n",
"step-4": "from getMerriamWebster import searchMerriamWebster\nfrom searchWikipedia import searchWikipedia\nfrom synonyms import searchSynonyms\n\n\nclass Scraping:\n\n def __init__(self, clues, answers, gridIndex):\n self.clues = clues\n self.domains = {'across': {}, 'down': {}}\n self.answers = answers\n self.gridIndex = gridIndex\n\n def setDomains(self):\n for down in self.clues['down']:\n self.domains['down'][down] = self.search(self.clues['down'][down])\n for across in self.clues['across']:\n self.domains['across'][across] = self.search(self.clues[\n 'across'][across])\n\n def getClueList(self, clue):\n clueList = [clue]\n return clueList\n\n def search(self, clue):\n domain = set()\n wiki_set = set()\n synonym_set = set()\n toSearch = clue\n \"\"\"\n print(\"Google search for:\", toSearch)\n try:\n domain = domain + self.getGoogle(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\"\n print('Wikipedia search for:', toSearch)\n try:\n wiki_set = wiki_set | self.getWiki(toSearch)\n except:\n print('An exception occurred')\n print('Synonym search from Datamuse and Merriam-Webster for:', toSearch\n )\n try:\n synonym_set = synonym_set | self.getSynonyms(toSearch)\n except:\n print('An exception occurred')\n \"\"\"\n print(\"Merriam Webster search for:\", toSearch)\n try:\n merriam_set = merriam_set | self.getMerriam(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\"\n domain = domain.union(wiki_set, synonym_set)\n return ' '.join(str(e) for e in domain)\n\n def getGoogle(self, toSearch):\n return 'toSearch'\n\n def getWiki(self, toSearch):\n return searchWikipedia(toSearch)\n\n def getMerriam(self, toSearch):\n return searchMerriamWebster(toSearch)\n\n def getSynonyms(self, toSearch):\n return searchSynonyms(toSearch, self.clues['across'], self.clues[\n 'down'])\n\n def cheat(self):\n for across in self.clues['across']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == across:\n answer = ''\n for colIn 
in range(0, 5):\n if self.answers[row][colIn] != '-':\n answer = answer + self.answers[row][colIn]\n self.domains['across'][across] = self.domains['across'\n ][across] + ' ' + answer\n for down in self.clues['down']:\n for row in range(0, 5):\n for col in range(0, 5):\n if self.gridIndex[row][col] == down:\n answer = ''\n for rowIn in range(0, 5):\n if self.answers[rowIn][col] != '-':\n answer = answer + self.answers[rowIn][col]\n self.domains['down'][down] = self.domains['down'][down\n ] + ' ' + answer\n\n\n<mask token>\n",
"step-5": "from getMerriamWebster import searchMerriamWebster\nfrom searchWikipedia import searchWikipedia\nfrom synonyms import searchSynonyms\n\nclass Scraping:\n def __init__(self, clues, answers, gridIndex):\n self.clues = clues\n self.domains = {\"across\": {}, \"down\":{}}\n self.answers = answers\n self.gridIndex = gridIndex\n\n def setDomains(self):\n for down in self.clues[\"down\"]:\n self.domains[\"down\"][down] = self.search(self.clues[\"down\"][down])\n for across in self.clues[\"across\"]:\n self.domains[\"across\"][across] = self.search(self.clues[\"across\"][across])\n #======================== CHEAT =============================\n #self.cheat()\n\n def getClueList(self, clue):\n clueList = [clue]\n return clueList\n\n def search(self, clue):\n domain = set()\n wiki_set = set()\n synonym_set = set()\n toSearch = clue\n \"\"\"\n print(\"Google search for:\", toSearch)\n try:\n domain = domain + self.getGoogle(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\"\n print(\"Wikipedia search for:\", toSearch)\n try:\n\n wiki_set = wiki_set | self.getWiki(toSearch)\n except:\n print(\"An exception occurred\")\n \n print(\"Synonym search from Datamuse and Merriam-Webster for:\", toSearch)\n try:\n synonym_set = synonym_set | self.getSynonyms(toSearch)\n except:\n print(\"An exception occurred\")\n \n \"\"\"\n print(\"Merriam Webster search for:\", toSearch)\n try:\n merriam_set = merriam_set | self.getMerriam(toSearch)\n except:\n print(\"An exception occurred\")\n \"\"\" \n domain = domain.union(wiki_set, synonym_set)\n return ' '.join(str(e) for e in domain) #''.join(str(e) for e in words)\n\n def getGoogle(self, toSearch):\n\n return \"toSearch\"\n\n def getWiki(self, toSearch):\n return searchWikipedia(toSearch)\n\n def getMerriam(self,toSearch):\n return searchMerriamWebster(toSearch)\n\n def getSynonyms(self, toSearch):\n return searchSynonyms(toSearch, self.clues[\"across\"], self.clues[\"down\"])\n\n def cheat(self):\n for across in 
self.clues[\"across\"]:\n \n for row in range(0,5):\n for col in range(0,5):\n if self.gridIndex[row][col] == across:\n answer = \"\"\n for colIn in range(0,5):\n if self.answers[row][colIn] != \"-\":\n answer = answer + self.answers[row][colIn]\n self.domains[\"across\"][across] = self.domains[\"across\"][across] + \" \" + answer\n #print(answer)\n\n for down in self.clues[\"down\"]:\n \n for row in range(0,5):\n for col in range(0,5):\n if self.gridIndex[row][col] == down:\n answer = \"\"\n for rowIn in range(0,5):\n if self.answers[rowIn][col] != \"-\":\n answer = answer + self.answers[rowIn][col]\n self.domains[\"down\"][down] = self.domains[\"down\"][down] + \" \" + answer\n #print(answer)\n\n\n\"\"\"\nscraping = Scraping()\nscraping.setDomains()\nprint(scraping.domains)\n\"\"\"",
"step-ids": [
4,
6,
9,
11,
12
]
}
|
[
4,
6,
9,
11,
12
] |
S = input()
T = []
sen = ["dream", "dreamer", "erase", "eraser"]
s_len = len(S)
while len(T) <= s_len:
|
normal
|
{
"blob_id": "b874bb37fa59d9f1194c517bedbdbafae748786e",
"index": 5695,
"step-1": "S = input()\n\nT = []\nsen = [\"dream\", \"dreamer\", \"erase\", \"eraser\"]\ns_len = len(S)\n\n\nwhile len(T) <= s_len:\n ",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from random import randint
#funções
def leialetra():
    """Prompt the user with a yes/no question and validate the answer.

    Keeps asking until a valid answer arrives. Returns 'S' when the user
    answers yes (S/s); returns None when the user answers no (N/n) or
    aborts with Ctrl+C.
    """
    while True:
        try:
            msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]
        except KeyboardInterrupt:
            print('O usuário desistiu de inserir um dado!')
            break
        except IndexError:
            # input() returned an empty string, so indexing [0] raised.
            print('ERRO, nada digitado!')
        else:
            if msg not in 'SsNn' or msg in ' ':
                print('ERRO, DADO INVALIDO!')
            elif msg in 'Nn':
                print('Volte sempre, Obrigado!')
                break
            else:
                # Valid "yes" answer. (Removed an unreachable `break` that
                # followed this return in the original.)
                return 'S'
def facapergunta():
    """Prompt the user for a question.

    Returns 'N' when the normalized input (uppercased, all spaces removed)
    is purely numeric — treated as an invalid question — and 'L' otherwise.
    """
    msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ','')
    if msg.isnumeric():
        return 'N'
    else:
        return 'L'
#programa principal
# Pool of canned answers; one is chosen at random for every question asked.
resposta = ["Sim", "Não", "Talvez", "Por que não?", "Vá", "Não sei", "Pode ser", "Talvez sim", "Talvez não", "Tenha Fé"]
while True:
    # Pick the answer index up front; it is used only if a valid question arrives.
    aleatorio = randint(0, 9)
    escolha = leialetra()
    if escolha == 'S':
        pergunta = facapergunta()
        if pergunta == 'L':
            print(resposta[aleatorio])
    else:
        break
|
normal
|
{
"blob_id": "1fff681363c4c91c47c2818681a3f2f125dd8c83",
"index": 2022,
"step-1": "<mask token>\n\n\ndef leialetra():\n \"\"\"\n =>Função para validar letras.\n parm=msg: Recebe dados to tipo string sendo Ss ou Nn.\n return: String de valor S.\n \"\"\"\n while True:\n try:\n msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]\n except KeyboardInterrupt:\n print('O usuário desistiu de inserir um dado!')\n break\n except IndexError:\n print('ERRO, nada digitado!')\n else:\n if msg not in 'SsNn' or msg in ' ':\n print('ERRO, DADO INVALIDO!')\n elif msg in 'Nn':\n print('Volte sempre, Obrigado!')\n break\n else:\n return 'S'\n break\n\n\ndef facapergunta():\n msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')\n if msg.isnumeric():\n return 'N'\n else:\n return 'L'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef leialetra():\n \"\"\"\n =>Função para validar letras.\n parm=msg: Recebe dados to tipo string sendo Ss ou Nn.\n return: String de valor S.\n \"\"\"\n while True:\n try:\n msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]\n except KeyboardInterrupt:\n print('O usuário desistiu de inserir um dado!')\n break\n except IndexError:\n print('ERRO, nada digitado!')\n else:\n if msg not in 'SsNn' or msg in ' ':\n print('ERRO, DADO INVALIDO!')\n elif msg in 'Nn':\n print('Volte sempre, Obrigado!')\n break\n else:\n return 'S'\n break\n\n\ndef facapergunta():\n msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')\n if msg.isnumeric():\n return 'N'\n else:\n return 'L'\n\n\n<mask token>\nwhile True:\n aleatorio = randint(0, 9)\n escolha = leialetra()\n if escolha == 'S':\n pergunta = facapergunta()\n if pergunta == 'L':\n print(resposta[aleatorio])\n else:\n break\n",
"step-3": "<mask token>\n\n\ndef leialetra():\n \"\"\"\n =>Função para validar letras.\n parm=msg: Recebe dados to tipo string sendo Ss ou Nn.\n return: String de valor S.\n \"\"\"\n while True:\n try:\n msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]\n except KeyboardInterrupt:\n print('O usuário desistiu de inserir um dado!')\n break\n except IndexError:\n print('ERRO, nada digitado!')\n else:\n if msg not in 'SsNn' or msg in ' ':\n print('ERRO, DADO INVALIDO!')\n elif msg in 'Nn':\n print('Volte sempre, Obrigado!')\n break\n else:\n return 'S'\n break\n\n\ndef facapergunta():\n msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')\n if msg.isnumeric():\n return 'N'\n else:\n return 'L'\n\n\nresposta = ['Sim', 'Não', 'Talvez', 'Por que não?', 'Vá', 'Não sei',\n 'Pode ser', 'Talvez sim', 'Talvez não', 'Tenha Fé']\nwhile True:\n aleatorio = randint(0, 9)\n escolha = leialetra()\n if escolha == 'S':\n pergunta = facapergunta()\n if pergunta == 'L':\n print(resposta[aleatorio])\n else:\n break\n",
"step-4": "from random import randint\n\n\ndef leialetra():\n \"\"\"\n =>Função para validar letras.\n parm=msg: Recebe dados to tipo string sendo Ss ou Nn.\n return: String de valor S.\n \"\"\"\n while True:\n try:\n msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]\n except KeyboardInterrupt:\n print('O usuário desistiu de inserir um dado!')\n break\n except IndexError:\n print('ERRO, nada digitado!')\n else:\n if msg not in 'SsNn' or msg in ' ':\n print('ERRO, DADO INVALIDO!')\n elif msg in 'Nn':\n print('Volte sempre, Obrigado!')\n break\n else:\n return 'S'\n break\n\n\ndef facapergunta():\n msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ', '')\n if msg.isnumeric():\n return 'N'\n else:\n return 'L'\n\n\nresposta = ['Sim', 'Não', 'Talvez', 'Por que não?', 'Vá', 'Não sei',\n 'Pode ser', 'Talvez sim', 'Talvez não', 'Tenha Fé']\nwhile True:\n aleatorio = randint(0, 9)\n escolha = leialetra()\n if escolha == 'S':\n pergunta = facapergunta()\n if pergunta == 'L':\n print(resposta[aleatorio])\n else:\n break\n",
"step-5": "from random import randint\n\n#funções\ndef leialetra():\n '''\n =>Função para validar letras.\n parm=msg: Recebe dados to tipo string sendo Ss ou Nn.\n return: String de valor S.\n '''\n while True:\n try:\n msg = str(input('Deseja fazer uma pergunta? [s/n] ')).upper()[0]\n except KeyboardInterrupt:\n print('O usuário desistiu de inserir um dado!')\n break\n except IndexError:\n print('ERRO, nada digitado!')\n else:\n if msg not in 'SsNn' or msg in ' ':\n print('ERRO, DADO INVALIDO!')\n else:\n if msg in 'Nn':\n print('Volte sempre, Obrigado!')\n break\n else:\n return 'S'\n break\n\ndef facapergunta():\n msg = str(input('Faça sua pergunta: ')).upper().strip().replace(' ','')\n if msg.isnumeric():\n return 'N'\n else:\n return 'L'\n\n\n#programa principal\nresposta = [\"Sim\", \"Não\", \"Talvez\", \"Por que não?\", \"Vá\", \"Não sei\", \"Pode ser\", \"Talvez sim\", \"Talvez não\", \"Tenha Fé\"]\n\nwhile True:\n aleatorio = randint(0, 9)\n escolha = leialetra()\n if escolha == 'S':\n pergunta = facapergunta()\n if pergunta == 'L':\n print(resposta[aleatorio])\n else:\n break\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from flask import Flask, render_template, request
from distance import get_distance
# Flask application instance for this module.
app = Flask(__name__)
@app.route('/hello')
@app.route('/hello/<name>')
def hello(name=None):
    """Greet *name*; fall back to "World" when the URL carries no name."""
    if not name:
        name = "World"
    return "Hello %s" % name
@app.route('/')
def index():
    """Serve the landing page."""
    context = {'title': 'home'}
    return render_template('index.html', **context)
@app.route('/distance', methods=['POST', 'GET'])
def distance():
    """Show the distance form; on POST, compute the distance between the
    two submitted locations."""
    result = None
    if request.method == 'POST':
        start = request.form['location_a']
        end = request.form['location_b']
        result = get_distance(start, end)
    return render_template('distance.html', title='Afstand', result=result)
if __name__ == '__main__':
app.run(debug=True)
|
normal
|
{
"blob_id": "05052e9ccbd076e71e9ec6148887ce7b82ed316d",
"index": 6256,
"step-1": "<mask token>\n\n\[email protected]('/hello')\[email protected]('/hello/<name>')\ndef hello(name=None):\n name = 'World' if not name else name\n return 'Hello %s' % name\n\n\[email protected]('/')\ndef index():\n return render_template('index.html', title='home')\n\n\[email protected]('/distance', methods=['POST', 'GET'])\ndef distance():\n result = None\n if request.method == 'POST':\n location_a = request.form['location_a']\n location_b = request.form['location_b']\n result = get_distance(location_a, location_b)\n return render_template('distance.html', title='Afstand', result=result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/hello')\[email protected]('/hello/<name>')\ndef hello(name=None):\n name = 'World' if not name else name\n return 'Hello %s' % name\n\n\[email protected]('/')\ndef index():\n return render_template('index.html', title='home')\n\n\[email protected]('/distance', methods=['POST', 'GET'])\ndef distance():\n result = None\n if request.method == 'POST':\n location_a = request.form['location_a']\n location_b = request.form['location_b']\n result = get_distance(location_a, location_b)\n return render_template('distance.html', title='Afstand', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\[email protected]('/hello')\[email protected]('/hello/<name>')\ndef hello(name=None):\n name = 'World' if not name else name\n return 'Hello %s' % name\n\n\[email protected]('/')\ndef index():\n return render_template('index.html', title='home')\n\n\[email protected]('/distance', methods=['POST', 'GET'])\ndef distance():\n result = None\n if request.method == 'POST':\n location_a = request.form['location_a']\n location_b = request.form['location_b']\n result = get_distance(location_a, location_b)\n return render_template('distance.html', title='Afstand', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask, render_template, request\nfrom distance import get_distance\napp = Flask(__name__)\n\n\[email protected]('/hello')\[email protected]('/hello/<name>')\ndef hello(name=None):\n name = 'World' if not name else name\n return 'Hello %s' % name\n\n\[email protected]('/')\ndef index():\n return render_template('index.html', title='home')\n\n\[email protected]('/distance', methods=['POST', 'GET'])\ndef distance():\n result = None\n if request.method == 'POST':\n location_a = request.form['location_a']\n location_b = request.form['location_b']\n result = get_distance(location_a, location_b)\n return render_template('distance.html', title='Afstand', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask, render_template, request\nfrom distance import get_distance\n\napp = Flask(__name__)\n\n\[email protected]('/hello')\[email protected]('/hello/<name>')\ndef hello(name=None):\n name = \"World\" if not name else name\n return \"Hello %s\" % name\n\n\[email protected]('/')\ndef index():\n return render_template('index.html', title='home')\n\n\[email protected]('/distance', methods=['POST', 'GET'])\ndef distance():\n result = None\n if request.method == 'POST':\n location_a = request.form['location_a']\n location_b = request.form['location_b']\n result = get_distance(location_a, location_b)\n\n return render_template('distance.html', title='Afstand', result=result)\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
'''
fibonacci(6) => [1, 1, 2, 3, 5, 8]
fibonacci(7) => [1, 1, 2, 3, 5, 8, 13]
'''
def fibonacci(n):
if n == 0:
return []
elif n == 1:
return [1]
elif n == 2:
return [1, 1]
else:
lista = fibonacci(n-1)
suma = lista[len(lista)-1] + lista[len(lista)-2]
lista.append(suma)
return lista
def main():
resultado = fibonacci(6)
print(resultado)
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "03062ea08bd6ad88376f7c2aa2c89d2194ed8b2e",
"index": 1074,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef fibonacci(n):\n if n == 0:\n return []\n elif n == 1:\n return [1]\n elif n == 2:\n return [1, 1]\n else:\n lista = fibonacci(n - 1)\n suma = lista[len(lista) - 1] + lista[len(lista) - 2]\n lista.append(suma)\n return lista\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fibonacci(n):\n if n == 0:\n return []\n elif n == 1:\n return [1]\n elif n == 2:\n return [1, 1]\n else:\n lista = fibonacci(n - 1)\n suma = lista[len(lista) - 1] + lista[len(lista) - 2]\n lista.append(suma)\n return lista\n\n\ndef main():\n resultado = fibonacci(6)\n print(resultado)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef fibonacci(n):\n if n == 0:\n return []\n elif n == 1:\n return [1]\n elif n == 2:\n return [1, 1]\n else:\n lista = fibonacci(n - 1)\n suma = lista[len(lista) - 1] + lista[len(lista) - 2]\n lista.append(suma)\n return lista\n\n\ndef main():\n resultado = fibonacci(6)\n print(resultado)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "'''\nfibonacci(6) => [1, 1, 2, 3, 5, 8]\nfibonacci(7) => [1, 1, 2, 3, 5, 8, 13]\n'''\n\ndef fibonacci(n):\n if n == 0:\n return []\n elif n == 1:\n return [1]\n elif n == 2:\n return [1, 1]\n else:\n lista = fibonacci(n-1)\n suma = lista[len(lista)-1] + lista[len(lista)-2]\n lista.append(suma)\n return lista\n\ndef main():\n resultado = fibonacci(6)\n print(resultado)\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# This package includes different measures to evaluate topics
|
normal
|
{
"blob_id": "3dcca85c8003b57ad37734bbbe171ab8cef0f56c",
"index": 1894,
"step-1": "# This package includes different measures to evaluate topics\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
from sys import stdin
def get_time(d, sp, dists, i, d_old, sp_old):
if i == len(dists):
return 0
times = []
d_new = d[i]
sp_new = sp[i]
if d_new >= dists[i]:
res1 = get_time(d, sp, dists, i + 1, d_new - dists[i], sp_new)
if res1 is not None:
times.append(res1 + (dists[i] + 0.0) / sp_new)
if d_old >= dists[i]:
res1 = get_time(d, sp, dists, i + 1, d_old - dists[i], sp_old)
if res1 is not None:
times.append(res1 + (dists[i] + 0.0) / sp_old)
if len(times) == 0:
return None
else:
return min(times)
def get_answer():
parts = [int(el) for el in stdin.readline().strip().split()]
n = parts[0]
d = []
sp = []
for i in range(n):
ps = [int(el) for el in stdin.readline().strip().split()]
d.append(ps[0])
sp.append(ps[1])
dist = []
for i in range(n):
dist.append([int(el) for el in stdin.readline().strip().split()])
p = stdin.readline()
dists = []
for line in dist[:len(dist) - 1]:
for i in range(len(line)):
if line[i] != -1:
dists.append(line[i])
break
res = get_time(d, sp, dists, 0, 0, 0)
return res
def main():
t = int(stdin.readline().strip())
for i in range(t):
print "Case #{0}: {1}".format(i + 1, get_answer())
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "3b99cc0eb163f4a94bc47429ad3627a6ecad4818",
"index": 2774,
"step-1": "from sys import stdin\n\ndef get_time(d, sp, dists, i, d_old, sp_old):\n if i == len(dists):\n return 0\n times = []\n d_new = d[i]\n sp_new = sp[i]\n if d_new >= dists[i]:\n res1 = get_time(d, sp, dists, i + 1, d_new - dists[i], sp_new)\n if res1 is not None:\n times.append(res1 + (dists[i] + 0.0) / sp_new)\n if d_old >= dists[i]:\n res1 = get_time(d, sp, dists, i + 1, d_old - dists[i], sp_old)\n if res1 is not None:\n times.append(res1 + (dists[i] + 0.0) / sp_old)\n if len(times) == 0:\n return None\n else:\n return min(times)\n\ndef get_answer():\n parts = [int(el) for el in stdin.readline().strip().split()]\n n = parts[0]\n d = []\n sp = []\n for i in range(n):\n ps = [int(el) for el in stdin.readline().strip().split()]\n d.append(ps[0])\n sp.append(ps[1])\n dist = []\n for i in range(n):\n dist.append([int(el) for el in stdin.readline().strip().split()])\n p = stdin.readline()\n dists = []\n for line in dist[:len(dist) - 1]:\n for i in range(len(line)):\n if line[i] != -1:\n dists.append(line[i])\n break\n res = get_time(d, sp, dists, 0, 0, 0)\n return res\n\ndef main():\n t = int(stdin.readline().strip())\n for i in range(t):\n print \"Case #{0}: {1}\".format(i + 1, get_answer())\n\nif __name__ == \"__main__\":\n main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import re
def parse_rule(rule):
elem_regex = re.compile("(\d+) (.*) bags?.*")
rule = rule[:-1]
color, inside = tuple(rule.split(" bags contain"))
result = []
for element in inside.split(","):
match = elem_regex.search(element)
if match:
result.append((match.group(2), match.group(1)))
return color, result
def get_neighbours(graph, v):
return [color for color, _ in graph[v]]
def dfs_counting(graph, v):
return 1+sum(list(map(lambda vert: int(vert[1]) * dfs_counting(graph, vert[0]), graph[v])))
f = open('input.txt')
rules = f.readlines()
graph = {}
for rule in rules:
color, elements = parse_rule(rule)
graph[color] = elements
print(dfs_counting(graph, 'shiny gold')-1) #we are not counting the shiny gold one, so we substract 1
|
normal
|
{
"blob_id": "730aaa0404a0c776ce4d3a351f292f90768b6867",
"index": 7781,
"step-1": "<mask token>\n\n\ndef get_neighbours(graph, v):\n return [color for color, _ in graph[v]]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_rule(rule):\n elem_regex = re.compile('(\\\\d+) (.*) bags?.*')\n rule = rule[:-1]\n color, inside = tuple(rule.split(' bags contain'))\n result = []\n for element in inside.split(','):\n match = elem_regex.search(element)\n if match:\n result.append((match.group(2), match.group(1)))\n return color, result\n\n\ndef get_neighbours(graph, v):\n return [color for color, _ in graph[v]]\n\n\ndef dfs_counting(graph, v):\n return 1 + sum(list(map(lambda vert: int(vert[1]) * dfs_counting(graph,\n vert[0]), graph[v])))\n\n\n<mask token>\nfor rule in rules:\n color, elements = parse_rule(rule)\n graph[color] = elements\nprint(dfs_counting(graph, 'shiny gold') - 1)\n",
"step-3": "<mask token>\n\n\ndef parse_rule(rule):\n elem_regex = re.compile('(\\\\d+) (.*) bags?.*')\n rule = rule[:-1]\n color, inside = tuple(rule.split(' bags contain'))\n result = []\n for element in inside.split(','):\n match = elem_regex.search(element)\n if match:\n result.append((match.group(2), match.group(1)))\n return color, result\n\n\ndef get_neighbours(graph, v):\n return [color for color, _ in graph[v]]\n\n\ndef dfs_counting(graph, v):\n return 1 + sum(list(map(lambda vert: int(vert[1]) * dfs_counting(graph,\n vert[0]), graph[v])))\n\n\nf = open('input.txt')\nrules = f.readlines()\ngraph = {}\nfor rule in rules:\n color, elements = parse_rule(rule)\n graph[color] = elements\nprint(dfs_counting(graph, 'shiny gold') - 1)\n",
"step-4": "import re\n\n\ndef parse_rule(rule):\n elem_regex = re.compile('(\\\\d+) (.*) bags?.*')\n rule = rule[:-1]\n color, inside = tuple(rule.split(' bags contain'))\n result = []\n for element in inside.split(','):\n match = elem_regex.search(element)\n if match:\n result.append((match.group(2), match.group(1)))\n return color, result\n\n\ndef get_neighbours(graph, v):\n return [color for color, _ in graph[v]]\n\n\ndef dfs_counting(graph, v):\n return 1 + sum(list(map(lambda vert: int(vert[1]) * dfs_counting(graph,\n vert[0]), graph[v])))\n\n\nf = open('input.txt')\nrules = f.readlines()\ngraph = {}\nfor rule in rules:\n color, elements = parse_rule(rule)\n graph[color] = elements\nprint(dfs_counting(graph, 'shiny gold') - 1)\n",
"step-5": "import re\r\n\r\ndef parse_rule(rule):\r\n elem_regex = re.compile(\"(\\d+) (.*) bags?.*\")\r\n rule = rule[:-1]\r\n color, inside = tuple(rule.split(\" bags contain\"))\r\n result = []\r\n for element in inside.split(\",\"):\r\n match = elem_regex.search(element)\r\n if match:\r\n result.append((match.group(2), match.group(1)))\r\n return color, result\r\n\r\ndef get_neighbours(graph, v):\r\n return [color for color, _ in graph[v]]\r\n\r\ndef dfs_counting(graph, v):\r\n return 1+sum(list(map(lambda vert: int(vert[1]) * dfs_counting(graph, vert[0]), graph[v])))\r\n\r\n\r\nf = open('input.txt')\r\nrules = f.readlines()\r\n\r\ngraph = {}\r\nfor rule in rules:\r\n color, elements = parse_rule(rule)\r\n graph[color] = elements\r\n\r\nprint(dfs_counting(graph, 'shiny gold')-1) #we are not counting the shiny gold one, so we substract 1",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
#!/usr/bin/python2
import requests ,optparse
def get_link():
parser=optparse.OptionParser()
parser.add_option("-l","--link",dest="url",help="direct link of file to download .pdf")
(url,argument)=parser.parse_args()
return url
def download(url):
try:
get_request=requests.get(url)
name_url=url.split("/")[-1]
print(name_url)
with open(name_url,"wb") as file:
file.write(get_request.content)
except:
print("[-]Print Valid Link")
def start():
url_link=get_link()
try:
download(url_link.url)
except:
url_link=input("[+]Enter link:")
download(url_link)
start()
|
normal
|
{
"blob_id": "22ddae977afd2a1b0a729cf0d56783eaaca3b0a0",
"index": 9813,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_link():\n parser = optparse.OptionParser()\n parser.add_option('-l', '--link', dest='url', help=\n 'direct link of file to download .pdf')\n url, argument = parser.parse_args()\n return url\n\n\ndef download(url):\n try:\n get_request = requests.get(url)\n name_url = url.split('/')[-1]\n print(name_url)\n with open(name_url, 'wb') as file:\n file.write(get_request.content)\n except:\n print('[-]Print Valid Link')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_link():\n parser = optparse.OptionParser()\n parser.add_option('-l', '--link', dest='url', help=\n 'direct link of file to download .pdf')\n url, argument = parser.parse_args()\n return url\n\n\ndef download(url):\n try:\n get_request = requests.get(url)\n name_url = url.split('/')[-1]\n print(name_url)\n with open(name_url, 'wb') as file:\n file.write(get_request.content)\n except:\n print('[-]Print Valid Link')\n\n\ndef start():\n url_link = get_link()\n try:\n download(url_link.url)\n except:\n url_link = input('[+]Enter link:')\n download(url_link)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef get_link():\n parser = optparse.OptionParser()\n parser.add_option('-l', '--link', dest='url', help=\n 'direct link of file to download .pdf')\n url, argument = parser.parse_args()\n return url\n\n\ndef download(url):\n try:\n get_request = requests.get(url)\n name_url = url.split('/')[-1]\n print(name_url)\n with open(name_url, 'wb') as file:\n file.write(get_request.content)\n except:\n print('[-]Print Valid Link')\n\n\ndef start():\n url_link = get_link()\n try:\n download(url_link.url)\n except:\n url_link = input('[+]Enter link:')\n download(url_link)\n\n\nstart()\n",
"step-5": "#!/usr/bin/python2\n\nimport requests ,optparse\n\n\ndef get_link():\n parser=optparse.OptionParser()\n parser.add_option(\"-l\",\"--link\",dest=\"url\",help=\"direct link of file to download .pdf\")\n (url,argument)=parser.parse_args()\n return url\n\ndef download(url):\n try:\n get_request=requests.get(url)\n name_url=url.split(\"/\")[-1]\n print(name_url)\n with open(name_url,\"wb\") as file:\n file.write(get_request.content)\n except:\n print(\"[-]Print Valid Link\")\n \n \n\n\ndef start():\n url_link=get_link()\n try:\t\n download(url_link.url)\n except:\n url_link=input(\"[+]Enter link:\")\n download(url_link)\n\nstart()\n\n\n\n",
"step-ids": [
0,
2,
3,
4,
6
]
}
|
[
0,
2,
3,
4,
6
] |
from collections import OrderedDict
import tcod.event
from components import Entity, PaperDoll, Brain
from components.enums import Intention
from engine import GameScene
from scenes.list_menu_scene import MenuAction, ListMenuScene
from systems.utilities import set_intention, retract_intention
def run(scene: GameScene):
handle_show_equip_screen(scene)
def handle_show_equip_screen(scene: GameScene):
brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.SHOW_EQUIP_SCREEN]
for brain in brains:
entity = brain.entity
menu_actions = OrderedDict()
equip_action = MenuAction('e', 'equip', lambda slot: set_intention(scene, entity, slot, Intention.EQUIP_SLOT))
menu_actions[tcod.event.K_e] = equip_action
equipment_scene = ListMenuScene(
"Equipment",
get_slots_query(scene, entity),
row_builder=row_builder,
default_action=equip_action,
menu_actions=menu_actions,
id_extractor=lambda e: e[1],
parent_scene=scene
)
scene.controller.push_scene(equipment_scene)
retract_intention(scene, entity)
def get_slots_query(scene: GameScene, entity: int):
"""Return a query that resolves to entity's equipment slots and their equipped items."""
def query():
paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)
equipment = paper_doll.get_equipment()
return [
(k, scene.cm.get_one(Entity, v))
for k, v in equipment.items()
]
return query
def row_builder(entity):
part_name = entity[0]
equipped = entity[1].name if entity[1] else ''
return [part_name, equipped]
|
normal
|
{
"blob_id": "f1547e0893ce9c4661b546e49f3fc998745390d9",
"index": 4397,
"step-1": "<mask token>\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n",
"step-2": "<mask token>\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.\n SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(\n scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene('Equipment', get_slots_query(scene,\n entity), row_builder=row_builder, default_action=equip_action,\n menu_actions=menu_actions, id_extractor=lambda e: e[1],\n parent_scene=scene)\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n",
"step-3": "<mask token>\n\n\ndef run(scene: GameScene):\n handle_show_equip_screen(scene)\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.\n SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(\n scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene('Equipment', get_slots_query(scene,\n entity), row_builder=row_builder, default_action=equip_action,\n menu_actions=menu_actions, id_extractor=lambda e: e[1],\n parent_scene=scene)\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n",
"step-4": "from collections import OrderedDict\nimport tcod.event\nfrom components import Entity, PaperDoll, Brain\nfrom components.enums import Intention\nfrom engine import GameScene\nfrom scenes.list_menu_scene import MenuAction, ListMenuScene\nfrom systems.utilities import set_intention, retract_intention\n\n\ndef run(scene: GameScene):\n handle_show_equip_screen(scene)\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.\n SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(\n scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene('Equipment', get_slots_query(scene,\n entity), row_builder=row_builder, default_action=equip_action,\n menu_actions=menu_actions, id_extractor=lambda e: e[1],\n parent_scene=scene)\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n return [(k, scene.cm.get_one(Entity, v)) for k, v in equipment.items()]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n",
"step-5": "\n\nfrom collections import OrderedDict\n\nimport tcod.event\n\nfrom components import Entity, PaperDoll, Brain\nfrom components.enums import Intention\nfrom engine import GameScene\nfrom scenes.list_menu_scene import MenuAction, ListMenuScene\nfrom systems.utilities import set_intention, retract_intention\n\n\ndef run(scene: GameScene):\n handle_show_equip_screen(scene)\n\n\ndef handle_show_equip_screen(scene: GameScene):\n brains = [b for b in scene.cm.get(Brain) if b.intention is Intention.SHOW_EQUIP_SCREEN]\n for brain in brains:\n entity = brain.entity\n menu_actions = OrderedDict()\n equip_action = MenuAction('e', 'equip', lambda slot: set_intention(scene, entity, slot, Intention.EQUIP_SLOT))\n menu_actions[tcod.event.K_e] = equip_action\n equipment_scene = ListMenuScene(\n \"Equipment\",\n get_slots_query(scene, entity),\n row_builder=row_builder,\n default_action=equip_action,\n menu_actions=menu_actions,\n id_extractor=lambda e: e[1],\n parent_scene=scene\n )\n scene.controller.push_scene(equipment_scene)\n retract_intention(scene, entity)\n\n\ndef get_slots_query(scene: GameScene, entity: int):\n \"\"\"Return a query that resolves to entity's equipment slots and their equipped items.\"\"\"\n\n def query():\n\n paper_doll: PaperDoll = scene.cm.get_one(PaperDoll, entity)\n equipment = paper_doll.get_equipment()\n\n return [\n (k, scene.cm.get_one(Entity, v))\n for k, v in equipment.items()\n ]\n return query\n\n\ndef row_builder(entity):\n part_name = entity[0]\n equipped = entity[1].name if entity[1] else ''\n return [part_name, equipped]\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 25 13:34:46 2017
@author: Sven Geboers
"""
from math import pi,e
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
def LevelToIntensity(NoiseLevelIndB):
I0 = 10.**(-12) #This is the treshold hearing intensity, matching 0 dB
NoiseLevel = float(NoiseLevelIndB)
Intensity = I0*10**(NoiseLevel/10)
return Intensity
def IntensityToLevel(Intensity):
I0 = 10.**(-12) #This is the treshold hearing intensity, matching 0 dB
Intensity = Intensity
NoiseLevelIndB = 10*np.log10(Intensity/I0)
return NoiseLevelIndB
#Definine the mathematical function coth(x)
coth = lambda x: (e**(x)-e**(-x))/(e**(x)-e**(-x)) #np.cosh(x)/np.sinh(x)
#Closes all previous plots so that we don't have to click them away manually
plt.close('all')
#Defining some constants:
SLHighway10 = 53.5 #dB, this is the sound level of a highway at 10 m distance
d1 = 10. #m, distance between the highway and the sound barrier
#Creating data mesh
b = np.arange(0.1, 150, 0.5)
d = np.arange(0.1, 150, 0.5)
b, d = np.meshgrid(b, d)
#Calculating maximum velocity and individual sound power
Vmax = 9.25 #m/s
IntensityTurbine40cm = lambda V: 4*10**(-6)*e**(0.2216*V)
IntensityIndividualTurbine = IntensityTurbine40cm(Vmax)
PowerIndividual = IntensityIndividualTurbine*pi*0.16 * 4
SoundPowerHighway = LevelToIntensity(SLHighway10)*pi*d1**2 * 4
#Calculating intensity and sound level
Intensity = PowerIndividual/(4*b*d)*coth(d/b*pi)+SoundPowerHighway/(4*pi*(d+d1)**2)
SL = IntensityToLevel(Intensity)
#Plots contour curve
levels = [41.,47.] #Contour levels that will be shown
fig = plt.figure()
CS = plt.contourf(d, b, SL, levels,cmap=cm.Greys)
cbar=plt.colorbar()
cbar.set_label('Sound level in dB', rotation=270)
plt.xlabel('Distance (m)')
plt.ylabel('Spacing (m)')
plt.title('Sound level in function of distance and spacing \n with a velocity of 9.25 m/s for WM6',fontweight='bold')
plt.minorticks_on()
plt.grid(b=True, which='major',linewidth=2)
plt.grid(b=True, which='minor')
plt.show()
|
normal
|
{
"blob_id": "68a1d5a77abd19aece04bd560df121ceddccea42",
"index": 3179,
"step-1": "<mask token>\n\n\ndef LevelToIntensity(NoiseLevelIndB):\n I0 = 10.0 ** -12\n NoiseLevel = float(NoiseLevelIndB)\n Intensity = I0 * 10 ** (NoiseLevel / 10)\n return Intensity\n\n\ndef IntensityToLevel(Intensity):\n I0 = 10.0 ** -12\n Intensity = Intensity\n NoiseLevelIndB = 10 * np.log10(Intensity / I0)\n return NoiseLevelIndB\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef LevelToIntensity(NoiseLevelIndB):\n I0 = 10.0 ** -12\n NoiseLevel = float(NoiseLevelIndB)\n Intensity = I0 * 10 ** (NoiseLevel / 10)\n return Intensity\n\n\ndef IntensityToLevel(Intensity):\n I0 = 10.0 ** -12\n Intensity = Intensity\n NoiseLevelIndB = 10 * np.log10(Intensity / I0)\n return NoiseLevelIndB\n\n\n<mask token>\nplt.close('all')\n<mask token>\ncbar.set_label('Sound level in dB', rotation=270)\nplt.xlabel('Distance (m)')\nplt.ylabel('Spacing (m)')\nplt.title(\n \"\"\"Sound level in function of distance and spacing \n with a velocity of 9.25 m/s for WM6\"\"\"\n , fontweight='bold')\nplt.minorticks_on()\nplt.grid(b=True, which='major', linewidth=2)\nplt.grid(b=True, which='minor')\nplt.show()\n",
"step-3": "<mask token>\n\n\ndef LevelToIntensity(NoiseLevelIndB):\n I0 = 10.0 ** -12\n NoiseLevel = float(NoiseLevelIndB)\n Intensity = I0 * 10 ** (NoiseLevel / 10)\n return Intensity\n\n\ndef IntensityToLevel(Intensity):\n I0 = 10.0 ** -12\n Intensity = Intensity\n NoiseLevelIndB = 10 * np.log10(Intensity / I0)\n return NoiseLevelIndB\n\n\ncoth = lambda x: (e ** x - e ** -x) / (e ** x - e ** -x)\nplt.close('all')\nSLHighway10 = 53.5\nd1 = 10.0\nb = np.arange(0.1, 150, 0.5)\nd = np.arange(0.1, 150, 0.5)\nb, d = np.meshgrid(b, d)\nVmax = 9.25\nIntensityTurbine40cm = lambda V: 4 * 10 ** -6 * e ** (0.2216 * V)\nIntensityIndividualTurbine = IntensityTurbine40cm(Vmax)\nPowerIndividual = IntensityIndividualTurbine * pi * 0.16 * 4\nSoundPowerHighway = LevelToIntensity(SLHighway10) * pi * d1 ** 2 * 4\nIntensity = PowerIndividual / (4 * b * d) * coth(d / b * pi\n ) + SoundPowerHighway / (4 * pi * (d + d1) ** 2)\nSL = IntensityToLevel(Intensity)\nlevels = [41.0, 47.0]\nfig = plt.figure()\nCS = plt.contourf(d, b, SL, levels, cmap=cm.Greys)\ncbar = plt.colorbar()\ncbar.set_label('Sound level in dB', rotation=270)\nplt.xlabel('Distance (m)')\nplt.ylabel('Spacing (m)')\nplt.title(\n \"\"\"Sound level in function of distance and spacing \n with a velocity of 9.25 m/s for WM6\"\"\"\n , fontweight='bold')\nplt.minorticks_on()\nplt.grid(b=True, which='major', linewidth=2)\nplt.grid(b=True, which='minor')\nplt.show()\n",
"step-4": "<mask token>\nfrom math import pi, e\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\n\n\ndef LevelToIntensity(NoiseLevelIndB):\n I0 = 10.0 ** -12\n NoiseLevel = float(NoiseLevelIndB)\n Intensity = I0 * 10 ** (NoiseLevel / 10)\n return Intensity\n\n\ndef IntensityToLevel(Intensity):\n I0 = 10.0 ** -12\n Intensity = Intensity\n NoiseLevelIndB = 10 * np.log10(Intensity / I0)\n return NoiseLevelIndB\n\n\ncoth = lambda x: (e ** x - e ** -x) / (e ** x - e ** -x)\nplt.close('all')\nSLHighway10 = 53.5\nd1 = 10.0\nb = np.arange(0.1, 150, 0.5)\nd = np.arange(0.1, 150, 0.5)\nb, d = np.meshgrid(b, d)\nVmax = 9.25\nIntensityTurbine40cm = lambda V: 4 * 10 ** -6 * e ** (0.2216 * V)\nIntensityIndividualTurbine = IntensityTurbine40cm(Vmax)\nPowerIndividual = IntensityIndividualTurbine * pi * 0.16 * 4\nSoundPowerHighway = LevelToIntensity(SLHighway10) * pi * d1 ** 2 * 4\nIntensity = PowerIndividual / (4 * b * d) * coth(d / b * pi\n ) + SoundPowerHighway / (4 * pi * (d + d1) ** 2)\nSL = IntensityToLevel(Intensity)\nlevels = [41.0, 47.0]\nfig = plt.figure()\nCS = plt.contourf(d, b, SL, levels, cmap=cm.Greys)\ncbar = plt.colorbar()\ncbar.set_label('Sound level in dB', rotation=270)\nplt.xlabel('Distance (m)')\nplt.ylabel('Spacing (m)')\nplt.title(\n \"\"\"Sound level in function of distance and spacing \n with a velocity of 9.25 m/s for WM6\"\"\"\n , fontweight='bold')\nplt.minorticks_on()\nplt.grid(b=True, which='major', linewidth=2)\nplt.grid(b=True, which='minor')\nplt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue Apr 25 13:34:46 2017\r\n\r\n@author: Sven Geboers\r\n\"\"\"\r\n\r\nfrom math import pi,e\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom matplotlib import cm\r\n\r\ndef LevelToIntensity(NoiseLevelIndB):\r\n I0 = 10.**(-12) #This is the treshold hearing intensity, matching 0 dB\r\n NoiseLevel = float(NoiseLevelIndB)\r\n Intensity = I0*10**(NoiseLevel/10)\r\n return Intensity\r\n \r\ndef IntensityToLevel(Intensity):\r\n I0 = 10.**(-12) #This is the treshold hearing intensity, matching 0 dB\r\n Intensity = Intensity\r\n NoiseLevelIndB = 10*np.log10(Intensity/I0)\r\n return NoiseLevelIndB\r\n \r\n#Definine the mathematical function coth(x)\r\ncoth = lambda x: (e**(x)-e**(-x))/(e**(x)-e**(-x)) #np.cosh(x)/np.sinh(x)\r\n\r\n#Closes all previous plots so that we don't have to click them away manually\r\nplt.close('all')\r\n\r\n#Defining some constants:\r\nSLHighway10 = 53.5 #dB, this is the sound level of a highway at 10 m distance\r\nd1 = 10. #m, distance between the highway and the sound barrier\r\n\r\n#Creating data mesh \r\nb = np.arange(0.1, 150, 0.5)\r\nd = np.arange(0.1, 150, 0.5)\r\nb, d = np.meshgrid(b, d)\r\n\r\n#Calculating maximum velocity and individual sound power\r\nVmax = 9.25 #m/s\r\nIntensityTurbine40cm = lambda V: 4*10**(-6)*e**(0.2216*V)\r\nIntensityIndividualTurbine = IntensityTurbine40cm(Vmax)\r\nPowerIndividual = IntensityIndividualTurbine*pi*0.16 * 4\r\nSoundPowerHighway = LevelToIntensity(SLHighway10)*pi*d1**2 * 4\r\n\r\n#Calculating intensity and sound level\r\nIntensity = PowerIndividual/(4*b*d)*coth(d/b*pi)+SoundPowerHighway/(4*pi*(d+d1)**2)\r\nSL = IntensityToLevel(Intensity)\r\n\r\n#Plots contour curve \r\nlevels = [41.,47.] 
#Contour levels that will be shown\r\nfig = plt.figure()\r\nCS = plt.contourf(d, b, SL, levels,cmap=cm.Greys)\r\ncbar=plt.colorbar()\r\ncbar.set_label('Sound level in dB', rotation=270)\r\nplt.xlabel('Distance (m)')\r\nplt.ylabel('Spacing (m)')\r\nplt.title('Sound level in function of distance and spacing \\n with a velocity of 9.25 m/s for WM6',fontweight='bold')\r\nplt.minorticks_on()\r\nplt.grid(b=True, which='major',linewidth=2)\r\nplt.grid(b=True, which='minor') \r\nplt.show()\r\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
"""""""""""""""
Write Data
"""""""""""""""
import json
from city import City
def load_json(file_name='data.json'):
with open(file_name, 'r') as json_fp:
json_data = json_fp.read()
data_arr = json.loads(json_data)
return data_arr
if __name__ == '__main__':
json_file = 'data.json'
load_json(json_file)
|
normal
|
{
"blob_id": "63068a15d750abb29398d687495d6001ba17ab8a",
"index": 9435,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\n\nif __name__ == '__main__':\n json_file = 'data.json'\n load_json(json_file)\n",
"step-4": "<mask token>\nimport json\nfrom city import City\n\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\n\nif __name__ == '__main__':\n json_file = 'data.json'\n load_json(json_file)\n",
"step-5": "\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\nWrite Data\n\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n\nimport json\nfrom city import City\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\nif __name__ == '__main__':\n json_file = 'data.json'\n load_json(json_file)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
__author__ = 'Jager'
from char import Character
class Rouge (Character):
def special_attack1(self, opponent, hitdamage_callback, specatt_callback):
pass # hook method
def special_attack2(self, opponent, hitdamage_callback, specatt_callback):
pass # hook method
def heal(self, target):
pass # hook method
def regen_resource(self):
pass # hook method
def full_resource(self):
pass
|
normal
|
{
"blob_id": "36991c3191ba48b1b9dbd843e279f8fe124f1339",
"index": 73,
"step-1": "<mask token>\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n <mask token>\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-2": "<mask token>\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def heal(self, target):\n pass\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-3": "__author__ = 'Jager'\n<mask token>\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def heal(self, target):\n pass\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-4": "__author__ = 'Jager'\nfrom char import Character\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def heal(self, target):\n pass\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-5": "__author__ = 'Jager'\nfrom char import Character\n\nclass Rouge (Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass # hook method\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass # hook method\n\n def heal(self, target):\n pass # hook method\n\n def regen_resource(self):\n pass # hook method\n\n\n def full_resource(self):\n pass",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
data = [] ##用來裝reviews.txt的留言
count = 0 ##計數目前檔案讀取到第幾筆
with open('reviews.txt', 'r') as f:
for line in f:
data.append(line)
count += 1
if count % 1000 == 0:
print(len(data))
total = len(data)
print('檔案讀取完了,總共有', len(data), '筆資料')
print(len(data)) #印出data串列的--項目數量
print(len(data[0])) #印出data串列第一個項目的--字元數量
print(data[0])
sum_len = 0
for d in data:
sum_len += len(d)
print(sum_len) #data串列的總字元數
print("留言平均長度為", sum_len / count)
##印出字數小於100的留言
new = []
for d in data:
if len(d) < 100:
new.append(d)
print('一共有', len(new), '比留言長度小於100')
print(new[0])
##印出提到good的留言
good = []
for d in data:
if 'good' in d: #d裡面是否有'good'字串
good.append(d)
print('一共有', len(good), '提到good')
print(good[0])
####################
#清單快寫法1
good = [d for d in data if 'good' in d] #[|d運算| for |d變數| in |data清單| |if 'good' in d篩選條件|]
print(good)
####################
#清單快寫法2
bad = ['bad' in d for d in data] #[|d運算| for |d變數| in |data清單|]
print(bad)
#普通寫法
bad = []
for d in data:
bad.append('bad' in d)
print(bad)
# 文字計數
wc = {} # word_count
for d in data:
words = d.split()
for word in words:
if word in wc:
wc[word] += 1
else:
wc[word] = 1 # 新增新的key進wc字典
for word in wc:
if wc[word] > 1000000:
print(word, wc[word])
print(len(wc))
print(wc['Allen'])
while True:
word = input('請問你想查甚麼字: ')
if word == 'q':
print('感謝使用')
break
elif word not in wc:
print('沒這個字')
continue
else:
print(word, '出現過的次數: ', wc[word])
|
normal
|
{
"blob_id": "835beebe452a252fb744a06d3e6ff221469af6bf",
"index": 6699,
"step-1": "data = [] ##用來裝reviews.txt的留言\ncount = 0 ##計數目前檔案讀取到第幾筆\n\nwith open('reviews.txt', 'r') as f:\n for line in f:\n data.append(line)\n count += 1\n if count % 1000 == 0:\n print(len(data))\ntotal = len(data)\nprint('檔案讀取完了,總共有', len(data), '筆資料')\n\nprint(len(data)) #印出data串列的--項目數量\nprint(len(data[0])) #印出data串列第一個項目的--字元數量\nprint(data[0])\n\nsum_len = 0\n\nfor d in data:\nsum_len += len(d)\n\nprint(sum_len) #data串列的總字元數\n\nprint(\"留言平均長度為\", sum_len / count)\n\n##印出字數小於100的留言\nnew = []\nfor d in data:\nif len(d) < 100:\n new.append(d)\nprint('一共有', len(new), '比留言長度小於100')\nprint(new[0])\n\n##印出提到good的留言\ngood = []\nfor d in data:\nif 'good' in d: #d裡面是否有'good'字串\n good.append(d)\nprint('一共有', len(good), '提到good')\nprint(good[0])\n\n####################\n#清單快寫法1\ngood = [d for d in data if 'good' in d] #[|d運算| for |d變數| in |data清單| |if 'good' in d篩選條件|]\nprint(good)\n\n####################\n#清單快寫法2\nbad = ['bad' in d for d in data] #[|d運算| for |d變數| in |data清單|]\nprint(bad)\n\n#普通寫法\nbad = []\nfor d in data:\nbad.append('bad' in d)\n\nprint(bad)\n\n# 文字計數\nwc = {} # word_count\nfor d in data:\n words = d.split()\n for word in words:\n if word in wc:\n wc[word] += 1\n else:\n wc[word] = 1 # 新增新的key進wc字典\n\nfor word in wc:\n if wc[word] > 1000000:\n print(word, wc[word])\n\nprint(len(wc))\nprint(wc['Allen'])\n\nwhile True:\n word = input('請問你想查甚麼字: ')\n if word == 'q':\n print('感謝使用')\n break\n elif word not in wc:\n print('沒這個字')\n continue\n else:\n print(word, '出現過的次數: ', wc[word])\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import grpc
import time
import json
import sys
import uuid
from arch.api.proto import inference_service_pb2
from arch.api.proto import inference_service_pb2_grpc
import threading
def run(address):
ths = []
with grpc.insecure_channel(address) as channel:
for i in range(1):
th = threading.Thread(target=send, args=(channel,))
ths.append(th)
st = int(time.time())
for th in ths:
th.start()
for th in ths:
th.join()
et = int(time.time())
def process_response(call_future):
print(call_future.result())
def send(channel):
stub = inference_service_pb2_grpc.InferenceServiceStub(channel)
request = inference_service_pb2.InferenceMessage()
request_data = dict()
request_data['serviceId'] = 'xxxxxxxxx'
request_data['applyId'] = ''
# request_data['modelId'] = 'arbiter-10000#guest-10000#host-10000#model' # You can specify the model id this way
# request_data['modelVersion'] = 'acd3e1807a1211e9969aacde48001122' # You can specify the model version this way
request_data['caseid'] = uuid.uuid1().hex
feature_data = dict()
feature_data['fid1'] = 5.1
feature_data['fid2'] = 6.2
feature_data['fid3'] = 7.6
request_data['featureData'] = feature_data
request_data['sendToRemoteFeatureData'] = feature_data
print(json.dumps(request_data, indent=4))
request.body = json.dumps(request_data).encode(encoding='utf-8')
print(stub.inference(request))
if __name__ == '__main__':
run(sys.argv[1])
|
normal
|
{
"blob_id": "5430e1861a6244c25c00699323efa0921a5af940",
"index": 3709,
"step-1": "<mask token>\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\n<mask token>\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\nif __name__ == '__main__':\n run(sys.argv[1])\n",
"step-4": "import grpc\nimport time\nimport json\nimport sys\nimport uuid\nfrom arch.api.proto import inference_service_pb2\nfrom arch.api.proto import inference_service_pb2_grpc\nimport threading\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\nif __name__ == '__main__':\n run(sys.argv[1])\n",
"step-5": "import grpc\nimport time\nimport json\nimport sys\nimport uuid\n\nfrom arch.api.proto import inference_service_pb2\nfrom arch.api.proto import inference_service_pb2_grpc\nimport threading\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n # request_data['modelId'] = 'arbiter-10000#guest-10000#host-10000#model' # You can specify the model id this way\n # request_data['modelVersion'] = 'acd3e1807a1211e9969aacde48001122' # You can specify the model version this way\n request_data['caseid'] = uuid.uuid1().hex\n\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n\n print(json.dumps(request_data, indent=4))\n\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\nif __name__ == '__main__':\n run(sys.argv[1])\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import weakref
from enum import Enum
from functools import partial
from typing import TYPE_CHECKING
import inflection
if TYPE_CHECKING:
from stake.client import StakeClient
camelcase = partial(inflection.camelize, uppercase_first_letter=False)
__all__ = ["SideEnum"]
class SideEnum(str, Enum):
BUY = "B"
SELL = "S"
class BaseClient:
# flake8: noqa
def __init__(self, client: "StakeClient"):
self._client = weakref.proxy(client)
|
normal
|
{
"blob_id": "f13ccbfb27788deca0d4f4b58a4e9e8c7e8e0306",
"index": 1644,
"step-1": "<mask token>\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-2": "<mask token>\nif TYPE_CHECKING:\n from stake.client import StakeClient\n<mask token>\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n from stake.client import StakeClient\ncamelcase = partial(inflection.camelize, uppercase_first_letter=False)\n__all__ = ['SideEnum']\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-4": "import weakref\nfrom enum import Enum\nfrom functools import partial\nfrom typing import TYPE_CHECKING\nimport inflection\nif TYPE_CHECKING:\n from stake.client import StakeClient\ncamelcase = partial(inflection.camelize, uppercase_first_letter=False)\n__all__ = ['SideEnum']\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-5": "import weakref\nfrom enum import Enum\nfrom functools import partial\nfrom typing import TYPE_CHECKING\n\nimport inflection\n\nif TYPE_CHECKING:\n from stake.client import StakeClient\n\ncamelcase = partial(inflection.camelize, uppercase_first_letter=False)\n\n__all__ = [\"SideEnum\"]\n\n\nclass SideEnum(str, Enum):\n BUY = \"B\"\n SELL = \"S\"\n\n\nclass BaseClient:\n # flake8: noqa\n def __init__(self, client: \"StakeClient\"):\n self._client = weakref.proxy(client)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from typing import List, Callable
#: A list of int
T = List[int]
C = Callable[[int], None] # a generic alias not having a doccomment
|
normal
|
{
"blob_id": "aaee69d339cf1c14e54366633155ee57026e6487",
"index": 2071,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nT = List[int]\nC = Callable[[int], None]\n",
"step-3": "from typing import List, Callable\nT = List[int]\nC = Callable[[int], None]\n",
"step-4": "from typing import List, Callable\n\n#: A list of int\nT = List[int]\n\nC = Callable[[int], None] # a generic alias not having a doccomment\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.contrib.auth.models import User
from django_filters import (
NumberFilter,
DateTimeFilter,
AllValuesFilter
)
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import permissions
from rest_framework.throttling import ScopedRateThrottle
from rest_framework import filters
from rest_framework.generics import (
ListCreateAPIView,
RetrieveUpdateDestroyAPIView,
GenericAPIView,
ListAPIView,
RetrieveAPIView
)
from games.models import (
GameCategory,
Game,
Player,
PlayerScore
)
from games.serializers import (
GameCategorySerializer,
GameSerializer,
PlayerSerializer,
PlayerScoreSerializer,
)
from games.serializers import UserSerializer
from games.permissions import IsOwnerOrReadOnly
class ApiRoot(GenericAPIView):
name= 'api-root'
def get(self,request,*args,**kwargs):
return Response(
{
'players':reverse(PlayerList.name,request=request),
'game-categories':reverse(GameCategoryList.name,request=request),
'game':reverse(GameList.name,request=request),
'scores':reverse(PlayerScoreList.name,request=request),
'users': reverse(UserList.name,request=request)
}
)
class GameCategoryList(ListCreateAPIView):
queryset = GameCategory.objects.all()
serializer_class = GameCategorySerializer
name = 'gamecategory-list'
throttle_scope = 'game-categories'
throttle_classes = (ScopedRateThrottle,)
filter_fields = ('name',)
search_fields = ('^name',)
ordering_fields = ('name',)
class GameCategoryDetail(RetrieveUpdateDestroyAPIView):
queryset = GameCategory.objects.all()
serializer_class = GameCategorySerializer
name = 'gamecategory-detail'
throttle_scope = 'game-categories'
throttle_classes = (ScopedRateThrottle,)
class GameList(ListCreateAPIView):
queryset = Game.objects.all()
serializer_class = GameSerializer
name = 'game-list'
permission_classes = (
permissions.IsAuthenticatedOrReadOnly,
IsOwnerOrReadOnly
)
filter_fields = (
'name',
'game_category',
'release_date',
'played',
'owner',
)
search_fields = (
'^name',
)
ordering_fields = (
'name',
'release_date',
)
def perform_create(self, serializer):
# pass an additional owner field to the create method
# to set the owner to the user recieved in the request
serializer.save(owner=self.request.user)
class GameDetail(RetrieveUpdateDestroyAPIView):
queryset = Game.objects.all()
serializer_class = GameSerializer
name = 'game-detail'
permission_classes = (
permissions.IsAuthenticatedOrReadOnly,
IsOwnerOrReadOnly
)
class PlayerList(ListCreateAPIView):
queryset = Player.objects.all()
serializer_class = PlayerSerializer
name = 'player-list'
filter_fields = (
'name',
'gender',
)
search_fields = (
'^name',
)
ordering_fields = (
'name',
)
class PlayerDetail(RetrieveUpdateDestroyAPIView):
queryset = Player.objects.all()
serializer_class = PlayerSerializer
name = 'player-detail'
class PlayerScoreFilter(filters.FilterSet):
min_score = NumberFilter(
name='score',lookup_expr='gte'
)
max_score = NumberFilter(
name='score',lookup_expr='lte'
)
from_score_date = DateTimeFilter(
name='score_date',
lookup_expr='gte'
)
to_score_date = DateTimeFilter(
name='score_date',
lookup_expr='lte'
)
player_name = AllValuesFilter(
name='player__name'
)
game_name = AllValuesFilter(
name= 'game__name'
)
class Meta:
model = PlayerScore
fields = (
'score',
'from_score_date',
'to_score_date',
'min_score',
'max_score',
# player__name will be accessed as player_name
'player_name',
#game__name will be accessed as game_name
'game_name'
)
class PlayerScoreList(ListCreateAPIView):
queryset = PlayerScore.objects.all()
serializer_class = PlayerScoreSerializer
name = 'playerscore-list'
filter_class =PlayerScoreFilter
ordering_fields = (
'score',
'score_date',
)
class PlayerScoreDetail(RetrieveUpdateDestroyAPIView):
queryset = PlayerScore.objects.all()
serializer_class = PlayerScoreSerializer
name = 'playerscore-detail'
class UserList(ListAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
name = 'user-list'
class UserDetail(RetrieveAPIView):
    """Read-only detail view for a single user."""
    name = 'user-detail'
    queryset = User.objects.all()
    serializer_class = UserSerializer
|
normal
|
{
"blob_id": "2908d34165fac272c9571be623855a0613c952f3",
"index": 5433,
"step-1": "<mask token>\n\n\nclass GameList(ListCreateAPIView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass GameDetail(RetrieveUpdateDestroyAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-detail'\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly)\n\n\nclass PlayerList(ListCreateAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-list'\n filter_fields = 'name', 'gender'\n search_fields = '^name',\n ordering_fields = 'name',\n\n\nclass PlayerDetail(RetrieveUpdateDestroyAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-detail'\n\n\nclass PlayerScoreFilter(filters.FilterSet):\n min_score = NumberFilter(name='score', lookup_expr='gte')\n max_score = NumberFilter(name='score', lookup_expr='lte')\n from_score_date = DateTimeFilter(name='score_date', lookup_expr='gte')\n to_score_date = DateTimeFilter(name='score_date', lookup_expr='lte')\n player_name = AllValuesFilter(name='player__name')\n game_name = AllValuesFilter(name='game__name')\n\n\n class Meta:\n model = PlayerScore\n fields = ('score', 'from_score_date', 'to_score_date', 'min_score',\n 'max_score', 'player_name', 'game_name')\n\n\nclass PlayerScoreList(ListCreateAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-list'\n filter_class = PlayerScoreFilter\n ordering_fields = 'score', 'score_date'\n\n\nclass PlayerScoreDetail(RetrieveUpdateDestroyAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-detail'\n\n\nclass UserList(ListAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-list'\n\n\nclass UserDetail(RetrieveAPIView):\n 
queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-detail'\n",
"step-2": "<mask token>\n\n\nclass ApiRoot(GenericAPIView):\n <mask token>\n\n def get(self, request, *args, **kwargs):\n return Response({'players': reverse(PlayerList.name, request=\n request), 'game-categories': reverse(GameCategoryList.name,\n request=request), 'game': reverse(GameList.name, request=\n request), 'scores': reverse(PlayerScoreList.name, request=\n request), 'users': reverse(UserList.name, request=request)})\n\n\nclass GameCategoryList(ListCreateAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-list'\n throttle_scope = 'game-categories'\n throttle_classes = ScopedRateThrottle,\n filter_fields = 'name',\n search_fields = '^name',\n ordering_fields = 'name',\n\n\nclass GameCategoryDetail(RetrieveUpdateDestroyAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-detail'\n throttle_scope = 'game-categories'\n throttle_classes = ScopedRateThrottle,\n\n\nclass GameList(ListCreateAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-list'\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly)\n filter_fields = 'name', 'game_category', 'release_date', 'played', 'owner'\n search_fields = '^name',\n ordering_fields = 'name', 'release_date'\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass GameDetail(RetrieveUpdateDestroyAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-detail'\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly)\n\n\nclass PlayerList(ListCreateAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-list'\n filter_fields = 'name', 'gender'\n search_fields = '^name',\n ordering_fields = 'name',\n\n\nclass PlayerDetail(RetrieveUpdateDestroyAPIView):\n queryset = Player.objects.all()\n 
serializer_class = PlayerSerializer\n name = 'player-detail'\n\n\nclass PlayerScoreFilter(filters.FilterSet):\n min_score = NumberFilter(name='score', lookup_expr='gte')\n max_score = NumberFilter(name='score', lookup_expr='lte')\n from_score_date = DateTimeFilter(name='score_date', lookup_expr='gte')\n to_score_date = DateTimeFilter(name='score_date', lookup_expr='lte')\n player_name = AllValuesFilter(name='player__name')\n game_name = AllValuesFilter(name='game__name')\n\n\n class Meta:\n model = PlayerScore\n fields = ('score', 'from_score_date', 'to_score_date', 'min_score',\n 'max_score', 'player_name', 'game_name')\n\n\nclass PlayerScoreList(ListCreateAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-list'\n filter_class = PlayerScoreFilter\n ordering_fields = 'score', 'score_date'\n\n\nclass PlayerScoreDetail(RetrieveUpdateDestroyAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-detail'\n\n\nclass UserList(ListAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-list'\n\n\nclass UserDetail(RetrieveAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-detail'\n",
"step-3": "<mask token>\n\n\nclass ApiRoot(GenericAPIView):\n name = 'api-root'\n\n def get(self, request, *args, **kwargs):\n return Response({'players': reverse(PlayerList.name, request=\n request), 'game-categories': reverse(GameCategoryList.name,\n request=request), 'game': reverse(GameList.name, request=\n request), 'scores': reverse(PlayerScoreList.name, request=\n request), 'users': reverse(UserList.name, request=request)})\n\n\nclass GameCategoryList(ListCreateAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-list'\n throttle_scope = 'game-categories'\n throttle_classes = ScopedRateThrottle,\n filter_fields = 'name',\n search_fields = '^name',\n ordering_fields = 'name',\n\n\nclass GameCategoryDetail(RetrieveUpdateDestroyAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-detail'\n throttle_scope = 'game-categories'\n throttle_classes = ScopedRateThrottle,\n\n\nclass GameList(ListCreateAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-list'\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly)\n filter_fields = 'name', 'game_category', 'release_date', 'played', 'owner'\n search_fields = '^name',\n ordering_fields = 'name', 'release_date'\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass GameDetail(RetrieveUpdateDestroyAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-detail'\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly)\n\n\nclass PlayerList(ListCreateAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-list'\n filter_fields = 'name', 'gender'\n search_fields = '^name',\n ordering_fields = 'name',\n\n\nclass PlayerDetail(RetrieveUpdateDestroyAPIView):\n queryset = Player.objects.all()\n 
serializer_class = PlayerSerializer\n name = 'player-detail'\n\n\nclass PlayerScoreFilter(filters.FilterSet):\n min_score = NumberFilter(name='score', lookup_expr='gte')\n max_score = NumberFilter(name='score', lookup_expr='lte')\n from_score_date = DateTimeFilter(name='score_date', lookup_expr='gte')\n to_score_date = DateTimeFilter(name='score_date', lookup_expr='lte')\n player_name = AllValuesFilter(name='player__name')\n game_name = AllValuesFilter(name='game__name')\n\n\n class Meta:\n model = PlayerScore\n fields = ('score', 'from_score_date', 'to_score_date', 'min_score',\n 'max_score', 'player_name', 'game_name')\n\n\nclass PlayerScoreList(ListCreateAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-list'\n filter_class = PlayerScoreFilter\n ordering_fields = 'score', 'score_date'\n\n\nclass PlayerScoreDetail(RetrieveUpdateDestroyAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-detail'\n\n\nclass UserList(ListAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-list'\n\n\nclass UserDetail(RetrieveAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-detail'\n",
"step-4": "from django.contrib.auth.models import User\nfrom django_filters import NumberFilter, DateTimeFilter, AllValuesFilter\nfrom rest_framework.response import Response\nfrom rest_framework.reverse import reverse\nfrom rest_framework import permissions\nfrom rest_framework.throttling import ScopedRateThrottle\nfrom rest_framework import filters\nfrom rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView, GenericAPIView, ListAPIView, RetrieveAPIView\nfrom games.models import GameCategory, Game, Player, PlayerScore\nfrom games.serializers import GameCategorySerializer, GameSerializer, PlayerSerializer, PlayerScoreSerializer\nfrom games.serializers import UserSerializer\nfrom games.permissions import IsOwnerOrReadOnly\n\n\nclass ApiRoot(GenericAPIView):\n name = 'api-root'\n\n def get(self, request, *args, **kwargs):\n return Response({'players': reverse(PlayerList.name, request=\n request), 'game-categories': reverse(GameCategoryList.name,\n request=request), 'game': reverse(GameList.name, request=\n request), 'scores': reverse(PlayerScoreList.name, request=\n request), 'users': reverse(UserList.name, request=request)})\n\n\nclass GameCategoryList(ListCreateAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-list'\n throttle_scope = 'game-categories'\n throttle_classes = ScopedRateThrottle,\n filter_fields = 'name',\n search_fields = '^name',\n ordering_fields = 'name',\n\n\nclass GameCategoryDetail(RetrieveUpdateDestroyAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-detail'\n throttle_scope = 'game-categories'\n throttle_classes = ScopedRateThrottle,\n\n\nclass GameList(ListCreateAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-list'\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly)\n filter_fields = 'name', 'game_category', 
'release_date', 'played', 'owner'\n search_fields = '^name',\n ordering_fields = 'name', 'release_date'\n\n def perform_create(self, serializer):\n serializer.save(owner=self.request.user)\n\n\nclass GameDetail(RetrieveUpdateDestroyAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-detail'\n permission_classes = (permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly)\n\n\nclass PlayerList(ListCreateAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-list'\n filter_fields = 'name', 'gender'\n search_fields = '^name',\n ordering_fields = 'name',\n\n\nclass PlayerDetail(RetrieveUpdateDestroyAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-detail'\n\n\nclass PlayerScoreFilter(filters.FilterSet):\n min_score = NumberFilter(name='score', lookup_expr='gte')\n max_score = NumberFilter(name='score', lookup_expr='lte')\n from_score_date = DateTimeFilter(name='score_date', lookup_expr='gte')\n to_score_date = DateTimeFilter(name='score_date', lookup_expr='lte')\n player_name = AllValuesFilter(name='player__name')\n game_name = AllValuesFilter(name='game__name')\n\n\n class Meta:\n model = PlayerScore\n fields = ('score', 'from_score_date', 'to_score_date', 'min_score',\n 'max_score', 'player_name', 'game_name')\n\n\nclass PlayerScoreList(ListCreateAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-list'\n filter_class = PlayerScoreFilter\n ordering_fields = 'score', 'score_date'\n\n\nclass PlayerScoreDetail(RetrieveUpdateDestroyAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-detail'\n\n\nclass UserList(ListAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-list'\n\n\nclass UserDetail(RetrieveAPIView):\n queryset = User.objects.all()\n serializer_class = 
UserSerializer\n name = 'user-detail'\n",
"step-5": "from django.contrib.auth.models import User\nfrom django_filters import (\n NumberFilter,\n DateTimeFilter,\n AllValuesFilter\n)\n\nfrom rest_framework.response import Response\nfrom rest_framework.reverse import reverse\nfrom rest_framework import permissions\nfrom rest_framework.throttling import ScopedRateThrottle\nfrom rest_framework import filters\n\nfrom rest_framework.generics import (\n ListCreateAPIView,\n RetrieveUpdateDestroyAPIView,\n GenericAPIView,\n ListAPIView,\n RetrieveAPIView\n)\n\nfrom games.models import (\n GameCategory,\n Game,\n Player,\n PlayerScore\n)\n\nfrom games.serializers import (\n GameCategorySerializer,\n GameSerializer,\n PlayerSerializer,\n PlayerScoreSerializer,\n)\n\nfrom games.serializers import UserSerializer\nfrom games.permissions import IsOwnerOrReadOnly\n\n\n\n\nclass ApiRoot(GenericAPIView):\n name= 'api-root'\n\n def get(self,request,*args,**kwargs):\n return Response(\n {\n 'players':reverse(PlayerList.name,request=request),\n 'game-categories':reverse(GameCategoryList.name,request=request),\n 'game':reverse(GameList.name,request=request),\n 'scores':reverse(PlayerScoreList.name,request=request),\n 'users': reverse(UserList.name,request=request)\n }\n )\n\n\n\n\nclass GameCategoryList(ListCreateAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-list'\n throttle_scope = 'game-categories'\n throttle_classes = (ScopedRateThrottle,)\n filter_fields = ('name',)\n search_fields = ('^name',)\n ordering_fields = ('name',)\n\nclass GameCategoryDetail(RetrieveUpdateDestroyAPIView):\n queryset = GameCategory.objects.all()\n serializer_class = GameCategorySerializer\n name = 'gamecategory-detail'\n throttle_scope = 'game-categories'\n throttle_classes = (ScopedRateThrottle,)\n\nclass GameList(ListCreateAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-list'\n permission_classes = (\n 
permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly\n )\n filter_fields = (\n 'name',\n 'game_category',\n 'release_date',\n 'played',\n 'owner',\n )\n search_fields = (\n '^name',\n )\n ordering_fields = (\n 'name',\n 'release_date',\n )\n\n def perform_create(self, serializer):\n # pass an additional owner field to the create method\n # to set the owner to the user recieved in the request\n serializer.save(owner=self.request.user)\n\nclass GameDetail(RetrieveUpdateDestroyAPIView):\n queryset = Game.objects.all()\n serializer_class = GameSerializer\n name = 'game-detail'\n\n permission_classes = (\n permissions.IsAuthenticatedOrReadOnly,\n IsOwnerOrReadOnly\n )\n\nclass PlayerList(ListCreateAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-list'\n filter_fields = (\n 'name',\n 'gender',\n )\n search_fields = (\n '^name',\n )\n ordering_fields = (\n 'name',\n )\n\nclass PlayerDetail(RetrieveUpdateDestroyAPIView):\n queryset = Player.objects.all()\n serializer_class = PlayerSerializer\n name = 'player-detail'\n\n\nclass PlayerScoreFilter(filters.FilterSet):\n min_score = NumberFilter(\n name='score',lookup_expr='gte'\n )\n max_score = NumberFilter(\n name='score',lookup_expr='lte'\n )\n from_score_date = DateTimeFilter(\n name='score_date',\n lookup_expr='gte'\n )\n to_score_date = DateTimeFilter(\n name='score_date',\n lookup_expr='lte'\n )\n player_name = AllValuesFilter(\n name='player__name'\n )\n game_name = AllValuesFilter(\n name= 'game__name'\n )\n\n class Meta:\n model = PlayerScore\n fields = (\n 'score',\n 'from_score_date',\n 'to_score_date',\n 'min_score',\n 'max_score',\n # player__name will be accessed as player_name\n 'player_name',\n #game__name will be accessed as game_name\n 'game_name'\n )\n\n\n\nclass PlayerScoreList(ListCreateAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-list'\n filter_class =PlayerScoreFilter\n 
ordering_fields = (\n 'score',\n 'score_date',\n )\n\n\nclass PlayerScoreDetail(RetrieveUpdateDestroyAPIView):\n queryset = PlayerScore.objects.all()\n serializer_class = PlayerScoreSerializer\n name = 'playerscore-detail'\n\nclass UserList(ListAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-list'\n\nclass UserDetail(RetrieveAPIView):\n queryset = User.objects.all()\n serializer_class = UserSerializer\n name = 'user-detail'\n\n\n",
"step-ids": [
18,
25,
26,
27,
28
]
}
|
[
18,
25,
26,
27,
28
] |
import sys
from PIL import Image
from pr_common import *

# Usage: <script> <image-file> <R|G|B> <modifier>
file_name = sys.argv[1]
saturation_color = sys.argv[2]
saturation_modifier = int(sys.argv[3])

# Map the channel letter to its index in an (R, G, B) pixel tuple.
# Bug fix: the original left ``color_idx`` as None for any other letter
# and then crashed with a TypeError on the first pixel; fail fast with
# a clear message instead.  The lookup is also hoisted out of the
# per-pixel loop, where it was re-evaluated for every pixel.
CHANNEL_INDEX = {"R": 0, "G": 1, "B": 2}
try:
    color_idx = CHANNEL_INDEX[saturation_color]
except KeyError:
    sys.exit("saturation color must be one of R, G or B, got %r" % saturation_color)

img = getImage(file_name)
pixels = pixelValues(img)

for i in range(img.height):
    for j in range(img.width):
        pixel = list(pixels[i][j])
        # Clamp to the valid 8-bit channel range [0, 255].  Bug fix:
        # the original only capped at 255, so a negative modifier could
        # drive the channel below 0.
        pixel[color_idx] = max(0, min(255, pixel[color_idx] + saturation_modifier))
        pixels[i][j] = tuple(pixel)

savePixelsToImage(editedFilePath(file_name, "saturated"), pixels)
|
normal
|
{
"blob_id": "96ef95d8997eeab3d85a1bb6e4f8c86c9bfbb0a2",
"index": 4732,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(img.height):\n for j in range(img.width):\n pixel_val = pixels[i][j]\n color_idx = None\n if saturation_color == 'R':\n color_idx = 0\n elif saturation_color == 'G':\n color_idx = 1\n elif saturation_color == 'B':\n color_idx = 2\n color_val = pixel_val[color_idx] + saturation_modifier\n if color_val > 255:\n color_val = 255\n pixel_list = list(pixel_val)\n pixel_list[color_idx] = color_val\n pixels[i][j] = tuple(pixel_list)\nsavePixelsToImage(editedFilePath(file_name, 'saturated'), pixels)\n",
"step-3": "<mask token>\nfile_name = sys.argv[1]\nsaturation_color = sys.argv[2]\nsaturation_modifier = int(sys.argv[3])\nimg = getImage(file_name)\npixels = pixelValues(img)\nfor i in range(img.height):\n for j in range(img.width):\n pixel_val = pixels[i][j]\n color_idx = None\n if saturation_color == 'R':\n color_idx = 0\n elif saturation_color == 'G':\n color_idx = 1\n elif saturation_color == 'B':\n color_idx = 2\n color_val = pixel_val[color_idx] + saturation_modifier\n if color_val > 255:\n color_val = 255\n pixel_list = list(pixel_val)\n pixel_list[color_idx] = color_val\n pixels[i][j] = tuple(pixel_list)\nsavePixelsToImage(editedFilePath(file_name, 'saturated'), pixels)\n",
"step-4": "import sys\nfrom PIL import Image\nfrom pr_common import *\nfile_name = sys.argv[1]\nsaturation_color = sys.argv[2]\nsaturation_modifier = int(sys.argv[3])\nimg = getImage(file_name)\npixels = pixelValues(img)\nfor i in range(img.height):\n for j in range(img.width):\n pixel_val = pixels[i][j]\n color_idx = None\n if saturation_color == 'R':\n color_idx = 0\n elif saturation_color == 'G':\n color_idx = 1\n elif saturation_color == 'B':\n color_idx = 2\n color_val = pixel_val[color_idx] + saturation_modifier\n if color_val > 255:\n color_val = 255\n pixel_list = list(pixel_val)\n pixel_list[color_idx] = color_val\n pixels[i][j] = tuple(pixel_list)\nsavePixelsToImage(editedFilePath(file_name, 'saturated'), pixels)\n",
"step-5": "import sys\nfrom PIL import Image\nfrom pr_common import *\n\nfile_name = sys.argv[1]\nsaturation_color = sys.argv[2]\nsaturation_modifier = int(sys.argv[3])\n\nimg = getImage(file_name)\npixels = pixelValues(img)\n\nfor i in range(img.height):\n for j in range(img.width):\n pixel_val = pixels[i][j]\n color_idx = None\n\n if (saturation_color == \"R\"):\n color_idx = 0\n elif (saturation_color == \"G\"):\n color_idx = 1\n elif (saturation_color == \"B\"):\n color_idx = 2\n\n color_val = pixel_val[color_idx] + saturation_modifier\n \n if (color_val > 255):\n color_val = 255\n \n pixel_list = list(pixel_val)\n pixel_list[color_idx] = color_val\n pixels[i][j] = tuple(pixel_list)\n\nsavePixelsToImage(editedFilePath(file_name, \"saturated\"), pixels)\n ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from nltk.stem.porter import PorterStemmer
from nltk.stem.snowball import SnowballStemmer

# Build both stemmers; only the Snowball one is exercised below.
p_stemmer = PorterStemmer()
s_stemmer = SnowballStemmer(language="english")

word = "writing"
print(s_stemmer.stem(word))
|
normal
|
{
"blob_id": "67e6d39ef291e4bb30c0b6bab7b71d97c86b0ef1",
"index": 4108,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(s_stemmer.stem('writing'))\n",
"step-3": "<mask token>\np_stemmer = PorterStemmer()\ns_stemmer = SnowballStemmer(language='english')\nprint(s_stemmer.stem('writing'))\n",
"step-4": "from nltk.stem.porter import PorterStemmer\nfrom nltk.stem.snowball import SnowballStemmer\np_stemmer = PorterStemmer()\ns_stemmer = SnowballStemmer(language='english')\nprint(s_stemmer.stem('writing'))\n",
"step-5": "from nltk.stem.porter import PorterStemmer\r\nfrom nltk.stem.snowball import SnowballStemmer\r\n\r\np_stemmer = PorterStemmer()\r\ns_stemmer = SnowballStemmer(language=\"english\")\r\n\r\n\r\nprint(s_stemmer.stem(\"writing\"))\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import json
import paho.mqtt.client as mqtt
from datetime import datetime
import ssl
from collections import OrderedDict
import time
from tkinter import *
import numpy as np
# MQTT broker endpoint (TLS port 8883) and the device identity used to
# authenticate and to build the publish/subscribe topic names below.
MQTT_IP = 'emq'
MQTT_PORT = 8883
username = "spread_ICAM"
password = "spread_ICAM"
deviceType = "spread_ICAM"
version = "v1"
def on_connect(client, userdata, flags, rc):
    """MQTT connect callback: report the broker's connection result.

    ``rc`` codes: 0 success; 1 bad protocol version; 2 invalid client
    identifier; 3 server unavailable; 4 bad username or password;
    5 not authorised; 6-255 currently unused.
    """
    print("Connected with result code " + str(rc))
def on_message(client, userdata, msg):
    # Log every inbound MQTT message together with a receipt timestamp.
    line = str(datetime.now()) + " Message Received: " + str(msg.payload)
    print(line)
# Topic names follow the <deviceType>_<version>/<username>/... convention.
publishTopic = "%s_%s/%s/events" % (deviceType, version, username)
subscribeTopic = "%s_%s/%s/operations" % (deviceType, version, username)

# The broker refuses the connection unless an explicit client_id is set.
client = mqtt.Client(client_id="TentativoRaffo")
client.tls_set(ca_certs="digitalfuture_ca_public.pem", certfile=None, keyfile=None,
               cert_reqs=ssl.CERT_REQUIRED, tls_version=ssl.PROTOCOL_SSLv23, ciphers=None)
client.tls_insecure_set(False)
client.username_pw_set(username, password=password)
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_IP, MQTT_PORT, 60, bind_address="")
client.loop_start()

#########################
#
# CREATE THE GUI
#
#########################
# Bug fix: the original rebound a single ``text_speed`` variable to the
# mm_kg, s and interval Text widgets (and ``btn_start`` to both buttons),
# so every reference but the last was lost; each widget now keeps its own
# name so the entered values remain readable.
root = Tk()
Label(root, text="Spread simulator").grid(row=0, column=1, pady=5)

Label(root, text="Kg").grid(row=1, column=0, pady=5)
text_id = Text(root, height=1, width=10)
text_id.grid(row=1, column=1, padx=5, pady=5)
Label(root, text="Peso in kg del vassoio prelevato (Kg)").grid(row=1, column=2, pady=5)

Label(root, text="mm_kg").grid(row=2, column=0, pady=5)
text_mm_kg = Text(root, height=1, width=10)
text_mm_kg.grid(row=2, column=1, padx=5, pady=5)
Label(root, text="Di quanti mm affonda per ogni kg prelevato (mm)").grid(row=2, column=2, pady=5)

Label(root, text="s").grid(row=3, column=0, pady=5)
text_s = Text(root, height=1, width=10)
text_s.grid(row=3, column=1, padx=5, pady=5)
Label(root, text="Coefficiente di sovraelongazione delle catene").grid(row=3, column=2, pady=5)

Label(root, text="interval").grid(row=4, column=0, pady=5)
text_interval = Text(root, height=1, width=10)
text_interval.grid(row=4, column=1, padx=5, pady=5)
Label(root, text="Intervallo di invio dati (s)").grid(row=4, column=2, pady=5)

btn_start = Button(root)
btn_start["text"] = "Start"
btn_start.grid(row=5, column=1, padx=5, pady=5)

btn_stop = Button(root)
btn_stop["text"] = "Stop"
btn_stop.grid(row=6, column=1, padx=5, pady=5)

# Milliseconds between scheduled ``task`` runs.
interval_time = 1000
def task():
    """Sample one spread value and re-schedule itself on Tk's event loop."""
    # Normally-distributed spread; loc/scale are presumably empirical
    # process parameters — TODO confirm their provenance.
    spread = np.random.normal(loc=0.708727, scale=0.192176)
    # Bug fix: the original printed the literal string "spread" instead
    # of the value that was just sampled.
    print(spread)
    root.after(interval_time, task)  # reschedule in interval_time ms
# Kick off the periodic sampler, then block in the Tk event loop; the
# publish loop below only starts after the GUI window is closed.
root.after(interval_time, task)
root.mainloop()
root.destroy()
# NOTE(review): ``timestamp += i`` with ``i`` incrementing every pass
# advances the timestamp by 0, 1, 2, ... per iteration — this looks like
# it should be a real clock or a fixed step; confirm intent.  The
# published "spread" is also hard-coded to 3.0 rather than the value
# sampled in ``task``.
i=0
timestamp = 1234567890123
while(True):
    time.sleep(1)
    timestamp += i
    print(timestamp)
    # OrderedDict keeps the field order stable in the serialized JSON.
    ordered_obj_to_send = OrderedDict([
        ("spread", 3.0),
        ("timestamp_", timestamp),
        ("date", "eee")])
    # qos=2: exactly-once delivery to the broker.
    client.publish(publishTopic, json.dumps(ordered_obj_to_send), qos=2)
    i+=1
    #time.sleep(2)
|
normal
|
{
"blob_id": "f3664f5f69207c3f2dcec96c90cd220003da0904",
"index": 4142,
"step-1": "<mask token>\n\n\ndef on_connect(client, userdata, flags, rc):\n \"\"\"0: Connection successful\n 1: Connection refused - incorrect protocol version\n 2: Connection refused - invalid client identifier\n 3: Connection refused - server unavailable\n 4: Connection refused - bad username or password\n 5: Connection refused - not authorised\n 6-255: Currently unused.\"\"\"\n print('Connected with result code ' + str(rc))\n\n\ndef on_message(client, userdata, msg):\n print(str(datetime.now()) + ' Message Received: ' + str(msg.payload))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef on_connect(client, userdata, flags, rc):\n \"\"\"0: Connection successful\n 1: Connection refused - incorrect protocol version\n 2: Connection refused - invalid client identifier\n 3: Connection refused - server unavailable\n 4: Connection refused - bad username or password\n 5: Connection refused - not authorised\n 6-255: Currently unused.\"\"\"\n print('Connected with result code ' + str(rc))\n\n\ndef on_message(client, userdata, msg):\n print(str(datetime.now()) + ' Message Received: ' + str(msg.payload))\n\n\n<mask token>\n\n\ndef task():\n spread = np.random.normal(loc=0.708727, scale=0.192176)\n print('spread')\n root.after(interval_time, task)\n\n\n<mask token>\n",
"step-3": "<mask token>\nMQTT_IP = 'emq'\nMQTT_PORT = 8883\nusername = 'spread_ICAM'\npassword = 'spread_ICAM'\ndeviceType = 'spread_ICAM'\nversion = 'v1'\n\n\ndef on_connect(client, userdata, flags, rc):\n \"\"\"0: Connection successful\n 1: Connection refused - incorrect protocol version\n 2: Connection refused - invalid client identifier\n 3: Connection refused - server unavailable\n 4: Connection refused - bad username or password\n 5: Connection refused - not authorised\n 6-255: Currently unused.\"\"\"\n print('Connected with result code ' + str(rc))\n\n\ndef on_message(client, userdata, msg):\n print(str(datetime.now()) + ' Message Received: ' + str(msg.payload))\n\n\npublishTopic = '%s_%s/%s/events' % (deviceType, version, username)\nsubscribeTopic = '%s_%s/%s/operations' % (deviceType, version, username)\nclient = mqtt.Client(client_id='TentativoRaffo')\nclient.tls_set(ca_certs='digitalfuture_ca_public.pem', certfile=None,\n keyfile=None, cert_reqs=ssl.CERT_REQUIRED, tls_version=ssl.\n PROTOCOL_SSLv23, ciphers=None)\nclient.tls_insecure_set(False)\nclient.username_pw_set(username, password=password)\nclient.on_connect = on_connect\nclient.on_message = on_message\nclient.connect(MQTT_IP, MQTT_PORT, 60, bind_address='')\nclient.loop_start()\nroot = Tk()\nLabel(root, text='Spread simulator').grid(row=0, column=1, pady=5)\nLabel(root, text='Kg').grid(row=1, column=0, pady=5)\ntext_id = Text(root, height=1, width=10)\ntext_id.grid(row=1, column=1, padx=5, pady=5)\nLabel(root, text='Peso in kg del vassoio prelevato (Kg)').grid(row=1,\n column=2, pady=5)\nLabel(root, text='mm_kg').grid(row=2, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=2, column=1, padx=5, pady=5)\nLabel(root, text='Di quanti mm affonda per ogni kg prelevato (mm)').grid(row\n =2, column=2, pady=5)\nLabel(root, text='s').grid(row=3, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=3, column=1, padx=5, pady=5)\nLabel(root, 
text='Coefficiente di sovraelongazione delle catene').grid(row=\n 3, column=2, pady=5)\nLabel(root, text='interval').grid(row=4, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=4, column=1, padx=5, pady=5)\nLabel(root, text='Intervallo di invio dati (s)').grid(row=4, column=2, pady=5)\nbtn_start = Button(root)\nbtn_start['text'] = 'Start'\nbtn_start.grid(row=5, column=1, padx=5, pady=5)\nbtn_start = Button(root)\nbtn_start['text'] = 'Stop'\nbtn_start.grid(row=6, column=1, padx=5, pady=5)\ninterval_time = 1000\n\n\ndef task():\n spread = np.random.normal(loc=0.708727, scale=0.192176)\n print('spread')\n root.after(interval_time, task)\n\n\nroot.after(interval_time, task)\nroot.mainloop()\nroot.destroy()\ni = 0\ntimestamp = 1234567890123\nwhile True:\n time.sleep(1)\n timestamp += i\n print(timestamp)\n ordered_obj_to_send = OrderedDict([('spread', 3.0), ('timestamp_',\n timestamp), ('date', 'eee')])\n client.publish(publishTopic, json.dumps(ordered_obj_to_send), qos=2)\n i += 1\n",
"step-4": "import json\nimport paho.mqtt.client as mqtt\nfrom datetime import datetime\nimport ssl\nfrom collections import OrderedDict\nimport time\nfrom tkinter import *\nimport numpy as np\nMQTT_IP = 'emq'\nMQTT_PORT = 8883\nusername = 'spread_ICAM'\npassword = 'spread_ICAM'\ndeviceType = 'spread_ICAM'\nversion = 'v1'\n\n\ndef on_connect(client, userdata, flags, rc):\n \"\"\"0: Connection successful\n 1: Connection refused - incorrect protocol version\n 2: Connection refused - invalid client identifier\n 3: Connection refused - server unavailable\n 4: Connection refused - bad username or password\n 5: Connection refused - not authorised\n 6-255: Currently unused.\"\"\"\n print('Connected with result code ' + str(rc))\n\n\ndef on_message(client, userdata, msg):\n print(str(datetime.now()) + ' Message Received: ' + str(msg.payload))\n\n\npublishTopic = '%s_%s/%s/events' % (deviceType, version, username)\nsubscribeTopic = '%s_%s/%s/operations' % (deviceType, version, username)\nclient = mqtt.Client(client_id='TentativoRaffo')\nclient.tls_set(ca_certs='digitalfuture_ca_public.pem', certfile=None,\n keyfile=None, cert_reqs=ssl.CERT_REQUIRED, tls_version=ssl.\n PROTOCOL_SSLv23, ciphers=None)\nclient.tls_insecure_set(False)\nclient.username_pw_set(username, password=password)\nclient.on_connect = on_connect\nclient.on_message = on_message\nclient.connect(MQTT_IP, MQTT_PORT, 60, bind_address='')\nclient.loop_start()\nroot = Tk()\nLabel(root, text='Spread simulator').grid(row=0, column=1, pady=5)\nLabel(root, text='Kg').grid(row=1, column=0, pady=5)\ntext_id = Text(root, height=1, width=10)\ntext_id.grid(row=1, column=1, padx=5, pady=5)\nLabel(root, text='Peso in kg del vassoio prelevato (Kg)').grid(row=1,\n column=2, pady=5)\nLabel(root, text='mm_kg').grid(row=2, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=2, column=1, padx=5, pady=5)\nLabel(root, text='Di quanti mm affonda per ogni kg prelevato (mm)').grid(row\n =2, column=2, 
pady=5)\nLabel(root, text='s').grid(row=3, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=3, column=1, padx=5, pady=5)\nLabel(root, text='Coefficiente di sovraelongazione delle catene').grid(row=\n 3, column=2, pady=5)\nLabel(root, text='interval').grid(row=4, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=4, column=1, padx=5, pady=5)\nLabel(root, text='Intervallo di invio dati (s)').grid(row=4, column=2, pady=5)\nbtn_start = Button(root)\nbtn_start['text'] = 'Start'\nbtn_start.grid(row=5, column=1, padx=5, pady=5)\nbtn_start = Button(root)\nbtn_start['text'] = 'Stop'\nbtn_start.grid(row=6, column=1, padx=5, pady=5)\ninterval_time = 1000\n\n\ndef task():\n spread = np.random.normal(loc=0.708727, scale=0.192176)\n print('spread')\n root.after(interval_time, task)\n\n\nroot.after(interval_time, task)\nroot.mainloop()\nroot.destroy()\ni = 0\ntimestamp = 1234567890123\nwhile True:\n time.sleep(1)\n timestamp += i\n print(timestamp)\n ordered_obj_to_send = OrderedDict([('spread', 3.0), ('timestamp_',\n timestamp), ('date', 'eee')])\n client.publish(publishTopic, json.dumps(ordered_obj_to_send), qos=2)\n i += 1\n",
"step-5": "import json\nimport paho.mqtt.client as mqtt\nfrom datetime import datetime\nimport ssl\nfrom collections import OrderedDict\nimport time\nfrom tkinter import *\nimport numpy as np\n\nMQTT_IP = 'emq'\nMQTT_PORT = 8883\n\nusername = \"spread_ICAM\"\npassword = \"spread_ICAM\"\ndeviceType = \"spread_ICAM\"\nversion = \"v1\"\n\ndef on_connect(client, userdata, flags, rc):\n \"\"\"0: Connection successful\n 1: Connection refused - incorrect protocol version\n 2: Connection refused - invalid client identifier\n 3: Connection refused - server unavailable\n 4: Connection refused - bad username or password\n 5: Connection refused - not authorised\n 6-255: Currently unused.\"\"\"\n print(\"Connected with result code \" + str(rc))\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n # If connection successful start publishing data\n # if rc == 0:\n # client.subscribe(subscribeTopic)\n # self.__send_data_loop()\n\n\ndef on_message(client, userdata, msg):\n print(str(datetime.now()) + \" Message Received: \" + str(msg.payload))\n\n\npublishTopic = \"%s_%s/%s/events\" % (deviceType, version, username)\nsubscribeTopic = \"%s_%s/%s/operations\" % (deviceType, version, username)\n# se non imposto il client_id non riesce a connettersi!!!!!\nclient = mqtt.Client(client_id=\"TentativoRaffo\")\nclient.tls_set(ca_certs=\"digitalfuture_ca_public.pem\", certfile=None, keyfile=None, cert_reqs=ssl.CERT_REQUIRED,\n tls_version=ssl.PROTOCOL_SSLv23, ciphers=None)\nclient.tls_insecure_set(False)\nclient.username_pw_set(username, password=password)\nclient.on_connect = on_connect\nclient.on_message = on_message\nclient.connect(MQTT_IP, MQTT_PORT, 60, bind_address=\"\")\nclient.loop_start()\n\n\n\n#########################\n#\n# CREATE THE GUI\n#\n#########################\n\n\nroot = Tk()\n\nLabel(root, text=\"Spread simulator\").grid(row=0, column=1, pady=5)\n\nLabel(root, text=\"Kg\").grid(row=1, column=0, 
pady=5)\ntext_id = Text(root, height=1, width=10)\ntext_id.grid(row=1, column=1, padx=5, pady=5)\nLabel(root, text=\"Peso in kg del vassoio prelevato (Kg)\").grid(row=1, column=2, pady=5)\n\n\nLabel(root, text=\"mm_kg\").grid(row=2, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=2, column=1, padx=5, pady=5)\nLabel(root, text=\"Di quanti mm affonda per ogni kg prelevato (mm)\").grid(row=2, column=2, pady=5)\n\nLabel(root, text=\"s\").grid(row=3, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=3, column=1, padx=5, pady=5)\nLabel(root, text=\"Coefficiente di sovraelongazione delle catene\").grid(row=3, column=2, pady=5)\n\nLabel(root, text=\"interval\").grid(row=4, column=0, pady=5)\ntext_speed = Text(root, height=1, width=10)\ntext_speed.grid(row=4, column=1, padx=5, pady=5)\nLabel(root, text=\"Intervallo di invio dati (s)\").grid(row=4, column=2, pady=5)\n\nbtn_start = Button(root)\nbtn_start[\"text\"] = \"Start\"\nbtn_start.grid(row=5, column=1, padx=5, pady=5)\n\nbtn_start = Button(root)\nbtn_start[\"text\"] = \"Stop\"\nbtn_start.grid(row=6, column=1, padx=5, pady=5)\n\ninterval_time = 1000;\n\ndef task():\n\n spread = np.random.normal(loc=0.708727, scale=0.192176)\n print(\"spread\")\n root.after(interval_time, task) # reschedule event in 2 seconds\n\nroot.after(interval_time, task)\n\nroot.mainloop()\nroot.destroy()\n\n\ni=0\ntimestamp = 1234567890123\nwhile(True):\n\n\n time.sleep(1)\n timestamp += i\n print(timestamp)\n\n ordered_obj_to_send = OrderedDict([\n (\"spread\", 3.0),\n (\"timestamp_\", timestamp),\n (\"date\", \"eee\")])\n client.publish(publishTopic, json.dumps(ordered_obj_to_send), qos=2)\n i+=1\n#time.sleep(2)",
"step-ids": [
2,
3,
5,
6,
7
]
}
|
[
2,
3,
5,
6,
7
] |
# def test_categories:
# ["5S", "5H", "5D", "4S", "4H", "4D", "3D", "3S"]
import unittest
from poker import Hand, makeCard, Rank, count_ranks, RankCount, max_straight
class TestHand(unittest.TestCase):
    """Unit tests for poker hand categorisation, selection and comparison."""

    def test_max_straight(self):
        """max_straight finds the best 5-card straight, ignoring extra cards."""
        # The 10-high straight is the expected result for all three inputs.
        expected = sorted(
            (makeCard(c) for c in ["10S", "6S", "9S", "8S", "7S"]), reverse=True
        )
        # Exactly five consecutive cards.
        cards = [makeCard(c) for c in ["10S", "6S", "9S", "8S", "7S"]]
        self.assertEqual(max_straight(cards), expected)
        # A duplicated rank ("8C") must not break the straight.
        cards = [makeCard(c) for c in ["10S", "6S", "9S", "8S", "8C", "7S"]]
        self.assertEqual(max_straight(cards), expected)
        # With six consecutive ranks (5..10) the highest straight wins.
        cards = [makeCard(c) for c in ["10S", "6S", "9S", "8S", "5C", "7S"]]
        self.assertEqual(max_straight(cards), expected)

    def test_categories(self):
        """Each canonical 5-card hand maps to its expected category."""
        cases = [
            (["KH", "QH", "JH", "AH", "10H"], Hand.Categories.straight_flush),
            (["10S", "6S", "9S", "8S", "7S"], Hand.Categories.straight_flush),
            (["JH", "JC", "9H", "JS", "JD"], Hand.Categories.four_of_a_kind),
            (["JH", "JC", "JS", "9D", "9H"], Hand.Categories.full_house),
            (["10S", "9S", "8S", "5S", "6S"], Hand.Categories.flush),
            (["10H", "6S", "9D", "8S", "7S"], Hand.Categories.straight),
            (["JH", "JC", "9H", "JS", "8D"], Hand.Categories.three_of_a_kind),
            (["JH", "JC", "QS", "9D", "9H"], Hand.Categories.two_pair),
            (["JH", "JC", "QS", "5D", "9H"], Hand.Categories.pair),
            (["JH", "3C", "4S", "5C", "9H"], Hand.Categories.high_card),
        ]
        for cards, category in cases:
            with self.subTest(cards=cards):
                self.assertEqual(Hand(cards).category, category)

    def test_category_options(self):
        """With more than five cards the best available category is chosen."""
        cases = [
            # A straight beats the trips also present in the hand.
            (["10H", "6S", "9D", "8S", "7S", "7D", "7H"],
             Hand.Categories.straight),
            # Four 7s beat the straight.
            (["10H", "6S", "9D", "8S", "7S", "7D", "7H", "7C"],
             Hand.Categories.four_of_a_kind),
            # Trips plus a pair make a full house, beating the straight.
            (["10H", "6S", "9D", "8S", "7S", "7D", "7H", "8C"],
             Hand.Categories.full_house),
            (["10S", "9S", "8S", "5S", "6S", "10H", "6D", "9D", "8C", "7C"],
             Hand.Categories.flush),
            # It picks the royal flush over the lower straight flush.
            (["KH", "QH", "JH", "AH", "10H", "10S", "6S", "9S", "8S", "7S"],
             Hand.Categories.straight_flush),
            # Trips of 5s and 4s: the best full house is chosen.
            (["5S", "5H", "5D", "4S", "4H", "4D", "3D", "3S"],
             Hand.Categories.full_house),
            # Four 5s with the best kicker.
            (["5S", "5H", "5D", "5C", "4S", "4H", "3C", "3D", "3S"],
             Hand.Categories.four_of_a_kind),
        ]
        for cards, category in cases:
            with self.subTest(cards=cards):
                self.assertEqual(Hand(cards).category, category)

    def test_cmp(self):
        """Hands order by category: pair beats high card, flush beats straight."""
        pair = Hand(["JH", "JC", "QS", "5D", "9H"])
        high_card = Hand(["JH", "3C", "4S", "5C", "9H"])
        self.assertFalse(pair < high_card)

        straight = Hand(["10H", "6S", "9D", "8S", "7S"])
        flush = Hand(["10S", "9S", "8S", "5S", "6S"])
        self.assertTrue(straight < flush)

    def test_deck_validation(self):
        """
    	Test with some hands that are impossible to form with a 52-card deck
    	Five-of-a-kind
    	Something that is both a flush and has a pair (flush wins)
    	Something that is both a flush and four-of-a-kind (four-of-a-kind wins)
    	"""
        pass
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
normal
|
{
"blob_id": "5b8d1bd026e97bb7508a500048f940abf0253471",
"index": 9698,
"step-1": "<mask token>\n\n\nclass TestHand(unittest.TestCase):\n\n def test_max_straight(self):\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '8C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '5C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n\n def test_categories(self):\n my_hand = Hand(['KH', 'QH', 'JH', 'AH', '10H'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['10S', '6S', '9S', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', 'JD'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'JS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(my_hand.category, Hand.Categories.flush)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', '8D'])\n self.assertEqual(my_hand.category, Hand.Categories.three_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'QS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.two_pair)\n my_hand = Hand(['JH', 'JC', 'QS', '5D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.pair)\n my_hand = Hand(['JH', '3C', '4S', '5C', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.high_card)\n <mask token>\n\n def test_cmp(self):\n pair_to_high_card = Hand(['JH', 'JC', 'QS', '5D', '9H']) < Hand([\n 'JH', '3C', '4S', '5C', '9H'])\n 
self.assertEqual(pair_to_high_card, False)\n straight_to_flush = Hand(['10H', '6S', '9D', '8S', '7S']) < Hand([\n '10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(straight_to_flush, True)\n\n def test_deck_validation(self):\n \"\"\"\n \tTest with some hands that are impossible to form with a 52-card deck\n \tFive-of-a-kind\n \tSomething that is both a flush and has a pair (flush wins)\n \tSomething that is both a flush and four-of-a-kind (four-of-a-kind wins)\n \t\"\"\"\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestHand(unittest.TestCase):\n\n def test_max_straight(self):\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '8C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '5C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n\n def test_categories(self):\n my_hand = Hand(['KH', 'QH', 'JH', 'AH', '10H'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['10S', '6S', '9S', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', 'JD'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'JS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(my_hand.category, Hand.Categories.flush)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', '8D'])\n self.assertEqual(my_hand.category, Hand.Categories.three_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'QS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.two_pair)\n my_hand = Hand(['JH', 'JC', 'QS', '5D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.pair)\n my_hand = Hand(['JH', '3C', '4S', '5C', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.high_card)\n\n def test_category_options(self):\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H'])\n self.assertEqual(my_hand.category, 
Hand.Categories.straight)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H', '7C'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H', '8C'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['10S', '9S', '8S', '5S', '6S', '10H', '6D', '9D',\n '8C', '7C'])\n self.assertEqual(my_hand.category, Hand.Categories.flush)\n my_hand = Hand(['KH', 'QH', 'JH', 'AH', '10H', '10S', '6S', '9S',\n '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['5S', '5H', '5D', '4S', '4H', '4D', '3D', '3S'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['5S', '5H', '5D', '5C', '4S', '4H', '3C', '3D', '3S'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n\n def test_cmp(self):\n pair_to_high_card = Hand(['JH', 'JC', 'QS', '5D', '9H']) < Hand([\n 'JH', '3C', '4S', '5C', '9H'])\n self.assertEqual(pair_to_high_card, False)\n straight_to_flush = Hand(['10H', '6S', '9D', '8S', '7S']) < Hand([\n '10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(straight_to_flush, True)\n\n def test_deck_validation(self):\n \"\"\"\n \tTest with some hands that are impossible to form with a 52-card deck\n \tFive-of-a-kind\n \tSomething that is both a flush and has a pair (flush wins)\n \tSomething that is both a flush and four-of-a-kind (four-of-a-kind wins)\n \t\"\"\"\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestHand(unittest.TestCase):\n\n def test_max_straight(self):\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '8C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '5C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n\n def test_categories(self):\n my_hand = Hand(['KH', 'QH', 'JH', 'AH', '10H'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['10S', '6S', '9S', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', 'JD'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'JS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(my_hand.category, Hand.Categories.flush)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', '8D'])\n self.assertEqual(my_hand.category, Hand.Categories.three_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'QS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.two_pair)\n my_hand = Hand(['JH', 'JC', 'QS', '5D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.pair)\n my_hand = Hand(['JH', '3C', '4S', '5C', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.high_card)\n\n def test_category_options(self):\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H'])\n self.assertEqual(my_hand.category, 
Hand.Categories.straight)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H', '7C'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H', '8C'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['10S', '9S', '8S', '5S', '6S', '10H', '6D', '9D',\n '8C', '7C'])\n self.assertEqual(my_hand.category, Hand.Categories.flush)\n my_hand = Hand(['KH', 'QH', 'JH', 'AH', '10H', '10S', '6S', '9S',\n '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['5S', '5H', '5D', '4S', '4H', '4D', '3D', '3S'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['5S', '5H', '5D', '5C', '4S', '4H', '3C', '3D', '3S'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n\n def test_cmp(self):\n pair_to_high_card = Hand(['JH', 'JC', 'QS', '5D', '9H']) < Hand([\n 'JH', '3C', '4S', '5C', '9H'])\n self.assertEqual(pair_to_high_card, False)\n straight_to_flush = Hand(['10H', '6S', '9D', '8S', '7S']) < Hand([\n '10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(straight_to_flush, True)\n\n def test_deck_validation(self):\n \"\"\"\n \tTest with some hands that are impossible to form with a 52-card deck\n \tFive-of-a-kind\n \tSomething that is both a flush and has a pair (flush wins)\n \tSomething that is both a flush and four-of-a-kind (four-of-a-kind wins)\n \t\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import unittest\nfrom poker import Hand, makeCard, Rank, count_ranks, RankCount, max_straight\n\n\nclass TestHand(unittest.TestCase):\n\n def test_max_straight(self):\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '8C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n cards = map(makeCard, ['10S', '6S', '9S', '8S', '5C', '7S'])\n straight = max_straight(cards)\n self.assertEqual(straight, sorted(map(makeCard, ['10S', '6S', '9S',\n '8S', '7S']), reverse=True))\n\n def test_categories(self):\n my_hand = Hand(['KH', 'QH', 'JH', 'AH', '10H'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['10S', '6S', '9S', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', 'JD'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'JS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(my_hand.category, Hand.Categories.flush)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight)\n my_hand = Hand(['JH', 'JC', '9H', 'JS', '8D'])\n self.assertEqual(my_hand.category, Hand.Categories.three_of_a_kind)\n my_hand = Hand(['JH', 'JC', 'QS', '9D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.two_pair)\n my_hand = Hand(['JH', 'JC', 'QS', '5D', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.pair)\n my_hand = Hand(['JH', '3C', '4S', '5C', '9H'])\n self.assertEqual(my_hand.category, Hand.Categories.high_card)\n\n def test_category_options(self):\n my_hand = 
Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H'])\n self.assertEqual(my_hand.category, Hand.Categories.straight)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H', '7C'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n my_hand = Hand(['10H', '6S', '9D', '8S', '7S', '7D', '7H', '8C'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['10S', '9S', '8S', '5S', '6S', '10H', '6D', '9D',\n '8C', '7C'])\n self.assertEqual(my_hand.category, Hand.Categories.flush)\n my_hand = Hand(['KH', 'QH', 'JH', 'AH', '10H', '10S', '6S', '9S',\n '8S', '7S'])\n self.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n my_hand = Hand(['5S', '5H', '5D', '4S', '4H', '4D', '3D', '3S'])\n self.assertEqual(my_hand.category, Hand.Categories.full_house)\n my_hand = Hand(['5S', '5H', '5D', '5C', '4S', '4H', '3C', '3D', '3S'])\n self.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n\n def test_cmp(self):\n pair_to_high_card = Hand(['JH', 'JC', 'QS', '5D', '9H']) < Hand([\n 'JH', '3C', '4S', '5C', '9H'])\n self.assertEqual(pair_to_high_card, False)\n straight_to_flush = Hand(['10H', '6S', '9D', '8S', '7S']) < Hand([\n '10S', '9S', '8S', '5S', '6S'])\n self.assertEqual(straight_to_flush, True)\n\n def test_deck_validation(self):\n \"\"\"\n \tTest with some hands that are impossible to form with a 52-card deck\n \tFive-of-a-kind\n \tSomething that is both a flush and has a pair (flush wins)\n \tSomething that is both a flush and four-of-a-kind (four-of-a-kind wins)\n \t\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "# def test_categories:\n\t\n\n# [\"5S\", \"5H\", \"5D\", \"4S\", \"4H\", \"4D\", \"3D\", \"3S\"] \n\nimport unittest\n\nfrom poker import Hand, makeCard, Rank, count_ranks, RankCount, max_straight\n\nclass TestHand(unittest.TestCase):\n\n # def test_heap_multiples(self):\n # \theaped_multiples = Hand.heap_multiples({\"J\":4, \"2\":3})\n # \tprint heaped_multiples\n # \tself.assertEqual(heaped_multiples, [(4, \"J\"), (3,\"2\")], \"failure in heap_multiples\")\n\n def test_max_straight(self):\n \tcards = map(makeCard, [\"10S\", \"6S\", \"9S\", \"8S\", \"7S\"])\n \tstraight = max_straight(cards)\n \tself.assertEqual(straight, sorted(map(makeCard, [\"10S\", \"6S\", \"9S\", \"8S\", \"7S\"]), reverse=True))\n\n \tcards = map(makeCard, [\"10S\", \"6S\", \"9S\", \"8S\", \"8C\", \"7S\"])\n \tstraight = max_straight(cards)\n \tself.assertEqual(straight, sorted(map(makeCard, [\"10S\", \"6S\", \"9S\", \"8S\", \"7S\"]), reverse=True))\n\n \tcards = map(makeCard, [\"10S\", \"6S\", \"9S\", \"8S\", \"5C\", \"7S\"])\n \tstraight = max_straight(cards)\n \tself.assertEqual(straight, sorted(map(makeCard, [\"10S\", \"6S\", \"9S\", \"8S\", \"7S\"]), reverse=True))\n\n def test_categories(self):\n\n \tmy_hand = Hand([\"KH\", \"QH\", \"JH\", \"AH\", \"10H\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n\n \tmy_hand = Hand([\"10S\", \"6S\", \"9S\", \"8S\", \"7S\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n\n \tmy_hand = Hand([\"JH\", \"JC\", \"9H\", \"JS\", \"JD\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n\n \tmy_hand = Hand([\"JH\", \"JC\", \"JS\", \"9D\", \"9H\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.full_house)\n\n \tmy_hand = Hand([\"10S\", \"9S\", \"8S\", \"5S\", \"6S\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.flush)\n\n \tmy_hand = Hand([\"10H\", \"6S\", \"9D\", \"8S\", \"7S\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.straight)\n\n 
\tmy_hand = Hand([\"JH\", \"JC\", \"9H\", \"JS\", \"8D\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.three_of_a_kind)\n\n \tmy_hand = Hand([\"JH\", \"JC\", \"QS\", \"9D\", \"9H\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.two_pair)\n\n \tmy_hand = Hand([\"JH\", \"JC\", \"QS\", \"5D\", \"9H\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.pair)\n\n \tmy_hand = Hand([\"JH\", \"3C\", \"4S\", \"5C\", \"9H\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.high_card)\n\n def test_category_options(self):\n\n \tmy_hand = Hand([\"10H\", \"6S\", \"9D\", \"8S\", \"7S\", \"7D\", \"7H\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.straight)\n\n \tmy_hand = Hand([\"10H\", \"6S\", \"9D\", \"8S\", \"7S\", \"7D\", \"7H\", \"7C\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n\n \tmy_hand = Hand([\"10H\", \"6S\", \"9D\", \"8S\", \"7S\", \"7D\", \"7H\", \"8C\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.full_house)\n\n \tmy_hand = Hand([\"10S\", \"9S\", \"8S\", \"5S\", \"6S\", \"10H\", \"6D\", \"9D\", \"8C\", \"7C\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.flush)\n\n \tmy_hand = Hand([\"KH\", \"QH\", \"JH\", \"AH\", \"10H\", \"10S\", \"6S\", \"9S\", \"8S\", \"7S\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.straight_flush)\n \t# It gets the royal flush\n\n \tmy_hand = Hand([\"5S\", \"5H\", \"5D\", \"4S\", \"4H\", \"4D\", \"3D\", \"3S\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.full_house)\n \t# It gets the fours\n\n \tmy_hand = Hand([\"5S\", \"5H\", \"5D\", \"5C\", \"4S\", \"4H\", \"3C\", \"3D\", \"3S\"])\n \tself.assertEqual(my_hand.category, Hand.Categories.four_of_a_kind)\n \t# get the 4 kicker\n\n\n\n def test_cmp(self):\n \tpair_to_high_card = Hand([\"JH\", \"JC\", \"QS\", \"5D\", \"9H\"]) < Hand([\"JH\", \"3C\", \"4S\", \"5C\", \"9H\"])\n \tself.assertEqual(pair_to_high_card, False)\n\n \tstraight_to_flush = Hand([\"10H\", \"6S\", \"9D\", 
\"8S\", \"7S\"]) < Hand([\"10S\", \"9S\", \"8S\", \"5S\", \"6S\"])\n \tself.assertEqual(straight_to_flush, True)\n\n\n def test_deck_validation(self):\n \t\"\"\"\n \tTest with some hands that are impossible to form with a 52-card deck\n \tFive-of-a-kind\n \tSomething that is both a flush and has a pair (flush wins)\n \tSomething that is both a flush and four-of-a-kind (four-of-a-kind wins)\n \t\"\"\"\n \tpass\n\nif __name__ == '__main__':\n unittest.main()",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
"""Utilties to access a column and one field of a column if the column is composite."""
from typing import TYPE_CHECKING, Optional
from greenplumpython.db import Database
from greenplumpython.expr import Expr
from greenplumpython.type import DataType
if TYPE_CHECKING:
from greenplumpython.dataframe import DataFrame
class ColumnField(Expr):
"""
Inherited from :class:`~expr.Expr`.
Representation of a field of a :class:`~col.Column` of composite type. This
type allows to access to the fields in a dict-like manner.
"""
def __init__(
self,
column: "Column",
field_name: str,
) -> None:
# noqa
""":meta private:"""
self._field_name = field_name
self._column = column
super().__init__(column._dataframe)
def _serialize(self, db: Optional[Database] = None) -> str:
return (
f'({self._column._serialize(db=db)})."{self._field_name}"'
if self._field_name != "*"
else f"({self._column._serialize(db=db)}).*"
)
class Column(Expr):
"""
Inherited from :class:`~expr.Expr`.
Representation of a Python object :class:`~col.Column`.
"""
def __init__(self, name: str, dataframe: "DataFrame") -> None:
# noqa: D400
""":meta private:"""
super().__init__(dataframe=dataframe)
self._name = name
self._type: Optional[DataType] = None # TODO: Add type inference
def _serialize(self, db: Optional[Database] = None) -> str:
assert self._dataframe is not None
# Quote both dataframe name and column name to avoid SQL injection.
return (
f'{self._dataframe._name}."{self._name}"'
if self._name != "*"
else f"{self._dataframe._name}.*"
)
def __getitem__(self, field_name: str) -> ColumnField:
"""
Get access to a field of the current column.
Args:
field_name: str
Returns:
Field of the column with the specified name.
"""
return ColumnField(self, field_name=field_name)
def _bind(
self,
dataframe: Optional["DataFrame"] = None,
db: Optional[Database] = None,
):
# noqa D400
""":meta private:"""
c = Column(
self._name,
self._dataframe,
)
c._db = db if db is not None else dataframe._db if dataframe is not None else self._db
assert c._db is not None
return c
|
normal
|
{
"blob_id": "a52edeec62a6849bda7e5a5481fb6e3d7d9a4c6a",
"index": 8571,
"step-1": "<mask token>\n\n\nclass Column(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a Python object :class:`~col.Column`.\n \"\"\"\n\n def __init__(self, name: str, dataframe: 'DataFrame') ->None:\n \"\"\":meta private:\"\"\"\n super().__init__(dataframe=dataframe)\n self._name = name\n self._type: Optional[DataType] = None\n\n def _serialize(self, db: Optional[Database]=None) ->str:\n assert self._dataframe is not None\n return (f'{self._dataframe._name}.\"{self._name}\"' if self._name !=\n '*' else f'{self._dataframe._name}.*')\n\n def __getitem__(self, field_name: str) ->ColumnField:\n \"\"\"\n Get access to a field of the current column.\n\n Args:\n field_name: str\n\n Returns:\n Field of the column with the specified name.\n \"\"\"\n return ColumnField(self, field_name=field_name)\n\n def _bind(self, dataframe: Optional['DataFrame']=None, db: Optional[\n Database]=None):\n \"\"\":meta private:\"\"\"\n c = Column(self._name, self._dataframe)\n c._db = (db if db is not None else dataframe._db if dataframe is not\n None else self._db)\n assert c._db is not None\n return c\n",
"step-2": "<mask token>\n\n\nclass ColumnField(Expr):\n <mask token>\n\n def __init__(self, column: 'Column', field_name: str) ->None:\n \"\"\":meta private:\"\"\"\n self._field_name = field_name\n self._column = column\n super().__init__(column._dataframe)\n\n def _serialize(self, db: Optional[Database]=None) ->str:\n return (f'({self._column._serialize(db=db)}).\"{self._field_name}\"' if\n self._field_name != '*' else\n f'({self._column._serialize(db=db)}).*')\n\n\nclass Column(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a Python object :class:`~col.Column`.\n \"\"\"\n\n def __init__(self, name: str, dataframe: 'DataFrame') ->None:\n \"\"\":meta private:\"\"\"\n super().__init__(dataframe=dataframe)\n self._name = name\n self._type: Optional[DataType] = None\n\n def _serialize(self, db: Optional[Database]=None) ->str:\n assert self._dataframe is not None\n return (f'{self._dataframe._name}.\"{self._name}\"' if self._name !=\n '*' else f'{self._dataframe._name}.*')\n\n def __getitem__(self, field_name: str) ->ColumnField:\n \"\"\"\n Get access to a field of the current column.\n\n Args:\n field_name: str\n\n Returns:\n Field of the column with the specified name.\n \"\"\"\n return ColumnField(self, field_name=field_name)\n\n def _bind(self, dataframe: Optional['DataFrame']=None, db: Optional[\n Database]=None):\n \"\"\":meta private:\"\"\"\n c = Column(self._name, self._dataframe)\n c._db = (db if db is not None else dataframe._db if dataframe is not\n None else self._db)\n assert c._db is not None\n return c\n",
"step-3": "<mask token>\n\n\nclass ColumnField(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a field of a :class:`~col.Column` of composite type. This\n type allows to access to the fields in a dict-like manner.\n \"\"\"\n\n def __init__(self, column: 'Column', field_name: str) ->None:\n \"\"\":meta private:\"\"\"\n self._field_name = field_name\n self._column = column\n super().__init__(column._dataframe)\n\n def _serialize(self, db: Optional[Database]=None) ->str:\n return (f'({self._column._serialize(db=db)}).\"{self._field_name}\"' if\n self._field_name != '*' else\n f'({self._column._serialize(db=db)}).*')\n\n\nclass Column(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a Python object :class:`~col.Column`.\n \"\"\"\n\n def __init__(self, name: str, dataframe: 'DataFrame') ->None:\n \"\"\":meta private:\"\"\"\n super().__init__(dataframe=dataframe)\n self._name = name\n self._type: Optional[DataType] = None\n\n def _serialize(self, db: Optional[Database]=None) ->str:\n assert self._dataframe is not None\n return (f'{self._dataframe._name}.\"{self._name}\"' if self._name !=\n '*' else f'{self._dataframe._name}.*')\n\n def __getitem__(self, field_name: str) ->ColumnField:\n \"\"\"\n Get access to a field of the current column.\n\n Args:\n field_name: str\n\n Returns:\n Field of the column with the specified name.\n \"\"\"\n return ColumnField(self, field_name=field_name)\n\n def _bind(self, dataframe: Optional['DataFrame']=None, db: Optional[\n Database]=None):\n \"\"\":meta private:\"\"\"\n c = Column(self._name, self._dataframe)\n c._db = (db if db is not None else dataframe._db if dataframe is not\n None else self._db)\n assert c._db is not None\n return c\n",
"step-4": "<mask token>\nfrom typing import TYPE_CHECKING, Optional\nfrom greenplumpython.db import Database\nfrom greenplumpython.expr import Expr\nfrom greenplumpython.type import DataType\nif TYPE_CHECKING:\n from greenplumpython.dataframe import DataFrame\n\n\nclass ColumnField(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a field of a :class:`~col.Column` of composite type. This\n type allows to access to the fields in a dict-like manner.\n \"\"\"\n\n def __init__(self, column: 'Column', field_name: str) ->None:\n \"\"\":meta private:\"\"\"\n self._field_name = field_name\n self._column = column\n super().__init__(column._dataframe)\n\n def _serialize(self, db: Optional[Database]=None) ->str:\n return (f'({self._column._serialize(db=db)}).\"{self._field_name}\"' if\n self._field_name != '*' else\n f'({self._column._serialize(db=db)}).*')\n\n\nclass Column(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a Python object :class:`~col.Column`.\n \"\"\"\n\n def __init__(self, name: str, dataframe: 'DataFrame') ->None:\n \"\"\":meta private:\"\"\"\n super().__init__(dataframe=dataframe)\n self._name = name\n self._type: Optional[DataType] = None\n\n def _serialize(self, db: Optional[Database]=None) ->str:\n assert self._dataframe is not None\n return (f'{self._dataframe._name}.\"{self._name}\"' if self._name !=\n '*' else f'{self._dataframe._name}.*')\n\n def __getitem__(self, field_name: str) ->ColumnField:\n \"\"\"\n Get access to a field of the current column.\n\n Args:\n field_name: str\n\n Returns:\n Field of the column with the specified name.\n \"\"\"\n return ColumnField(self, field_name=field_name)\n\n def _bind(self, dataframe: Optional['DataFrame']=None, db: Optional[\n Database]=None):\n \"\"\":meta private:\"\"\"\n c = Column(self._name, self._dataframe)\n c._db = (db if db is not None else dataframe._db if dataframe is not\n None else self._db)\n assert c._db is not None\n return c\n",
"step-5": "\"\"\"Utilties to access a column and one field of a column if the column is composite.\"\"\"\nfrom typing import TYPE_CHECKING, Optional\n\nfrom greenplumpython.db import Database\nfrom greenplumpython.expr import Expr\nfrom greenplumpython.type import DataType\n\nif TYPE_CHECKING:\n from greenplumpython.dataframe import DataFrame\n\n\nclass ColumnField(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a field of a :class:`~col.Column` of composite type. This\n type allows to access to the fields in a dict-like manner.\n \"\"\"\n\n def __init__(\n self,\n column: \"Column\",\n field_name: str,\n ) -> None:\n # noqa\n \"\"\":meta private:\"\"\"\n self._field_name = field_name\n self._column = column\n super().__init__(column._dataframe)\n\n def _serialize(self, db: Optional[Database] = None) -> str:\n return (\n f'({self._column._serialize(db=db)}).\"{self._field_name}\"'\n if self._field_name != \"*\"\n else f\"({self._column._serialize(db=db)}).*\"\n )\n\n\nclass Column(Expr):\n \"\"\"\n Inherited from :class:`~expr.Expr`.\n\n Representation of a Python object :class:`~col.Column`.\n \"\"\"\n\n def __init__(self, name: str, dataframe: \"DataFrame\") -> None:\n # noqa: D400\n \"\"\":meta private:\"\"\"\n super().__init__(dataframe=dataframe)\n self._name = name\n self._type: Optional[DataType] = None # TODO: Add type inference\n\n def _serialize(self, db: Optional[Database] = None) -> str:\n assert self._dataframe is not None\n # Quote both dataframe name and column name to avoid SQL injection.\n return (\n f'{self._dataframe._name}.\"{self._name}\"'\n if self._name != \"*\"\n else f\"{self._dataframe._name}.*\"\n )\n\n def __getitem__(self, field_name: str) -> ColumnField:\n \"\"\"\n Get access to a field of the current column.\n\n Args:\n field_name: str\n\n Returns:\n Field of the column with the specified name.\n \"\"\"\n return ColumnField(self, field_name=field_name)\n\n def _bind(\n self,\n dataframe: 
Optional[\"DataFrame\"] = None,\n db: Optional[Database] = None,\n ):\n # noqa D400\n \"\"\":meta private:\"\"\"\n c = Column(\n self._name,\n self._dataframe,\n )\n c._db = db if db is not None else dataframe._db if dataframe is not None else self._db\n assert c._db is not None\n return c\n",
"step-ids": [
6,
9,
10,
12,
13
]
}
|
[
6,
9,
10,
12,
13
] |
import micropython
# viper function taking and returning ints
@micropython.viper
def viper_int(x:int, y:int) -> int:
return x + y + 3
print(viper_int(1, 2))
# viper function taking and returning objects
@micropython.viper
def viper_object(x:object, y:object) -> object:
return x + y
print(viper_object(1, 2))
# a local (should have automatic type int)
@micropython.viper
def viper_local(x:int) -> int:
y = 4
return x + y
print(viper_local(3))
# without type annotation, types should default to object
@micropython.viper
def viper_no_annotation(x, y):
return x * y
print(viper_no_annotation(4, 5))
# a for loop
@micropython.viper
def viper_for(a:int, b:int) -> int:
total = 0
for x in range(a, b):
total += x
return total
print(viper_for(10, 10000))
# accessing a global
@micropython.viper
def viper_access_global():
global gl
gl = 1
return gl
print(viper_access_global(), gl)
# calling print with object and int types
@micropython.viper
def viper_print(x, y:int):
print(x, y + 1)
viper_print(1, 2)
# making a tuple from an object and an int
@micropython.viper
def viper_tuple(x, y:int):
return (x, y + 1)
print(viper_tuple(1, 2))
# making a list from an object and an int
@micropython.viper
def viper_list(x, y:int):
return [x, y + 1]
print(viper_list(1, 2))
# making a set from an object and an int
@micropython.viper
def viper_set(x, y:int):
return {x, y + 1}
print(sorted(list(viper_set(1, 2))))
# raising an exception
@micropython.viper
def viper_raise(x:int):
raise OSError(x)
try:
viper_raise(1)
except OSError as e:
print(repr(e))
# this doesn't work at the moment
#@micropython.viper
#def g() -> uint:
# return -1
# calling GC after defining the function
@micropython.viper
def viper_gc() -> int:
return 1
print(viper_gc())
import gc
gc.collect()
print(viper_gc())
|
normal
|
{
"blob_id": "eec52695e5afcc21e5fed6453e96cc3a58e7c1df",
"index": 101,
"step-1": "<mask token>\n\n\[email protected]\ndef viper_int(x: int, y: int) ->int:\n return x + y + 3\n\n\n<mask token>\n\n\[email protected]\ndef viper_local(x: int) ->int:\n y = 4\n return x + y\n\n\n<mask token>\n\n\[email protected]\ndef viper_no_annotation(x, y):\n return x * y\n\n\n<mask token>\n\n\[email protected]\ndef viper_for(a: int, b: int) ->int:\n total = 0\n for x in range(a, b):\n total += x\n return total\n\n\n<mask token>\n\n\[email protected]\ndef viper_access_global():\n global gl\n gl = 1\n return gl\n\n\n<mask token>\n\n\[email protected]\ndef viper_print(x, y: int):\n print(x, y + 1)\n\n\n<mask token>\n\n\[email protected]\ndef viper_set(x, y: int):\n return {x, y + 1}\n\n\n<mask token>\n\n\[email protected]\ndef viper_raise(x: int):\n raise OSError(x)\n\n\n<mask token>\n\n\[email protected]\ndef viper_gc() ->int:\n return 1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]\ndef viper_int(x: int, y: int) ->int:\n return x + y + 3\n\n\n<mask token>\n\n\[email protected]\ndef viper_local(x: int) ->int:\n y = 4\n return x + y\n\n\n<mask token>\n\n\[email protected]\ndef viper_no_annotation(x, y):\n return x * y\n\n\n<mask token>\n\n\[email protected]\ndef viper_for(a: int, b: int) ->int:\n total = 0\n for x in range(a, b):\n total += x\n return total\n\n\n<mask token>\n\n\[email protected]\ndef viper_access_global():\n global gl\n gl = 1\n return gl\n\n\n<mask token>\n\n\[email protected]\ndef viper_print(x, y: int):\n print(x, y + 1)\n\n\n<mask token>\n\n\[email protected]\ndef viper_tuple(x, y: int):\n return x, y + 1\n\n\n<mask token>\n\n\[email protected]\ndef viper_set(x, y: int):\n return {x, y + 1}\n\n\n<mask token>\n\n\[email protected]\ndef viper_raise(x: int):\n raise OSError(x)\n\n\n<mask token>\n\n\[email protected]\ndef viper_gc() ->int:\n return 1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\[email protected]\ndef viper_int(x: int, y: int) ->int:\n return x + y + 3\n\n\n<mask token>\n\n\[email protected]\ndef viper_object(x: object, y: object) ->object:\n return x + y\n\n\n<mask token>\n\n\[email protected]\ndef viper_local(x: int) ->int:\n y = 4\n return x + y\n\n\n<mask token>\n\n\[email protected]\ndef viper_no_annotation(x, y):\n return x * y\n\n\n<mask token>\n\n\[email protected]\ndef viper_for(a: int, b: int) ->int:\n total = 0\n for x in range(a, b):\n total += x\n return total\n\n\n<mask token>\n\n\[email protected]\ndef viper_access_global():\n global gl\n gl = 1\n return gl\n\n\n<mask token>\n\n\[email protected]\ndef viper_print(x, y: int):\n print(x, y + 1)\n\n\n<mask token>\n\n\[email protected]\ndef viper_tuple(x, y: int):\n return x, y + 1\n\n\n<mask token>\n\n\[email protected]\ndef viper_list(x, y: int):\n return [x, y + 1]\n\n\n<mask token>\n\n\[email protected]\ndef viper_set(x, y: int):\n return {x, y + 1}\n\n\n<mask token>\n\n\[email protected]\ndef viper_raise(x: int):\n raise OSError(x)\n\n\n<mask token>\n\n\[email protected]\ndef viper_gc() ->int:\n return 1\n\n\n<mask token>\n",
"step-4": "import micropython\n\n\[email protected]\ndef viper_int(x: int, y: int) ->int:\n return x + y + 3\n\n\nprint(viper_int(1, 2))\n\n\[email protected]\ndef viper_object(x: object, y: object) ->object:\n return x + y\n\n\nprint(viper_object(1, 2))\n\n\[email protected]\ndef viper_local(x: int) ->int:\n y = 4\n return x + y\n\n\nprint(viper_local(3))\n\n\[email protected]\ndef viper_no_annotation(x, y):\n return x * y\n\n\nprint(viper_no_annotation(4, 5))\n\n\[email protected]\ndef viper_for(a: int, b: int) ->int:\n total = 0\n for x in range(a, b):\n total += x\n return total\n\n\nprint(viper_for(10, 10000))\n\n\[email protected]\ndef viper_access_global():\n global gl\n gl = 1\n return gl\n\n\nprint(viper_access_global(), gl)\n\n\[email protected]\ndef viper_print(x, y: int):\n print(x, y + 1)\n\n\nviper_print(1, 2)\n\n\[email protected]\ndef viper_tuple(x, y: int):\n return x, y + 1\n\n\nprint(viper_tuple(1, 2))\n\n\[email protected]\ndef viper_list(x, y: int):\n return [x, y + 1]\n\n\nprint(viper_list(1, 2))\n\n\[email protected]\ndef viper_set(x, y: int):\n return {x, y + 1}\n\n\nprint(sorted(list(viper_set(1, 2))))\n\n\[email protected]\ndef viper_raise(x: int):\n raise OSError(x)\n\n\ntry:\n viper_raise(1)\nexcept OSError as e:\n print(repr(e))\n\n\[email protected]\ndef viper_gc() ->int:\n return 1\n\n\nprint(viper_gc())\nimport gc\ngc.collect()\nprint(viper_gc())\n",
"step-5": "import micropython\r\n\r\n# viper function taking and returning ints\r\[email protected]\r\ndef viper_int(x:int, y:int) -> int:\r\n return x + y + 3\r\nprint(viper_int(1, 2))\r\n\r\n# viper function taking and returning objects\r\[email protected]\r\ndef viper_object(x:object, y:object) -> object:\r\n return x + y\r\nprint(viper_object(1, 2))\r\n\r\n# a local (should have automatic type int)\r\[email protected]\r\ndef viper_local(x:int) -> int:\r\n y = 4\r\n return x + y\r\nprint(viper_local(3))\r\n\r\n# without type annotation, types should default to object\r\[email protected]\r\ndef viper_no_annotation(x, y):\r\n return x * y\r\nprint(viper_no_annotation(4, 5))\r\n\r\n# a for loop\r\[email protected]\r\ndef viper_for(a:int, b:int) -> int:\r\n total = 0\r\n for x in range(a, b):\r\n total += x\r\n return total\r\nprint(viper_for(10, 10000))\r\n\r\n# accessing a global\r\[email protected]\r\ndef viper_access_global():\r\n global gl\r\n gl = 1\r\n return gl\r\nprint(viper_access_global(), gl)\r\n\r\n# calling print with object and int types\r\[email protected]\r\ndef viper_print(x, y:int):\r\n print(x, y + 1)\r\nviper_print(1, 2)\r\n\r\n# making a tuple from an object and an int\r\[email protected]\r\ndef viper_tuple(x, y:int):\r\n return (x, y + 1)\r\nprint(viper_tuple(1, 2))\r\n\r\n# making a list from an object and an int\r\[email protected]\r\ndef viper_list(x, y:int):\r\n return [x, y + 1]\r\nprint(viper_list(1, 2))\r\n\r\n# making a set from an object and an int\r\[email protected]\r\ndef viper_set(x, y:int):\r\n return {x, y + 1}\r\nprint(sorted(list(viper_set(1, 2))))\r\n\r\n# raising an exception\r\[email protected]\r\ndef viper_raise(x:int):\r\n raise OSError(x)\r\ntry:\r\n viper_raise(1)\r\nexcept OSError as e:\r\n print(repr(e))\r\n\r\n# this doesn't work at the moment\r\n#@micropython.viper\r\n#def g() -> uint:\r\n# return -1\r\n\r\n# calling GC after defining the function\r\[email protected]\r\ndef viper_gc() -> int:\r\n return 
1\r\nprint(viper_gc())\r\nimport gc\r\ngc.collect()\r\nprint(viper_gc())\r\n",
"step-ids": [
9,
10,
12,
14,
15
]
}
|
[
9,
10,
12,
14,
15
] |
#!/usr/bin/python
def sumbelow(n):
multiples_of_3 = set(range(0,n,3))
multiples_of_5 = set(range(0,n,5))
return sum(multiples_of_3.union(multiples_of_5))
#one linear:
# return sum(set(range(0,n,3)).union(set(range(0,n,5)))),
# or rather,
# return sum(set(range(0,n,3) + range(0,n,5)))
if __name__ == '__main__':
print sumbelow(1000)
n = 1000
|
normal
|
{
"blob_id": "8dbc0b9b80aae4cb5c4101007afc50ac54f7a7e7",
"index": 5873,
"step-1": "#!/usr/bin/python\n\ndef sumbelow(n):\n multiples_of_3 = set(range(0,n,3))\n multiples_of_5 = set(range(0,n,5))\n return sum(multiples_of_3.union(multiples_of_5))\n\n#one linear:\n# return sum(set(range(0,n,3)).union(set(range(0,n,5)))),\n# or rather,\n# return sum(set(range(0,n,3) + range(0,n,5)))\n\nif __name__ == '__main__':\n print sumbelow(1000)\n n = 1000\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
text = "I love Python Programming"
for word in text.split():
print(word)
|
normal
|
{
"blob_id": "fdc8f9ff9a0e2cd8ad1990948036d9e420fdc074",
"index": 4216,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor word in text.split():\n print(word)\n",
"step-3": "text = 'I love Python Programming'\nfor word in text.split():\n print(word)\n",
"step-4": "text = \"I love Python Programming\"\nfor word in text.split():\n print(word)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding:utf-8 -*-
import sys
from PyQt4 import QtGui,QtCore
import experiment
class Node(QtGui.QGraphicsEllipseItem):
def __init__(self,name):
super(Node, self).__init__()
self.__name = name
def getName(self):
return self.__name
def changeBrush(self, color, style):
b = QtGui.QBrush()
b.setStyle(style)
c = b.color()
c.setRgb(color[0],color[1],color[2])
b.setColor(c)
self.setBrush(b)
class Link(QtGui.QGraphicsLineItem):
def __init__(self,name,link_type):
super(Link, self).__init__()
self.__link_type = link_type
self.__name = name
def getName(self):
return self.__name
def getType(self):
return self.__link_type
def changeType(self,link_type):
self.__link_type = link_type
def changeColor(self,color):
p = QtGui.QPen()
c = p.color()
c.setRgb(color[0],color[1],color[2])
p.setColor(c)
self.setPen(p)
class Text(QtGui.QGraphicsTextItem):
def __init__(self,name,text):
super(Text, self).__init__(text)
self.__name = name
def getName(self):
return self.__name
class GUI(QtGui.QWidget):
def __init__(self):
super(GUI, self).__init__()
self.exp = experiment.Experiments(20,3)
self.matching = self.exp.unidirectional_match()
self.man_rank, self.woman_rank = self.matching.get_avg_rank()
self.man_spouse, self.woman_spouse = self.matching.get_spouse_rank()
self.initUI()
self.showMaximized()
def initUI(self):
self.setWindowTitle(' Stable Matching ')
grid = QtGui.QGridLayout()
step_button = QtGui.QPushButton('STEP',self)
epoch_button = QtGui.QPushButton('EPOCH',self)
end_button = QtGui.QPushButton('END',self)
self.showText = QtGui.QTextEdit(self)
self.showText.setText('START! ')
self.statu_scene = QtGui.QGraphicsScene(self)
self.initScene(self.statu_scene)
self.statu_view = QtGui.QGraphicsView()
self.statu_view.setScene(self.statu_scene)
self.statu_view.setMinimumSize(600,600)
self.statu_view.show()
self.history_scene = QtGui.QGraphicsScene(self)
self.initScene(self.history_scene)
self.history_view = QtGui.QGraphicsView()
self.history_view.setScene(self.history_scene)
self.history_view.setMinimumSize(600,600)
self.history_view.show()
grid.addWidget(step_button,1,1)
grid.addWidget(epoch_button,2,1)
grid.addWidget(end_button,3,1)
grid.addWidget(self.showText,1,2,4,1)
grid.addWidget(self.statu_view,1,3,4,1)
grid.addWidget(self.history_view,1,4,4,1)
self.setLayout(grid)
self.connect(step_button,QtCore.SIGNAL('clicked()'),self.nextStep)
self.connect(epoch_button,QtCore.SIGNAL('clicked()'),self.nextEpoch)
self.connect(end_button,QtCore.SIGNAL('clicked()'),self.exeToEnd)
def initScene(self,scene):
man_num = self.exp.get_man_num()
woman_num = self.exp.get_woman_num()
length = max(man_num,woman_num) * 30
scene.setSceneRect(0,0,600,length)
for i in range(man_num):
node = self.__addNode(scene, 'M_'+str(i),120,i*30,20,20,(0,0,255))
for i in range(woman_num):
node = self.__addNode(scene, 'W_'+str(i),480,i*30,20,20,(255,0,0))
def __addNode(self, scene, name, x, y, w, h, color=(0,0,0)):
sex = name.split('_')[0]
number = name.split('_')[1]
rank_bias = spouse_bias = rank = 0
if sex == 'M':
rank = self.man_rank[int(number)]
rank_bias = -2.0
spouse_bias = -4.0
elif sex == 'W':
rank = self.woman_rank[int(number)]
rank_bias = 2.0
spouse_bias = 4.0
node = Node(name)
node.setRect(x,y,w,h)
node.changeBrush(color,1)
if int(number) < 10:
number = '0' + number
text = QtGui.QGraphicsTextItem (number, node)
text.setPos(x,y)
text.setTextWidth(1.5*w)
font = QtGui.QFont('Times',8)
font.setWeight(99)
text.setFont(font)
rank_text = QtGui.QGraphicsTextItem (str(rank), node)
rank_text.setPos(x + rank_bias*w,y)
rank_text.setTextWidth(2*w)
font = QtGui.QFont('Times',8)
font.setWeight(99)
rank_text.setFont(font)
spouse_text = Text(name+'_S', '-1')
spouse_text.setPos(x + spouse_bias*w,y)
spouse_text.setTextWidth(1.5*w)
font = QtGui.QFont('Times',8)
font.setWeight(99)
spouse_text.setFont(font)
scene.addItem(node)
scene.addItem(spouse_text)
def __addLink(self, scene, name, node1, node2, color = (0,0,0), link_type = ''):
center1 = node1.boundingRect().center()
center2 = node2.boundingRect().center()
name1 = node1.getName().split('_')[1]
name2 = node2.getName().split('_')[1]
link = Link(name1 + '-' + name2, link_type)
link.setLine(center1.x(),center1.y(),center2.x(),center2.y())
link.changeColor(color)
scene.addItem(link)
def __deleteLink(self, scene, name):
link = self.__findItem(name, Link, scene.items())
scene.removeItem(link)
def __changeText(self, scene, name, text):
txt = self.__findItem(name, Text, scene.items())
txt.setPlainText(text)
def __findItem(self, name, _type, items):
for item in items:
if isinstance(item, _type) and name == item.getName():
return item
return False
def __clearLinks(self, scene):
for item in scene.items():
if isinstance(item,Link) and item.getType() != 'marry':
scene.removeItem(item)
def __clearUpLinks(self, scene):
for item in scene.items():
if isinstance(item, Link):
scene.removeItem(item)
def __refreshViewStep(self, info):
record = info.split('\n')
length = len(record)
lineiter = 0
epoch = record[lineiter].strip().split(':')[1]
lineiter += 1
step = record[lineiter].strip().split(':')[1]
lineiter += 1
statu = record[lineiter].strip()
if 'DONE' in statu:
return 0
elif 'is not activity' in statu:
return 1
elif 'is married' in statu:
return 2
couple = statu.replace(' ','').split('target')
man = self.__findItem('M_'+couple[0], Node, self.statu_scene.items())
woman = self.__findItem('W_'+couple[1], Node, self.statu_scene.items())
lineiter += 1
sui_rank = record[lineiter].replace(' ','').split(':')[1]
lineiter += 1
if 'Husband Rank' in record[lineiter]:
husband_rank = record[lineiter].replace(' ','').split(':')[1]
lineiter += 1
if 'Succeed' in record[lineiter]:
self.__addLink(self.statu_scene, couple[0] + '-' + couple[1], man, woman, link_type = 'marry')
self.__addLink(self.history_scene, couple[0] + '-' + couple[1], man, woman, link_type = 'marry')
self.__changeText(self.statu_scene, 'M_' + couple[0] + '_S', str(self.matching.get_spouse_rank(int(couple[0]) + 1)))
self.__changeText(self.statu_scene, 'W_' + couple[1] + '_S', str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))
self.__changeText(self.history_scene, 'M_' + couple[0] + '_S', str(self.matching.get_spouse_rank(int(couple[0]) + 1)))
self.__changeText(self.history_scene, 'W_' + couple[1] + '_S', str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))
lineiter += 1
if lineiter <= length:
if 'threw away' in record[lineiter]:
throwCouple = record[lineiter].replace(' ','').split('threwaway')
node1 = self.__findItem('M_' + throwCouple[1], Node, self.history_scene.items())
node2 = self.__findItem('W_' + throwCouple[0], Node, self.history_scene.items())
self.__addLink(self.history_scene, throwCouple[1] + '-' + throwCouple[0], node1, node2, (0,255,0) , 'break')
self.__deleteLink(self.statu_scene, throwCouple[1] + '-' + throwCouple[0])
self.__changeText(self.statu_scene, 'M_' + throwCouple[1] + '_S', '-1')
self.__changeText(self.history_scene, 'M_' + throwCouple[1] + '_S', '-1')
self.statu_view.update()
self.history_view.update()
elif 'Failed' in record[lineiter]:
self.__addLink(self.history_scene, couple[0] + '-' + couple[1], man, woman, (0,0,255) , 'failed')
self.statu_view.update()
self.history_view.update()
def nextStep(self):
info = self.matching.step()
self.showText.setText(info)
self.__clearLinks(self.statu_scene)
self.__clearUpLinks(self.history_scene)
self.__refreshViewStep(info)
def nextEpoch(self):
info = self.matching.epoch()
self.__clearLinks(self.statu_scene)
self.__clearUpLinks(self.history_scene)
sep = info.split('\n')[0]
records = info.split(sep+'\n')
del records[0]
for record in records:
self.__refreshViewStep(sep+'\n'+record)
self.showText.setText(info)
def exeToEnd(self):
info = self.matching.exe_to_end()
self.__clearLinks(self.statu_scene)
self.__clearUpLinks(self.history_scene)
records = info.split('EPOCH')
del records[0]
for record in records:
self.__refreshViewStep('EPOCH'+record)
self.showText.setText(info)
def closeEvent(self, event):
reply = QtGui.QMessageBox.question(self, 'Message',
'Are you sure to quit?', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == '__main__':
    # Create the Qt application, show the single top-level window, and
    # hand control to the event loop; its exit code becomes ours.
    application = QtGui.QApplication(sys.argv)
    window = GUI()
    window.show()
    sys.exit(application.exec_())
|
normal
|
{
"blob_id": "edbb721784dff81e3e1ab5e0458a4080508807fe",
"index": 4335,
"step-1": "<mask token>\n\n\nclass Text(QtGui.QGraphicsTextItem):\n <mask token>\n\n def getName(self):\n return self.__name\n\n\nclass GUI(QtGui.QWidget):\n\n def __init__(self):\n super(GUI, self).__init__()\n self.exp = experiment.Experiments(20, 3)\n self.matching = self.exp.unidirectional_match()\n self.man_rank, self.woman_rank = self.matching.get_avg_rank()\n self.man_spouse, self.woman_spouse = self.matching.get_spouse_rank()\n self.initUI()\n self.showMaximized()\n\n def initUI(self):\n self.setWindowTitle(' Stable Matching ')\n grid = QtGui.QGridLayout()\n step_button = QtGui.QPushButton('STEP', self)\n epoch_button = QtGui.QPushButton('EPOCH', self)\n end_button = QtGui.QPushButton('END', self)\n self.showText = QtGui.QTextEdit(self)\n self.showText.setText('START! ')\n self.statu_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.statu_scene)\n self.statu_view = QtGui.QGraphicsView()\n self.statu_view.setScene(self.statu_scene)\n self.statu_view.setMinimumSize(600, 600)\n self.statu_view.show()\n self.history_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.history_scene)\n self.history_view = QtGui.QGraphicsView()\n self.history_view.setScene(self.history_scene)\n self.history_view.setMinimumSize(600, 600)\n self.history_view.show()\n grid.addWidget(step_button, 1, 1)\n grid.addWidget(epoch_button, 2, 1)\n grid.addWidget(end_button, 3, 1)\n grid.addWidget(self.showText, 1, 2, 4, 1)\n grid.addWidget(self.statu_view, 1, 3, 4, 1)\n grid.addWidget(self.history_view, 1, 4, 4, 1)\n self.setLayout(grid)\n self.connect(step_button, QtCore.SIGNAL('clicked()'), self.nextStep)\n self.connect(epoch_button, QtCore.SIGNAL('clicked()'), self.nextEpoch)\n self.connect(end_button, QtCore.SIGNAL('clicked()'), self.exeToEnd)\n\n def initScene(self, scene):\n man_num = self.exp.get_man_num()\n woman_num = self.exp.get_woman_num()\n length = max(man_num, woman_num) * 30\n scene.setSceneRect(0, 0, 600, length)\n for i in range(man_num):\n node = 
self.__addNode(scene, 'M_' + str(i), 120, i * 30, 20, 20,\n (0, 0, 255))\n for i in range(woman_num):\n node = self.__addNode(scene, 'W_' + str(i), 480, i * 30, 20, 20,\n (255, 0, 0))\n\n def __addNode(self, scene, name, x, y, w, h, color=(0, 0, 0)):\n sex = name.split('_')[0]\n number = name.split('_')[1]\n rank_bias = spouse_bias = rank = 0\n if sex == 'M':\n rank = self.man_rank[int(number)]\n rank_bias = -2.0\n spouse_bias = -4.0\n elif sex == 'W':\n rank = self.woman_rank[int(number)]\n rank_bias = 2.0\n spouse_bias = 4.0\n node = Node(name)\n node.setRect(x, y, w, h)\n node.changeBrush(color, 1)\n if int(number) < 10:\n number = '0' + number\n text = QtGui.QGraphicsTextItem(number, node)\n text.setPos(x, y)\n text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n text.setFont(font)\n rank_text = QtGui.QGraphicsTextItem(str(rank), node)\n rank_text.setPos(x + rank_bias * w, y)\n rank_text.setTextWidth(2 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n rank_text.setFont(font)\n spouse_text = Text(name + '_S', '-1')\n spouse_text.setPos(x + spouse_bias * w, y)\n spouse_text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n spouse_text.setFont(font)\n scene.addItem(node)\n scene.addItem(spouse_text)\n\n def __addLink(self, scene, name, node1, node2, color=(0, 0, 0),\n link_type=''):\n center1 = node1.boundingRect().center()\n center2 = node2.boundingRect().center()\n name1 = node1.getName().split('_')[1]\n name2 = node2.getName().split('_')[1]\n link = Link(name1 + '-' + name2, link_type)\n link.setLine(center1.x(), center1.y(), center2.x(), center2.y())\n link.changeColor(color)\n scene.addItem(link)\n\n def __deleteLink(self, scene, name):\n link = self.__findItem(name, Link, scene.items())\n scene.removeItem(link)\n\n def __changeText(self, scene, name, text):\n txt = self.__findItem(name, Text, scene.items())\n txt.setPlainText(text)\n\n def __findItem(self, name, _type, items):\n for item in 
items:\n if isinstance(item, _type) and name == item.getName():\n return item\n return False\n\n def __clearLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link) and item.getType() != 'marry':\n scene.removeItem(item)\n\n def __clearUpLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link):\n scene.removeItem(item)\n\n def __refreshViewStep(self, info):\n record = info.split('\\n')\n length = len(record)\n lineiter = 0\n epoch = record[lineiter].strip().split(':')[1]\n lineiter += 1\n step = record[lineiter].strip().split(':')[1]\n lineiter += 1\n statu = record[lineiter].strip()\n if 'DONE' in statu:\n return 0\n elif 'is not activity' in statu:\n return 1\n elif 'is married' in statu:\n return 2\n couple = statu.replace(' ', '').split('target')\n man = self.__findItem('M_' + couple[0], Node, self.statu_scene.items())\n woman = self.__findItem('W_' + couple[1], Node, self.statu_scene.\n items())\n lineiter += 1\n sui_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Husband Rank' in record[lineiter]:\n husband_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Succeed' in record[lineiter]:\n self.__addLink(self.statu_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__changeText(self.statu_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.statu_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n self.__changeText(self.history_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.history_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n lineiter += 1\n if lineiter <= length:\n if 'threw away' in record[lineiter]:\n throwCouple = 
record[lineiter].replace(' ', '').split(\n 'threwaway')\n node1 = self.__findItem('M_' + throwCouple[1], Node,\n self.history_scene.items())\n node2 = self.__findItem('W_' + throwCouple[0], Node,\n self.history_scene.items())\n self.__addLink(self.history_scene, throwCouple[1] + '-' +\n throwCouple[0], node1, node2, (0, 255, 0), 'break')\n self.__deleteLink(self.statu_scene, throwCouple[1] +\n '-' + throwCouple[0])\n self.__changeText(self.statu_scene, 'M_' + throwCouple[\n 1] + '_S', '-1')\n self.__changeText(self.history_scene, 'M_' +\n throwCouple[1] + '_S', '-1')\n self.statu_view.update()\n self.history_view.update()\n elif 'Failed' in record[lineiter]:\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, (0, 0, 255), 'failed')\n self.statu_view.update()\n self.history_view.update()\n\n def nextStep(self):\n info = self.matching.step()\n self.showText.setText(info)\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n self.__refreshViewStep(info)\n\n def nextEpoch(self):\n info = self.matching.epoch()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n sep = info.split('\\n')[0]\n records = info.split(sep + '\\n')\n del records[0]\n for record in records:\n self.__refreshViewStep(sep + '\\n' + record)\n self.showText.setText(info)\n\n def exeToEnd(self):\n info = self.matching.exe_to_end()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n records = info.split('EPOCH')\n del records[0]\n for record in records:\n self.__refreshViewStep('EPOCH' + record)\n self.showText.setText(info)\n\n def closeEvent(self, event):\n reply = QtGui.QMessageBox.question(self, 'Message',\n 'Are you sure to quit?', QtGui.QMessageBox.Yes, QtGui.\n QMessageBox.No)\n if reply == QtGui.QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Link(QtGui.QGraphicsLineItem):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Text(QtGui.QGraphicsTextItem):\n\n def __init__(self, name, text):\n super(Text, self).__init__(text)\n self.__name = name\n\n def getName(self):\n return self.__name\n\n\nclass GUI(QtGui.QWidget):\n\n def __init__(self):\n super(GUI, self).__init__()\n self.exp = experiment.Experiments(20, 3)\n self.matching = self.exp.unidirectional_match()\n self.man_rank, self.woman_rank = self.matching.get_avg_rank()\n self.man_spouse, self.woman_spouse = self.matching.get_spouse_rank()\n self.initUI()\n self.showMaximized()\n\n def initUI(self):\n self.setWindowTitle(' Stable Matching ')\n grid = QtGui.QGridLayout()\n step_button = QtGui.QPushButton('STEP', self)\n epoch_button = QtGui.QPushButton('EPOCH', self)\n end_button = QtGui.QPushButton('END', self)\n self.showText = QtGui.QTextEdit(self)\n self.showText.setText('START! ')\n self.statu_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.statu_scene)\n self.statu_view = QtGui.QGraphicsView()\n self.statu_view.setScene(self.statu_scene)\n self.statu_view.setMinimumSize(600, 600)\n self.statu_view.show()\n self.history_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.history_scene)\n self.history_view = QtGui.QGraphicsView()\n self.history_view.setScene(self.history_scene)\n self.history_view.setMinimumSize(600, 600)\n self.history_view.show()\n grid.addWidget(step_button, 1, 1)\n grid.addWidget(epoch_button, 2, 1)\n grid.addWidget(end_button, 3, 1)\n grid.addWidget(self.showText, 1, 2, 4, 1)\n grid.addWidget(self.statu_view, 1, 3, 4, 1)\n grid.addWidget(self.history_view, 1, 4, 4, 1)\n self.setLayout(grid)\n self.connect(step_button, QtCore.SIGNAL('clicked()'), self.nextStep)\n self.connect(epoch_button, QtCore.SIGNAL('clicked()'), self.nextEpoch)\n self.connect(end_button, QtCore.SIGNAL('clicked()'), self.exeToEnd)\n\n def initScene(self, scene):\n man_num 
= self.exp.get_man_num()\n woman_num = self.exp.get_woman_num()\n length = max(man_num, woman_num) * 30\n scene.setSceneRect(0, 0, 600, length)\n for i in range(man_num):\n node = self.__addNode(scene, 'M_' + str(i), 120, i * 30, 20, 20,\n (0, 0, 255))\n for i in range(woman_num):\n node = self.__addNode(scene, 'W_' + str(i), 480, i * 30, 20, 20,\n (255, 0, 0))\n\n def __addNode(self, scene, name, x, y, w, h, color=(0, 0, 0)):\n sex = name.split('_')[0]\n number = name.split('_')[1]\n rank_bias = spouse_bias = rank = 0\n if sex == 'M':\n rank = self.man_rank[int(number)]\n rank_bias = -2.0\n spouse_bias = -4.0\n elif sex == 'W':\n rank = self.woman_rank[int(number)]\n rank_bias = 2.0\n spouse_bias = 4.0\n node = Node(name)\n node.setRect(x, y, w, h)\n node.changeBrush(color, 1)\n if int(number) < 10:\n number = '0' + number\n text = QtGui.QGraphicsTextItem(number, node)\n text.setPos(x, y)\n text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n text.setFont(font)\n rank_text = QtGui.QGraphicsTextItem(str(rank), node)\n rank_text.setPos(x + rank_bias * w, y)\n rank_text.setTextWidth(2 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n rank_text.setFont(font)\n spouse_text = Text(name + '_S', '-1')\n spouse_text.setPos(x + spouse_bias * w, y)\n spouse_text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n spouse_text.setFont(font)\n scene.addItem(node)\n scene.addItem(spouse_text)\n\n def __addLink(self, scene, name, node1, node2, color=(0, 0, 0),\n link_type=''):\n center1 = node1.boundingRect().center()\n center2 = node2.boundingRect().center()\n name1 = node1.getName().split('_')[1]\n name2 = node2.getName().split('_')[1]\n link = Link(name1 + '-' + name2, link_type)\n link.setLine(center1.x(), center1.y(), center2.x(), center2.y())\n link.changeColor(color)\n scene.addItem(link)\n\n def __deleteLink(self, scene, name):\n link = self.__findItem(name, Link, scene.items())\n 
scene.removeItem(link)\n\n def __changeText(self, scene, name, text):\n txt = self.__findItem(name, Text, scene.items())\n txt.setPlainText(text)\n\n def __findItem(self, name, _type, items):\n for item in items:\n if isinstance(item, _type) and name == item.getName():\n return item\n return False\n\n def __clearLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link) and item.getType() != 'marry':\n scene.removeItem(item)\n\n def __clearUpLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link):\n scene.removeItem(item)\n\n def __refreshViewStep(self, info):\n record = info.split('\\n')\n length = len(record)\n lineiter = 0\n epoch = record[lineiter].strip().split(':')[1]\n lineiter += 1\n step = record[lineiter].strip().split(':')[1]\n lineiter += 1\n statu = record[lineiter].strip()\n if 'DONE' in statu:\n return 0\n elif 'is not activity' in statu:\n return 1\n elif 'is married' in statu:\n return 2\n couple = statu.replace(' ', '').split('target')\n man = self.__findItem('M_' + couple[0], Node, self.statu_scene.items())\n woman = self.__findItem('W_' + couple[1], Node, self.statu_scene.\n items())\n lineiter += 1\n sui_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Husband Rank' in record[lineiter]:\n husband_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Succeed' in record[lineiter]:\n self.__addLink(self.statu_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__changeText(self.statu_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.statu_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n self.__changeText(self.history_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n 
self.__changeText(self.history_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n lineiter += 1\n if lineiter <= length:\n if 'threw away' in record[lineiter]:\n throwCouple = record[lineiter].replace(' ', '').split(\n 'threwaway')\n node1 = self.__findItem('M_' + throwCouple[1], Node,\n self.history_scene.items())\n node2 = self.__findItem('W_' + throwCouple[0], Node,\n self.history_scene.items())\n self.__addLink(self.history_scene, throwCouple[1] + '-' +\n throwCouple[0], node1, node2, (0, 255, 0), 'break')\n self.__deleteLink(self.statu_scene, throwCouple[1] +\n '-' + throwCouple[0])\n self.__changeText(self.statu_scene, 'M_' + throwCouple[\n 1] + '_S', '-1')\n self.__changeText(self.history_scene, 'M_' +\n throwCouple[1] + '_S', '-1')\n self.statu_view.update()\n self.history_view.update()\n elif 'Failed' in record[lineiter]:\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, (0, 0, 255), 'failed')\n self.statu_view.update()\n self.history_view.update()\n\n def nextStep(self):\n info = self.matching.step()\n self.showText.setText(info)\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n self.__refreshViewStep(info)\n\n def nextEpoch(self):\n info = self.matching.epoch()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n sep = info.split('\\n')[0]\n records = info.split(sep + '\\n')\n del records[0]\n for record in records:\n self.__refreshViewStep(sep + '\\n' + record)\n self.showText.setText(info)\n\n def exeToEnd(self):\n info = self.matching.exe_to_end()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n records = info.split('EPOCH')\n del records[0]\n for record in records:\n self.__refreshViewStep('EPOCH' + record)\n self.showText.setText(info)\n\n def closeEvent(self, event):\n reply = QtGui.QMessageBox.question(self, 'Message',\n 'Are you sure to quit?', QtGui.QMessageBox.Yes, QtGui.\n 
QMessageBox.No)\n if reply == QtGui.QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Link(QtGui.QGraphicsLineItem):\n\n def __init__(self, name, link_type):\n super(Link, self).__init__()\n self.__link_type = link_type\n self.__name = name\n <mask token>\n\n def getType(self):\n return self.__link_type\n\n def changeType(self, link_type):\n self.__link_type = link_type\n <mask token>\n\n\nclass Text(QtGui.QGraphicsTextItem):\n\n def __init__(self, name, text):\n super(Text, self).__init__(text)\n self.__name = name\n\n def getName(self):\n return self.__name\n\n\nclass GUI(QtGui.QWidget):\n\n def __init__(self):\n super(GUI, self).__init__()\n self.exp = experiment.Experiments(20, 3)\n self.matching = self.exp.unidirectional_match()\n self.man_rank, self.woman_rank = self.matching.get_avg_rank()\n self.man_spouse, self.woman_spouse = self.matching.get_spouse_rank()\n self.initUI()\n self.showMaximized()\n\n def initUI(self):\n self.setWindowTitle(' Stable Matching ')\n grid = QtGui.QGridLayout()\n step_button = QtGui.QPushButton('STEP', self)\n epoch_button = QtGui.QPushButton('EPOCH', self)\n end_button = QtGui.QPushButton('END', self)\n self.showText = QtGui.QTextEdit(self)\n self.showText.setText('START! 
')\n self.statu_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.statu_scene)\n self.statu_view = QtGui.QGraphicsView()\n self.statu_view.setScene(self.statu_scene)\n self.statu_view.setMinimumSize(600, 600)\n self.statu_view.show()\n self.history_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.history_scene)\n self.history_view = QtGui.QGraphicsView()\n self.history_view.setScene(self.history_scene)\n self.history_view.setMinimumSize(600, 600)\n self.history_view.show()\n grid.addWidget(step_button, 1, 1)\n grid.addWidget(epoch_button, 2, 1)\n grid.addWidget(end_button, 3, 1)\n grid.addWidget(self.showText, 1, 2, 4, 1)\n grid.addWidget(self.statu_view, 1, 3, 4, 1)\n grid.addWidget(self.history_view, 1, 4, 4, 1)\n self.setLayout(grid)\n self.connect(step_button, QtCore.SIGNAL('clicked()'), self.nextStep)\n self.connect(epoch_button, QtCore.SIGNAL('clicked()'), self.nextEpoch)\n self.connect(end_button, QtCore.SIGNAL('clicked()'), self.exeToEnd)\n\n def initScene(self, scene):\n man_num = self.exp.get_man_num()\n woman_num = self.exp.get_woman_num()\n length = max(man_num, woman_num) * 30\n scene.setSceneRect(0, 0, 600, length)\n for i in range(man_num):\n node = self.__addNode(scene, 'M_' + str(i), 120, i * 30, 20, 20,\n (0, 0, 255))\n for i in range(woman_num):\n node = self.__addNode(scene, 'W_' + str(i), 480, i * 30, 20, 20,\n (255, 0, 0))\n\n def __addNode(self, scene, name, x, y, w, h, color=(0, 0, 0)):\n sex = name.split('_')[0]\n number = name.split('_')[1]\n rank_bias = spouse_bias = rank = 0\n if sex == 'M':\n rank = self.man_rank[int(number)]\n rank_bias = -2.0\n spouse_bias = -4.0\n elif sex == 'W':\n rank = self.woman_rank[int(number)]\n rank_bias = 2.0\n spouse_bias = 4.0\n node = Node(name)\n node.setRect(x, y, w, h)\n node.changeBrush(color, 1)\n if int(number) < 10:\n number = '0' + number\n text = QtGui.QGraphicsTextItem(number, node)\n text.setPos(x, y)\n text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n 
font.setWeight(99)\n text.setFont(font)\n rank_text = QtGui.QGraphicsTextItem(str(rank), node)\n rank_text.setPos(x + rank_bias * w, y)\n rank_text.setTextWidth(2 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n rank_text.setFont(font)\n spouse_text = Text(name + '_S', '-1')\n spouse_text.setPos(x + spouse_bias * w, y)\n spouse_text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n spouse_text.setFont(font)\n scene.addItem(node)\n scene.addItem(spouse_text)\n\n def __addLink(self, scene, name, node1, node2, color=(0, 0, 0),\n link_type=''):\n center1 = node1.boundingRect().center()\n center2 = node2.boundingRect().center()\n name1 = node1.getName().split('_')[1]\n name2 = node2.getName().split('_')[1]\n link = Link(name1 + '-' + name2, link_type)\n link.setLine(center1.x(), center1.y(), center2.x(), center2.y())\n link.changeColor(color)\n scene.addItem(link)\n\n def __deleteLink(self, scene, name):\n link = self.__findItem(name, Link, scene.items())\n scene.removeItem(link)\n\n def __changeText(self, scene, name, text):\n txt = self.__findItem(name, Text, scene.items())\n txt.setPlainText(text)\n\n def __findItem(self, name, _type, items):\n for item in items:\n if isinstance(item, _type) and name == item.getName():\n return item\n return False\n\n def __clearLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link) and item.getType() != 'marry':\n scene.removeItem(item)\n\n def __clearUpLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link):\n scene.removeItem(item)\n\n def __refreshViewStep(self, info):\n record = info.split('\\n')\n length = len(record)\n lineiter = 0\n epoch = record[lineiter].strip().split(':')[1]\n lineiter += 1\n step = record[lineiter].strip().split(':')[1]\n lineiter += 1\n statu = record[lineiter].strip()\n if 'DONE' in statu:\n return 0\n elif 'is not activity' in statu:\n return 1\n elif 'is married' in statu:\n return 2\n couple = statu.replace(' ', 
'').split('target')\n man = self.__findItem('M_' + couple[0], Node, self.statu_scene.items())\n woman = self.__findItem('W_' + couple[1], Node, self.statu_scene.\n items())\n lineiter += 1\n sui_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Husband Rank' in record[lineiter]:\n husband_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Succeed' in record[lineiter]:\n self.__addLink(self.statu_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__changeText(self.statu_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.statu_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n self.__changeText(self.history_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.history_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n lineiter += 1\n if lineiter <= length:\n if 'threw away' in record[lineiter]:\n throwCouple = record[lineiter].replace(' ', '').split(\n 'threwaway')\n node1 = self.__findItem('M_' + throwCouple[1], Node,\n self.history_scene.items())\n node2 = self.__findItem('W_' + throwCouple[0], Node,\n self.history_scene.items())\n self.__addLink(self.history_scene, throwCouple[1] + '-' +\n throwCouple[0], node1, node2, (0, 255, 0), 'break')\n self.__deleteLink(self.statu_scene, throwCouple[1] +\n '-' + throwCouple[0])\n self.__changeText(self.statu_scene, 'M_' + throwCouple[\n 1] + '_S', '-1')\n self.__changeText(self.history_scene, 'M_' +\n throwCouple[1] + '_S', '-1')\n self.statu_view.update()\n self.history_view.update()\n elif 'Failed' in record[lineiter]:\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, (0, 0, 255), 'failed')\n 
self.statu_view.update()\n self.history_view.update()\n\n def nextStep(self):\n info = self.matching.step()\n self.showText.setText(info)\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n self.__refreshViewStep(info)\n\n def nextEpoch(self):\n info = self.matching.epoch()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n sep = info.split('\\n')[0]\n records = info.split(sep + '\\n')\n del records[0]\n for record in records:\n self.__refreshViewStep(sep + '\\n' + record)\n self.showText.setText(info)\n\n def exeToEnd(self):\n info = self.matching.exe_to_end()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n records = info.split('EPOCH')\n del records[0]\n for record in records:\n self.__refreshViewStep('EPOCH' + record)\n self.showText.setText(info)\n\n def closeEvent(self, event):\n reply = QtGui.QMessageBox.question(self, 'Message',\n 'Are you sure to quit?', QtGui.QMessageBox.Yes, QtGui.\n QMessageBox.No)\n if reply == QtGui.QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Link(QtGui.QGraphicsLineItem):\n\n def __init__(self, name, link_type):\n super(Link, self).__init__()\n self.__link_type = link_type\n self.__name = name\n\n def getName(self):\n return self.__name\n\n def getType(self):\n return self.__link_type\n\n def changeType(self, link_type):\n self.__link_type = link_type\n\n def changeColor(self, color):\n p = QtGui.QPen()\n c = p.color()\n c.setRgb(color[0], color[1], color[2])\n p.setColor(c)\n self.setPen(p)\n\n\nclass Text(QtGui.QGraphicsTextItem):\n\n def __init__(self, name, text):\n super(Text, self).__init__(text)\n self.__name = name\n\n def getName(self):\n return self.__name\n\n\nclass GUI(QtGui.QWidget):\n\n def __init__(self):\n super(GUI, self).__init__()\n self.exp = experiment.Experiments(20, 3)\n self.matching = self.exp.unidirectional_match()\n self.man_rank, self.woman_rank = self.matching.get_avg_rank()\n self.man_spouse, self.woman_spouse = self.matching.get_spouse_rank()\n self.initUI()\n self.showMaximized()\n\n def initUI(self):\n self.setWindowTitle(' Stable Matching ')\n grid = QtGui.QGridLayout()\n step_button = QtGui.QPushButton('STEP', self)\n epoch_button = QtGui.QPushButton('EPOCH', self)\n end_button = QtGui.QPushButton('END', self)\n self.showText = QtGui.QTextEdit(self)\n self.showText.setText('START! 
')\n self.statu_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.statu_scene)\n self.statu_view = QtGui.QGraphicsView()\n self.statu_view.setScene(self.statu_scene)\n self.statu_view.setMinimumSize(600, 600)\n self.statu_view.show()\n self.history_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.history_scene)\n self.history_view = QtGui.QGraphicsView()\n self.history_view.setScene(self.history_scene)\n self.history_view.setMinimumSize(600, 600)\n self.history_view.show()\n grid.addWidget(step_button, 1, 1)\n grid.addWidget(epoch_button, 2, 1)\n grid.addWidget(end_button, 3, 1)\n grid.addWidget(self.showText, 1, 2, 4, 1)\n grid.addWidget(self.statu_view, 1, 3, 4, 1)\n grid.addWidget(self.history_view, 1, 4, 4, 1)\n self.setLayout(grid)\n self.connect(step_button, QtCore.SIGNAL('clicked()'), self.nextStep)\n self.connect(epoch_button, QtCore.SIGNAL('clicked()'), self.nextEpoch)\n self.connect(end_button, QtCore.SIGNAL('clicked()'), self.exeToEnd)\n\n def initScene(self, scene):\n man_num = self.exp.get_man_num()\n woman_num = self.exp.get_woman_num()\n length = max(man_num, woman_num) * 30\n scene.setSceneRect(0, 0, 600, length)\n for i in range(man_num):\n node = self.__addNode(scene, 'M_' + str(i), 120, i * 30, 20, 20,\n (0, 0, 255))\n for i in range(woman_num):\n node = self.__addNode(scene, 'W_' + str(i), 480, i * 30, 20, 20,\n (255, 0, 0))\n\n def __addNode(self, scene, name, x, y, w, h, color=(0, 0, 0)):\n sex = name.split('_')[0]\n number = name.split('_')[1]\n rank_bias = spouse_bias = rank = 0\n if sex == 'M':\n rank = self.man_rank[int(number)]\n rank_bias = -2.0\n spouse_bias = -4.0\n elif sex == 'W':\n rank = self.woman_rank[int(number)]\n rank_bias = 2.0\n spouse_bias = 4.0\n node = Node(name)\n node.setRect(x, y, w, h)\n node.changeBrush(color, 1)\n if int(number) < 10:\n number = '0' + number\n text = QtGui.QGraphicsTextItem(number, node)\n text.setPos(x, y)\n text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n 
font.setWeight(99)\n text.setFont(font)\n rank_text = QtGui.QGraphicsTextItem(str(rank), node)\n rank_text.setPos(x + rank_bias * w, y)\n rank_text.setTextWidth(2 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n rank_text.setFont(font)\n spouse_text = Text(name + '_S', '-1')\n spouse_text.setPos(x + spouse_bias * w, y)\n spouse_text.setTextWidth(1.5 * w)\n font = QtGui.QFont('Times', 8)\n font.setWeight(99)\n spouse_text.setFont(font)\n scene.addItem(node)\n scene.addItem(spouse_text)\n\n def __addLink(self, scene, name, node1, node2, color=(0, 0, 0),\n link_type=''):\n center1 = node1.boundingRect().center()\n center2 = node2.boundingRect().center()\n name1 = node1.getName().split('_')[1]\n name2 = node2.getName().split('_')[1]\n link = Link(name1 + '-' + name2, link_type)\n link.setLine(center1.x(), center1.y(), center2.x(), center2.y())\n link.changeColor(color)\n scene.addItem(link)\n\n def __deleteLink(self, scene, name):\n link = self.__findItem(name, Link, scene.items())\n scene.removeItem(link)\n\n def __changeText(self, scene, name, text):\n txt = self.__findItem(name, Text, scene.items())\n txt.setPlainText(text)\n\n def __findItem(self, name, _type, items):\n for item in items:\n if isinstance(item, _type) and name == item.getName():\n return item\n return False\n\n def __clearLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link) and item.getType() != 'marry':\n scene.removeItem(item)\n\n def __clearUpLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link):\n scene.removeItem(item)\n\n def __refreshViewStep(self, info):\n record = info.split('\\n')\n length = len(record)\n lineiter = 0\n epoch = record[lineiter].strip().split(':')[1]\n lineiter += 1\n step = record[lineiter].strip().split(':')[1]\n lineiter += 1\n statu = record[lineiter].strip()\n if 'DONE' in statu:\n return 0\n elif 'is not activity' in statu:\n return 1\n elif 'is married' in statu:\n return 2\n couple = statu.replace(' ', 
'').split('target')\n man = self.__findItem('M_' + couple[0], Node, self.statu_scene.items())\n woman = self.__findItem('W_' + couple[1], Node, self.statu_scene.\n items())\n lineiter += 1\n sui_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Husband Rank' in record[lineiter]:\n husband_rank = record[lineiter].replace(' ', '').split(':')[1]\n lineiter += 1\n if 'Succeed' in record[lineiter]:\n self.__addLink(self.statu_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, link_type='marry')\n self.__changeText(self.statu_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.statu_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n self.__changeText(self.history_scene, 'M_' + couple[0] + '_S',\n str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.history_scene, 'W_' + couple[1] + '_S',\n str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n lineiter += 1\n if lineiter <= length:\n if 'threw away' in record[lineiter]:\n throwCouple = record[lineiter].replace(' ', '').split(\n 'threwaway')\n node1 = self.__findItem('M_' + throwCouple[1], Node,\n self.history_scene.items())\n node2 = self.__findItem('W_' + throwCouple[0], Node,\n self.history_scene.items())\n self.__addLink(self.history_scene, throwCouple[1] + '-' +\n throwCouple[0], node1, node2, (0, 255, 0), 'break')\n self.__deleteLink(self.statu_scene, throwCouple[1] +\n '-' + throwCouple[0])\n self.__changeText(self.statu_scene, 'M_' + throwCouple[\n 1] + '_S', '-1')\n self.__changeText(self.history_scene, 'M_' +\n throwCouple[1] + '_S', '-1')\n self.statu_view.update()\n self.history_view.update()\n elif 'Failed' in record[lineiter]:\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1],\n man, woman, (0, 0, 255), 'failed')\n 
self.statu_view.update()\n self.history_view.update()\n\n def nextStep(self):\n info = self.matching.step()\n self.showText.setText(info)\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n self.__refreshViewStep(info)\n\n def nextEpoch(self):\n info = self.matching.epoch()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n sep = info.split('\\n')[0]\n records = info.split(sep + '\\n')\n del records[0]\n for record in records:\n self.__refreshViewStep(sep + '\\n' + record)\n self.showText.setText(info)\n\n def exeToEnd(self):\n info = self.matching.exe_to_end()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n records = info.split('EPOCH')\n del records[0]\n for record in records:\n self.__refreshViewStep('EPOCH' + record)\n self.showText.setText(info)\n\n def closeEvent(self, event):\n reply = QtGui.QMessageBox.question(self, 'Message',\n 'Are you sure to quit?', QtGui.QMessageBox.Yes, QtGui.\n QMessageBox.No)\n if reply == QtGui.QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\n\n<mask token>\n",
"step-5": "# -*- coding:utf-8 -*- \nimport sys\nfrom PyQt4 import QtGui,QtCore\nimport experiment\n\nclass Node(QtGui.QGraphicsEllipseItem):\n def __init__(self,name):\n super(Node, self).__init__()\n self.__name = name\n \n def getName(self):\n return self.__name\n def changeBrush(self, color, style):\n b = QtGui.QBrush()\n b.setStyle(style)\n c = b.color()\n c.setRgb(color[0],color[1],color[2])\n b.setColor(c)\n self.setBrush(b)\n\nclass Link(QtGui.QGraphicsLineItem):\n def __init__(self,name,link_type):\n super(Link, self).__init__()\n self.__link_type = link_type\n self.__name = name\n def getName(self):\n return self.__name\n def getType(self):\n return self.__link_type\n def changeType(self,link_type):\n self.__link_type = link_type\n def changeColor(self,color):\n p = QtGui.QPen()\n c = p.color()\n c.setRgb(color[0],color[1],color[2])\n p.setColor(c)\n self.setPen(p)\n\nclass Text(QtGui.QGraphicsTextItem):\n def __init__(self,name,text):\n super(Text, self).__init__(text)\n self.__name = name\n def getName(self):\n return self.__name\n \nclass GUI(QtGui.QWidget):\n\n def __init__(self):\n super(GUI, self).__init__()\n self.exp = experiment.Experiments(20,3)\n self.matching = self.exp.unidirectional_match()\n self.man_rank, self.woman_rank = self.matching.get_avg_rank()\n self.man_spouse, self.woman_spouse = self.matching.get_spouse_rank()\n self.initUI()\n self.showMaximized()\n \n def initUI(self):\n self.setWindowTitle(' Stable Matching ')\n grid = QtGui.QGridLayout()\n step_button = QtGui.QPushButton('STEP',self)\n epoch_button = QtGui.QPushButton('EPOCH',self)\n end_button = QtGui.QPushButton('END',self)\n self.showText = QtGui.QTextEdit(self)\n self.showText.setText('START! 
')\n\n self.statu_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.statu_scene)\n self.statu_view = QtGui.QGraphicsView()\n self.statu_view.setScene(self.statu_scene) \n self.statu_view.setMinimumSize(600,600)\n self.statu_view.show()\n \n self.history_scene = QtGui.QGraphicsScene(self)\n self.initScene(self.history_scene)\n self.history_view = QtGui.QGraphicsView()\n self.history_view.setScene(self.history_scene)\n self.history_view.setMinimumSize(600,600)\n self.history_view.show()\n \n grid.addWidget(step_button,1,1)\n grid.addWidget(epoch_button,2,1)\n grid.addWidget(end_button,3,1)\n grid.addWidget(self.showText,1,2,4,1)\n grid.addWidget(self.statu_view,1,3,4,1)\n grid.addWidget(self.history_view,1,4,4,1)\n self.setLayout(grid)\n \n self.connect(step_button,QtCore.SIGNAL('clicked()'),self.nextStep)\n self.connect(epoch_button,QtCore.SIGNAL('clicked()'),self.nextEpoch)\n self.connect(end_button,QtCore.SIGNAL('clicked()'),self.exeToEnd)\n\n def initScene(self,scene):\n man_num = self.exp.get_man_num()\n woman_num = self.exp.get_woman_num()\n length = max(man_num,woman_num) * 30\n scene.setSceneRect(0,0,600,length)\n for i in range(man_num):\n node = self.__addNode(scene, 'M_'+str(i),120,i*30,20,20,(0,0,255))\n \n for i in range(woman_num):\n node = self.__addNode(scene, 'W_'+str(i),480,i*30,20,20,(255,0,0))\n \n def __addNode(self, scene, name, x, y, w, h, color=(0,0,0)):\n sex = name.split('_')[0]\n number = name.split('_')[1]\n rank_bias = spouse_bias = rank = 0\n if sex == 'M':\n rank = self.man_rank[int(number)]\n rank_bias = -2.0\n spouse_bias = -4.0\n elif sex == 'W':\n rank = self.woman_rank[int(number)]\n rank_bias = 2.0\n spouse_bias = 4.0\n node = Node(name)\n node.setRect(x,y,w,h)\n node.changeBrush(color,1)\n if int(number) < 10:\n number = '0' + number\n \n text = QtGui.QGraphicsTextItem (number, node)\n text.setPos(x,y)\n text.setTextWidth(1.5*w)\n font = QtGui.QFont('Times',8)\n font.setWeight(99)\n text.setFont(font)\n\n rank_text = 
QtGui.QGraphicsTextItem (str(rank), node)\n rank_text.setPos(x + rank_bias*w,y)\n rank_text.setTextWidth(2*w)\n font = QtGui.QFont('Times',8)\n font.setWeight(99)\n rank_text.setFont(font)\n\n spouse_text = Text(name+'_S', '-1')\n spouse_text.setPos(x + spouse_bias*w,y)\n spouse_text.setTextWidth(1.5*w)\n font = QtGui.QFont('Times',8)\n font.setWeight(99)\n spouse_text.setFont(font)\n \n scene.addItem(node)\n scene.addItem(spouse_text)\n\n def __addLink(self, scene, name, node1, node2, color = (0,0,0), link_type = ''):\n center1 = node1.boundingRect().center()\n center2 = node2.boundingRect().center()\n name1 = node1.getName().split('_')[1]\n name2 = node2.getName().split('_')[1]\n link = Link(name1 + '-' + name2, link_type)\n link.setLine(center1.x(),center1.y(),center2.x(),center2.y())\n link.changeColor(color)\n scene.addItem(link)\n \n def __deleteLink(self, scene, name):\n link = self.__findItem(name, Link, scene.items())\n scene.removeItem(link)\n\n def __changeText(self, scene, name, text):\n txt = self.__findItem(name, Text, scene.items())\n txt.setPlainText(text)\n \n def __findItem(self, name, _type, items):\n for item in items:\n if isinstance(item, _type) and name == item.getName():\n return item\n return False\n\n def __clearLinks(self, scene):\n for item in scene.items():\n if isinstance(item,Link) and item.getType() != 'marry':\n scene.removeItem(item)\n\n def __clearUpLinks(self, scene):\n for item in scene.items():\n if isinstance(item, Link):\n scene.removeItem(item)\n\n def __refreshViewStep(self, info):\n record = info.split('\\n')\n length = len(record)\n lineiter = 0\n epoch = record[lineiter].strip().split(':')[1]\n lineiter += 1\n step = record[lineiter].strip().split(':')[1]\n lineiter += 1\n statu = record[lineiter].strip()\n if 'DONE' in statu:\n return 0\n elif 'is not activity' in statu:\n return 1\n elif 'is married' in statu:\n return 2 \n couple = statu.replace(' ','').split('target')\n man = self.__findItem('M_'+couple[0], Node, 
self.statu_scene.items())\n woman = self.__findItem('W_'+couple[1], Node, self.statu_scene.items())\n lineiter += 1\n sui_rank = record[lineiter].replace(' ','').split(':')[1]\n lineiter += 1\n if 'Husband Rank' in record[lineiter]:\n husband_rank = record[lineiter].replace(' ','').split(':')[1]\n lineiter += 1\n if 'Succeed' in record[lineiter]:\n self.__addLink(self.statu_scene, couple[0] + '-' + couple[1], man, woman, link_type = 'marry')\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1], man, woman, link_type = 'marry')\n self.__changeText(self.statu_scene, 'M_' + couple[0] + '_S', str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.statu_scene, 'W_' + couple[1] + '_S', str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n self.__changeText(self.history_scene, 'M_' + couple[0] + '_S', str(self.matching.get_spouse_rank(int(couple[0]) + 1)))\n self.__changeText(self.history_scene, 'W_' + couple[1] + '_S', str(self.matching.get_spouse_rank(-(int(couple[1]) + 1))))\n lineiter += 1\n if lineiter <= length:\n if 'threw away' in record[lineiter]:\n throwCouple = record[lineiter].replace(' ','').split('threwaway')\n node1 = self.__findItem('M_' + throwCouple[1], Node, self.history_scene.items())\n node2 = self.__findItem('W_' + throwCouple[0], Node, self.history_scene.items())\n self.__addLink(self.history_scene, throwCouple[1] + '-' + throwCouple[0], node1, node2, (0,255,0) , 'break')\n self.__deleteLink(self.statu_scene, throwCouple[1] + '-' + throwCouple[0])\n self.__changeText(self.statu_scene, 'M_' + throwCouple[1] + '_S', '-1')\n self.__changeText(self.history_scene, 'M_' + throwCouple[1] + '_S', '-1')\n self.statu_view.update()\n self.history_view.update()\n elif 'Failed' in record[lineiter]:\n self.__addLink(self.history_scene, couple[0] + '-' + couple[1], man, woman, (0,0,255) , 'failed')\n self.statu_view.update()\n self.history_view.update()\n \n def nextStep(self):\n info = self.matching.step()\n 
self.showText.setText(info)\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n self.__refreshViewStep(info) \n\n def nextEpoch(self):\n info = self.matching.epoch()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n sep = info.split('\\n')[0]\n records = info.split(sep+'\\n')\n del records[0]\n for record in records:\n self.__refreshViewStep(sep+'\\n'+record) \n self.showText.setText(info)\n\n def exeToEnd(self):\n info = self.matching.exe_to_end()\n self.__clearLinks(self.statu_scene)\n self.__clearUpLinks(self.history_scene)\n records = info.split('EPOCH')\n del records[0]\n for record in records:\n self.__refreshViewStep('EPOCH'+record)\n \n self.showText.setText(info)\n \n def closeEvent(self, event):\n reply = QtGui.QMessageBox.question(self, 'Message',\n 'Are you sure to quit?', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)\n if reply == QtGui.QMessageBox.Yes:\n event.accept()\n else:\n event.ignore()\n\nif __name__ == '__main__':\n app = QtGui.QApplication(sys.argv)\n gui = GUI()\n gui.show()\n sys.exit(app.exec_())\n",
"step-ids": [
18,
20,
23,
25,
32
]
}
|
[
18,
20,
23,
25,
32
] |
from random import randint
#given a list of names, cities and neigborhoods, generate a client table.
#------------------------MODEL------------------------------
#([cliente_id], [nome], [sexo], [telefone], [cpf], [cidade_nome], [cidade_bairro_nome], [cidade_bairro_cep])
class Employee:
	"""A single employee record that round-trips to/from a T-SQL INSERT
	statement for the [dbo].[Funcionario] table."""

	def __init__(self, id, name, sex, phone, cpf, payment):
		"""
		:param id: numeric employee id
		:param name: full name
		:param sex: 'M' or 'F'
		:param phone: phone formatted 'NNNN-NNNN'
		:param cpf: Brazilian CPF formatted 'NNN.NNN.NNN-NN'
		:param payment: monthly salary as a number (rendered with 2 decimals)
		"""
		self.id = id
		self.name = name
		self.sex = sex
		self.phone = phone
		self.cpf = cpf
		self.payment = payment

	def __str__(self):
		return '(' + self.name + '-' + self.cpf + ')'

	def insertStmnt(self):
		"""Render this employee as a T-SQL INSERT statement."""
		return ("INSERT INTO [dbo].[Funcionario] ([funcionario_id], [funcionario_nome], [sexo], [telefone], [cpf], [salario])"+
			" VALUES ({!s},'{}','{}','{}','{}',CAST('${:.2f}' AS MONEY));").format(self.id, self.name, self.sex, self.phone, self.cpf, self.payment)

	@staticmethod
	def employee_from_insert_stmnt(stmnt):
		"""Parse a statement produced by insertStmnt() back into an Employee.

		Returns None for lines that are not INSERT statements (e.g. 'GO').

		Bug fix: the previous version stopped at the FIRST ')' after VALUES,
		which truncated the trailing CAST('$...' AS MONEY) expression and left
		the payment field as garbage text.  We now take the LAST ')' (the one
		closing the VALUES tuple) and extract the dollar amount explicitly.
		"""
		if 'INSERT' not in stmnt:
			return None
		values = stmnt.find("VALUES")
		first_arg = stmnt.find('(', values)
		end = stmnt.rfind(')')  # last ')' closes the VALUES tuple
		args = stmnt[first_arg + 1:end].split(',')
		for idx, val in enumerate(args):
			args[idx] = val.strip('\'')
		# The last field looks like "CAST('$1234.56' AS MONEY)"; pull the
		# numeric salary out of it.  If no '$' is present (hand-written
		# statement), leave the raw string as a best-effort fallback.
		pay = args[-1]
		dollar = pay.find('$')
		if dollar != -1:
			quote = pay.find('\'', dollar)
			args[-1] = float(pay[dollar + 1:quote])
		return Employee(*args)
#-----------------AUXILIARY FUNCTIONS-----------------------
def load_employees(file_path):
	"""Given a file path to a sql file with employee insert statements,
	return the python list of corresponding Employee instances.

	Non-INSERT lines (USE, GO, blanks) are skipped because
	Employee.employee_from_insert_stmnt returns None for them.

	:param file_path: path to a .sql file produced by this script
	:return: list of Employee objects, in file order
	"""
	# 'with' guarantees the file handle is closed (the previous version
	# left it open until garbage collection).
	with open(file_path) as sql_file:
		parsed = (Employee.employee_from_insert_stmnt(line) for line in sql_file)
		return [employee for employee in parsed if employee]
#-----------------------GENERATOR---------------------------
def _format_phone(phone_id):
	"""Zero-padded 8-digit phone rendered as 'NNNN-NNNN'."""
	digits = str(phone_id).zfill(8)
	return digits[:4] + '-' + digits[4:]


def _format_cpf(cpf_id):
	"""Zero-padded 11-digit CPF rendered as 'NNN.NNN.NNN-NN'."""
	digits = str(cpf_id).zfill(11)
	return digits[:3] + '.' + digits[3:6] + '.' + digits[6:9] + '-' + digits[9:]


def generate_employees(maleNames, femaleNames, amount):
	"""Build Employee records for up to *amount* male and *amount* female names.

	Males come first, then females, sharing one running id sequence
	(employee ids start at 1; the synthetic phone and cpf sequences start
	at 0 and advance in lockstep with the id).  Salary is a random amount
	between 2100.00 and 3100.00.

	:param maleNames: list of male names (only the first *amount* are used)
	:param femaleNames: list of female names (only the first *amount* are used)
	:param amount: maximum number of employees per sex
	:return: list of Employee objects
	"""
	employees = []
	employee_id = 1
	base_payment = 2000.00
	# The two previous copy-pasted loops differed only in sex and name list;
	# collapsed into one loop over (sex, names) pairs.
	for sex, names in (('M', maleNames[:amount]), ('F', femaleNames[:amount])):
		for name in names:
			payment = base_payment + randint(100, 1100)
			# phone_id/cpf_id always trailed employee_id by exactly 1.
			employees.append(Employee(employee_id, name, sex,
									  _format_phone(employee_id - 1),
									  _format_cpf(employee_id - 1),
									  payment))
			employee_id += 1
	return employees
#-------------------------ACTUAL SCRIPT-----------------------
def main():
	"""Generate employee INSERT statements and write them to employees.sql."""
	#----------------------INPUT--------------------------------
	maleNames = [line.rstrip('\n') for line in open('employeeMaleNames.txt')]
	femaleNames = [line.rstrip('\n') for line in open('employeeFemaleNames.txt')]
	# NOTE(review): cities/neighborhoods are read but never used by the
	# employee generator; presumably kept for parity with the client script.
	cities = [line.rstrip('\n') for line in open('cities.txt')]
	neighborhoods = [line.rstrip('\n') for line in open('neighborhoods.txt')]

	#--------------------EXECUTION--------------------------
	employees = generate_employees(maleNames, femaleNames, 15)

	#--------------------------OUTPUT-------------------------------
	count = {}
	with open("employees.sql", "w") as sql_employees_file:
		sql_employees_file.write("USE [lolbibis]\nGO\n\n")
		for employee in employees:
			sql_employees_file.write(employee.insertStmnt() + "\n")
		sql_employees_file.write('GO\n\n')

	#------------------STATS REPORTING----------------------
	# Bug fix: the old loop used a Python 2 print statement (a syntax error
	# on Python 3) and referenced an undefined name n_names.  count is never
	# populated anywhere, so this loop is effectively dead code, but it is
	# now at least syntactically valid and runnable.
	n_names = len(maleNames) + len(femaleNames)
	for key in count:
		print(key + ':' + str(count[key]) + ' - ' + str(count[key] / (n_names + 0.0)))
|
normal
|
{
"blob_id": "a9ce341ffe26ab6c476237030e23e6ae57b8fa33",
"index": 7560,
"step-1": "from random import randint\n\n#given a list of names, cities and neigborhoods, generate a client table.\n\n#------------------------MODEL------------------------------\n#([cliente_id], [nome], [sexo], [telefone], [cpf], [cidade_nome], [cidade_bairro_nome], [cidade_bairro_cep])\nclass Employee:\n\tdef __init__(self, id, name ,sex, phone, cpf, payment):\n\t\tself.id = id\n\t\tself.name = name\n\t\tself.sex = sex\n\t\tself.phone = phone\n\t\tself.cpf = cpf\n\t\tself.payment = payment\n\n\tdef __str__(self):\n\t\treturn '(' + self.name + '-' + self.cpf + ')'\n\n\tdef insertStmnt(self):\n\t\treturn (\"INSERT INTO [dbo].[Funcionario] ([funcionario_id], [funcionario_nome], [sexo], [telefone], [cpf], [salario])\"+\n\t\t\t\" VALUES ({!s},'{}','{}','{}','{}',CAST('${:.2f}' AS MONEY));\").format(self.id, self.name, self.sex, self.phone, self.cpf, self.payment)\n\n\t\n\t@staticmethod\n\tdef employee_from_insert_stmnt(stmnt):\n\t\tif 'INSERT' not in stmnt:\n\t\t\treturn None\n\t\tvalues = stmnt.find(\"VALUES\")\n\t\tfirst_arg = stmnt.find('(', values)\n\t\tend = stmnt.find(')', values)\n\t\targs = stmnt[first_arg+1:end].split(',')\n\t\tfor idx, val in enumerate(args):\n\t\t\targs[idx] = val.strip('\\'')\n\t\treturn Employee(*args)\n\n\n\n#-----------------AUXILIARY FUNCTIONS-----------------------\n\ndef load_employees(file_path):\n\t\"\"\"Given a file path to a sql file with employee insert statements, return me the python list of corresponding Employee instances\"\"\"\n\temployees = []\n\tfor line in open(file_path):\n\t\temployee = Employee.employee_from_insert_stmnt(line)\n\t\tif employee:\n\t\t\temployees.append(employee)\n\treturn employees\n\n#-----------------------GENERATOR---------------------------\ndef generate_employees(maleNames, femaleNames, amount):\n\tn_maleNames = len(maleNames)\n\tn_femaleNames = len(femaleNames)\n\n\tmaleNames = maleNames[:amount]\n\tfemaleNames = femaleNames[:amount]\n\n\temployees = []\n\temployee_id = 1\n\tsexes = ['M', 
'F']\n\tphone_id = 0\n\tcpf_id = 0\n\tpayment = 2000.00\n\n\tfor name in maleNames:\n\t\tsex = sexes[0]\n\n\t\tphone = str(phone_id).zfill(8)\n\t\tphone = phone[:4] + '-' + phone[4:]\n\n\t\tcpf = str(cpf_id).zfill(11)\n\t\tcpf = cpf[:3] + '.' + cpf[3:6] + '.' + cpf[6:9] + '-' + cpf[9:]\n\n\t\tsinglePayment = payment + randint(100, 1100)\n\n\t\temployee = Employee(employee_id, name, sex, phone, cpf, singlePayment)\n\t\temployees.append(employee)\n\n\t\temployee_id += 1\n\t\tphone_id += 1\n\t\tcpf_id += 1\n\n\tfor name in femaleNames:\n\t\tsex = sexes[1]\n\n\t\tphone = str(phone_id).zfill(8)\n\t\tphone = phone[:4] + '-' + phone[4:]\n\n\t\tcpf = str(cpf_id).zfill(11)\n\t\tcpf = cpf[:3] + '.' + cpf[3:6] + '.' + cpf[6:9] + '-' + cpf[9:]\n\n\t\tsinglePayment = payment + randint(100, 1100)\n\n\t\temployee = Employee(employee_id, name, sex, phone, cpf, singlePayment)\n\t\temployees.append(employee)\n\n\t\temployee_id += 1\n\t\tphone_id += 1\n\t\tcpf_id += 1\n\n\treturn employees\n\n\n#-------------------------ACTUAL SCRIPT-----------------------\n\ndef main():\n\n\t#----------------------INPUT--------------------------------\n\t\n\tmaleNames = [line.rstrip('\\n') for line in open('employeeMaleNames.txt')]\n\tfemaleNames = [line.rstrip('\\n') for line in open('employeeFemaleNames.txt')]\n\tcities = [line.rstrip('\\n') for line in open('cities.txt')]\n\tneighborhoods = [line.rstrip('\\n') for line in open('neighborhoods.txt')]\n\n\t#--------------------EXECUTION--------------------------\n\temployees = generate_employees(maleNames, femaleNames, 15)\n\n\t#--------------------------OUTPUT-------------------------------\n\tcount = {}\n\tsql_employees_file = open(\"employees.sql\", \"w\")\n\tsql_employees_file.write(\"USE [lolbibis]\\nGO\\n\\n\")\n\tfor employee in employees:\n\t\t#print person\n\t\t#print person.insertStmnt()\t\n\t\tsql_employees_file.write(employee.insertStmnt() + 
\"\\n\")\n\tsql_employees_file.write('GO\\n\\n')\n\tsql_employees_file.close()\n\n\n\t#------------------STATS REPORTING----------------------\n\tfor key in count:\n\t\tn_maleNames = len(maleNames)\n\t\tn_femaleNames = len(femaleNames)\n\t\tprint key+':' + str(count[key]) + ' - ' + str((count[key]/(n_names+0.0)))\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
class Day8MemoryManeuver:
    """Solver for the Advent of Code 2018 day 8 license-tree puzzle.

    The input is a flat, space-separated list of ints encoding a tree:
    each node is <child count> <metadata count> <children...> <metadata...>.
    """

    def __init__(self, use_reference_count=False):
        """
        Args:
            use_reference_count (bool):
                True: If an entry has child nodes, the meta data are referring to the results of
                the child node
                False: Sum all meta data up
        """
        self._use_child_references = use_reference_count

    def solve(self, license_input):
        """Return the puzzle answer for a space-separated number string."""
        _, result = self._solve(license_input.split(" "), 0)
        return result

    def _solve(self, structure, pos):
        """Recursively evaluate the node starting at index ``pos``.

        ``pos`` points at the node's child-count entry on entry and at the
        node's last consumed entry on return (the caller advances it past
        the node before the next sibling).

        Returns:
            tuple: (new_pos, value_of_node)
        """
        if pos >= len(structure):
            return pos, 0
        child_node_count = int(structure[pos])
        pos += 1
        meta_count = int(structure[pos])
        result = 0
        child_results = []
        for i in range(child_node_count):
            pos += 1
            pos, tmp = self._solve(structure, pos)
            if not self._use_child_references:
                # Sum mode: a node's value includes all descendant metadata.
                result += tmp
            child_results.append(tmp)
        if meta_count > 0:
            for i in range(pos, pos + meta_count):
                current = int(structure[i + 1])
                if self._use_child_references and child_node_count > 0:
                    # Bug fix: a metadata entry of 0 refers to NO child.
                    # The old check (current <= len) let 0 index
                    # child_results[-1], double-counting the last child.
                    if 1 <= current <= len(child_results):
                        result += child_results[current - 1]
                else:
                    result += current
                pos += 1
        return pos, result
|
normal
|
{
"blob_id": "84d096a51fa052ee210e975ab61c0cbbf05bc5ae",
"index": 8358,
"step-1": "class Day8MemoryManeuver:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class Day8MemoryManeuver:\n <mask token>\n <mask token>\n\n def _solve(self, structure, pos):\n if pos >= len(structure):\n return pos, 0\n child_node_count = int(structure[pos])\n pos += 1\n meta_count = int(structure[pos])\n result = 0\n child_results = []\n for i in range(child_node_count):\n pos += 1\n pos, tmp = self._solve(structure, pos)\n if not self._use_child_references:\n result += tmp\n child_results.append(tmp)\n if meta_count > 0:\n for i in range(pos, pos + meta_count):\n current = int(structure[i + 1])\n if self._use_child_references and child_node_count > 0:\n if current <= len(child_results):\n result += child_results[current - 1]\n else:\n result += current\n pos += 1\n return pos, result\n",
"step-3": "class Day8MemoryManeuver:\n <mask token>\n\n def solve(self, license_input):\n _, result = self._solve(license_input.split(' '), 0)\n return result\n\n def _solve(self, structure, pos):\n if pos >= len(structure):\n return pos, 0\n child_node_count = int(structure[pos])\n pos += 1\n meta_count = int(structure[pos])\n result = 0\n child_results = []\n for i in range(child_node_count):\n pos += 1\n pos, tmp = self._solve(structure, pos)\n if not self._use_child_references:\n result += tmp\n child_results.append(tmp)\n if meta_count > 0:\n for i in range(pos, pos + meta_count):\n current = int(structure[i + 1])\n if self._use_child_references and child_node_count > 0:\n if current <= len(child_results):\n result += child_results[current - 1]\n else:\n result += current\n pos += 1\n return pos, result\n",
"step-4": "class Day8MemoryManeuver:\n\n def __init__(self, use_reference_count=False):\n \"\"\"\n Args:\n use_reference_count (bool):\n True: If an entry has child nodes, the meta data are referring to the results of\n the child node\n False: Sum all meta data up\n \"\"\"\n self._use_child_references = use_reference_count\n\n def solve(self, license_input):\n _, result = self._solve(license_input.split(' '), 0)\n return result\n\n def _solve(self, structure, pos):\n if pos >= len(structure):\n return pos, 0\n child_node_count = int(structure[pos])\n pos += 1\n meta_count = int(structure[pos])\n result = 0\n child_results = []\n for i in range(child_node_count):\n pos += 1\n pos, tmp = self._solve(structure, pos)\n if not self._use_child_references:\n result += tmp\n child_results.append(tmp)\n if meta_count > 0:\n for i in range(pos, pos + meta_count):\n current = int(structure[i + 1])\n if self._use_child_references and child_node_count > 0:\n if current <= len(child_results):\n result += child_results[current - 1]\n else:\n result += current\n pos += 1\n return pos, result\n",
"step-5": "class Day8MemoryManeuver:\n def __init__(self, use_reference_count=False):\n \"\"\"\n Args:\n use_reference_count (bool):\n True: If an entry has child nodes, the meta data are referring to the results of\n the child node\n False: Sum all meta data up\n \"\"\"\n self._use_child_references = use_reference_count\n\n def solve(self, license_input):\n _, result = self._solve(license_input.split(\" \"), 0)\n return result\n\n def _solve(self, structure, pos):\n if pos >= len(structure):\n return pos, 0\n child_node_count = int(structure[pos])\n pos += 1\n meta_count = int(structure[pos])\n result = 0\n child_results = []\n for i in range(child_node_count):\n pos += 1\n pos, tmp = self._solve(structure, pos)\n if not self._use_child_references:\n result += tmp\n child_results.append(tmp)\n if meta_count > 0:\n for i in range(pos, pos + meta_count):\n current = int(structure[i + 1])\n if self._use_child_references and child_node_count > 0:\n if current <= len(child_results):\n result += child_results[current - 1]\n else:\n result += current\n pos += 1\n return pos, result\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from pandas_datareader import data as pdr
from datetime import date
class YahooHelper:
    """
    Helper class to fetch Yahoo Finance stock data and export it to csv.
    """

    def __init__(self):
        """
        Default constructor which initiates object
        """
        pass

    def get_data(self, symbol):
        """
        Function to collect stock market data and store it on ``self.data``.
        (The old docstring said "Twitter data" -- a copy-paste error.)

        :param symbol: The Symbol used to identify
            an NASDAQ-100 stock.
        """
        # Collect stock market data
        self.data = self.get_stock_data(symbol)

    # Symbol lookup:
    def get_stock_data(self, symbol):
        """
        Function to get stock data for current year by ticker symbol.

        Bug fix: this method was declared without ``self`` although it is
        called as ``self.get_stock_data(symbol)``, which raised
        "takes 1 positional argument but 2 were given" at runtime.

        :param symbol: The Symbol used to identify
            an NASDAQ-100 stock.
        :return: Stock data for current year
        """
        # Set current dates
        start = date(date.today().year, 1, 1)  # first of current year
        end = date.today()  # today

        # Get Yahoo data
        data = pdr.get_data_yahoo(symbol, start=start, end=end)

        # Rename columns
        data.columns = ["Highest price (USD)",
                        "Lowest price (USD)",
                        "Opening price (USD)",
                        "Closing price (USD)",
                        "Volume",
                        "Adjusted closing price (USD)"]

        return data

    # Export data to csv
    def export_data(self):
        """
        Function to extract stock data to csv, prefixed with a comment
        header describing the dataset.

        NOTE(review): the old version first wrote the bare csv to the same
        path (append-mode open + to_csv) and then immediately overwrote it
        with the template below; the redundant first write was removed --
        the final file content is unchanged.
        """
        # Header information
        template = "# TSLA Stocks over time \n" + \
                   "# --------------------------------------------------------------------- \n" + \
                   "# Export of stock data of \"Tesla Inc.\" for current year. The dataset\n" + \
                   "# consists of selected key stock exchange figures on a daily basis. \n" + \
                   "# The data can be recreated at any time with the \"load_data.py\"-script.\n" + \
                   "# The data record contains one record sorted per trading day. \n" + \
                   "#\n" + \
                   "# The data is restricted to the NASDAQ symbol \"TSLA\" which represents \n" + \
                   "# the company Tesla Inc. The stock information was limited to the period \n" + \
                   "# from 1st January to the current day of the year. \n" + \
                   "#\n" + \
                   "# Extracted via Yahoo-Finance API, https://pypi.org/project/yahoo-finance/ \n" + \
                   "# December, 26, 2018, Marco Romanutti \n" + \
                   "#\n" + \
                   "#\n" + \
                   "{}"

        with open('../data/yahoodata.csv', 'w', encoding='utf-8') as fp:
            fp.write(template.format(self.data.to_csv(index=True, encoding='utf-8')))
|
normal
|
{
"blob_id": "b4b4dad5cf630dc1a627e323ea63577583d1e1c3",
"index": 1551,
"step-1": "<mask token>\n\n\nclass YahooHelper:\n <mask token>\n\n def __init__(self):\n \"\"\"\n Default constructor which initiates object\n \"\"\"\n pass\n <mask token>\n\n def get_stock_data(symbol):\n \"\"\"\n Function to get stock data for current year by ticker symbol.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n :return: Stock data for current year\n \"\"\"\n start = date(date.today().year, 1, 1)\n end = date.today()\n data = pdr.get_data_yahoo(symbol, start=start, end=end)\n data.columns = ['Highest price (USD)', 'Lowest price (USD)',\n 'Opening price (USD)', 'Closing price (USD)', 'Volume',\n 'Adjusted closing price (USD)']\n return data\n\n def export_data(self):\n \"\"\"\n Function to extract stock data to csv.\n \"\"\"\n with open('../data/yahoodata.csv', 'a', encoding='utf-8') as f:\n self.data.to_csv('../data/yahoodata.csv', sep='\\t', encoding=\n 'utf-8')\n template = ('# TSLA Stocks over time \\n' +\n \"\"\"# --------------------------------------------------------------------- \n\"\"\"\n +\n '# Export of stock data of \"Tesla Inc.\" for current year. The dataset\\n'\n +\n \"\"\"# consists of selected key stock exchange figures on a daily basis. \n\"\"\"\n +\n '# The data can be recreated at any time with the \"load_data.py\"-script.\\n'\n +\n \"\"\"# The data record contains one record sorted per trading day. \n\"\"\"\n + '#\\n' +\n '# The data is restricted to the NASDAQ symbol \"TSLA\" which represents \\n'\n +\n \"\"\"# the company Tesla Inc. The stock information was limited to the period \n\"\"\"\n + '# from 1st January to the current day of the year. \\n' +\n '#\\n' +\n \"\"\"# Extracted via Yahoo-Finance API, https://pypi.org/project/yahoo-finance/ \n\"\"\"\n + '# December, 26, 2018, Marco Romanutti \\n' + '#\\n' + '#\\n' +\n '{}')\n with open('../data/yahoodata.csv', 'w', encoding='utf-8') as fp:\n fp.write(template.format(self.data.to_csv(index=True, encoding=\n 'utf-8')))\n",
"step-2": "<mask token>\n\n\nclass YahooHelper:\n <mask token>\n\n def __init__(self):\n \"\"\"\n Default constructor which initiates object\n \"\"\"\n pass\n\n def get_data(self, symbol):\n \"\"\"\n Function to collect Twitter data.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n \"\"\"\n self.data = self.get_stock_data(symbol)\n\n def get_stock_data(symbol):\n \"\"\"\n Function to get stock data for current year by ticker symbol.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n :return: Stock data for current year\n \"\"\"\n start = date(date.today().year, 1, 1)\n end = date.today()\n data = pdr.get_data_yahoo(symbol, start=start, end=end)\n data.columns = ['Highest price (USD)', 'Lowest price (USD)',\n 'Opening price (USD)', 'Closing price (USD)', 'Volume',\n 'Adjusted closing price (USD)']\n return data\n\n def export_data(self):\n \"\"\"\n Function to extract stock data to csv.\n \"\"\"\n with open('../data/yahoodata.csv', 'a', encoding='utf-8') as f:\n self.data.to_csv('../data/yahoodata.csv', sep='\\t', encoding=\n 'utf-8')\n template = ('# TSLA Stocks over time \\n' +\n \"\"\"# --------------------------------------------------------------------- \n\"\"\"\n +\n '# Export of stock data of \"Tesla Inc.\" for current year. The dataset\\n'\n +\n \"\"\"# consists of selected key stock exchange figures on a daily basis. \n\"\"\"\n +\n '# The data can be recreated at any time with the \"load_data.py\"-script.\\n'\n +\n \"\"\"# The data record contains one record sorted per trading day. \n\"\"\"\n + '#\\n' +\n '# The data is restricted to the NASDAQ symbol \"TSLA\" which represents \\n'\n +\n \"\"\"# the company Tesla Inc. The stock information was limited to the period \n\"\"\"\n + '# from 1st January to the current day of the year. 
\\n' +\n '#\\n' +\n \"\"\"# Extracted via Yahoo-Finance API, https://pypi.org/project/yahoo-finance/ \n\"\"\"\n + '# December, 26, 2018, Marco Romanutti \\n' + '#\\n' + '#\\n' +\n '{}')\n with open('../data/yahoodata.csv', 'w', encoding='utf-8') as fp:\n fp.write(template.format(self.data.to_csv(index=True, encoding=\n 'utf-8')))\n",
"step-3": "<mask token>\n\n\nclass YahooHelper:\n \"\"\"\n Class to fetch Yahoo data\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Default constructor which initiates object\n \"\"\"\n pass\n\n def get_data(self, symbol):\n \"\"\"\n Function to collect Twitter data.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n \"\"\"\n self.data = self.get_stock_data(symbol)\n\n def get_stock_data(symbol):\n \"\"\"\n Function to get stock data for current year by ticker symbol.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n :return: Stock data for current year\n \"\"\"\n start = date(date.today().year, 1, 1)\n end = date.today()\n data = pdr.get_data_yahoo(symbol, start=start, end=end)\n data.columns = ['Highest price (USD)', 'Lowest price (USD)',\n 'Opening price (USD)', 'Closing price (USD)', 'Volume',\n 'Adjusted closing price (USD)']\n return data\n\n def export_data(self):\n \"\"\"\n Function to extract stock data to csv.\n \"\"\"\n with open('../data/yahoodata.csv', 'a', encoding='utf-8') as f:\n self.data.to_csv('../data/yahoodata.csv', sep='\\t', encoding=\n 'utf-8')\n template = ('# TSLA Stocks over time \\n' +\n \"\"\"# --------------------------------------------------------------------- \n\"\"\"\n +\n '# Export of stock data of \"Tesla Inc.\" for current year. The dataset\\n'\n +\n \"\"\"# consists of selected key stock exchange figures on a daily basis. \n\"\"\"\n +\n '# The data can be recreated at any time with the \"load_data.py\"-script.\\n'\n +\n \"\"\"# The data record contains one record sorted per trading day. \n\"\"\"\n + '#\\n' +\n '# The data is restricted to the NASDAQ symbol \"TSLA\" which represents \\n'\n +\n \"\"\"# the company Tesla Inc. The stock information was limited to the period \n\"\"\"\n + '# from 1st January to the current day of the year. 
\\n' +\n '#\\n' +\n \"\"\"# Extracted via Yahoo-Finance API, https://pypi.org/project/yahoo-finance/ \n\"\"\"\n + '# December, 26, 2018, Marco Romanutti \\n' + '#\\n' + '#\\n' +\n '{}')\n with open('../data/yahoodata.csv', 'w', encoding='utf-8') as fp:\n fp.write(template.format(self.data.to_csv(index=True, encoding=\n 'utf-8')))\n",
"step-4": "from pandas_datareader import data as pdr\nfrom datetime import date\n\n\nclass YahooHelper:\n \"\"\"\n Class to fetch Yahoo data\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Default constructor which initiates object\n \"\"\"\n pass\n\n def get_data(self, symbol):\n \"\"\"\n Function to collect Twitter data.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n \"\"\"\n self.data = self.get_stock_data(symbol)\n\n def get_stock_data(symbol):\n \"\"\"\n Function to get stock data for current year by ticker symbol.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n :return: Stock data for current year\n \"\"\"\n start = date(date.today().year, 1, 1)\n end = date.today()\n data = pdr.get_data_yahoo(symbol, start=start, end=end)\n data.columns = ['Highest price (USD)', 'Lowest price (USD)',\n 'Opening price (USD)', 'Closing price (USD)', 'Volume',\n 'Adjusted closing price (USD)']\n return data\n\n def export_data(self):\n \"\"\"\n Function to extract stock data to csv.\n \"\"\"\n with open('../data/yahoodata.csv', 'a', encoding='utf-8') as f:\n self.data.to_csv('../data/yahoodata.csv', sep='\\t', encoding=\n 'utf-8')\n template = ('# TSLA Stocks over time \\n' +\n \"\"\"# --------------------------------------------------------------------- \n\"\"\"\n +\n '# Export of stock data of \"Tesla Inc.\" for current year. The dataset\\n'\n +\n \"\"\"# consists of selected key stock exchange figures on a daily basis. \n\"\"\"\n +\n '# The data can be recreated at any time with the \"load_data.py\"-script.\\n'\n +\n \"\"\"# The data record contains one record sorted per trading day. \n\"\"\"\n + '#\\n' +\n '# The data is restricted to the NASDAQ symbol \"TSLA\" which represents \\n'\n +\n \"\"\"# the company Tesla Inc. The stock information was limited to the period \n\"\"\"\n + '# from 1st January to the current day of the year. 
\\n' +\n '#\\n' +\n \"\"\"# Extracted via Yahoo-Finance API, https://pypi.org/project/yahoo-finance/ \n\"\"\"\n + '# December, 26, 2018, Marco Romanutti \\n' + '#\\n' + '#\\n' +\n '{}')\n with open('../data/yahoodata.csv', 'w', encoding='utf-8') as fp:\n fp.write(template.format(self.data.to_csv(index=True, encoding=\n 'utf-8')))\n",
"step-5": "from pandas_datareader import data as pdr\nfrom datetime import date\n\n\nclass YahooHelper:\n \"\"\"\n Class to fetch Yahoo data\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Default constructor which initiates object\n \"\"\"\n pass\n\n def get_data(self, symbol):\n \"\"\"\n Function to collect Twitter data.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n \"\"\"\n # Collect stock market data\n self.data = self.get_stock_data(symbol)\n\n # Symbol lookup:\n def get_stock_data(symbol):\n \"\"\"\n Function to get stock data for current year by ticker symbol.\n\n :param symbol: The Symbol used to identify\n an NASDAQ-100 stock.\n :return: Stock data for current year\n \"\"\"\n # Set current dates\n start = date(date.today().year, 1, 1) # first of current year\n end = date.today() # today\n\n # Get yahoo Yahoo data\n data = pdr.get_data_yahoo(symbol, start=start, end=end)\n\n # Rename columns\n data.columns = [\"Highest price (USD)\",\n \"Lowest price (USD)\",\n \"Opening price (USD)\",\n \"Closing price (USD)\",\n \"Volume\",\n \"Adjusted closing price (USD)\"]\n\n return data\n\n # Export data to csv\n def export_data(self):\n \"\"\"\n Function to extract stock data to csv.\n \"\"\"\n with open('../data/yahoodata.csv', 'a', encoding='utf-8') as f:\n self.data.to_csv('../data/yahoodata.csv', sep='\\t', encoding='utf-8')\n # Header information\n template = \"# TSLA Stocks over time \\n\" + \\\n \"# --------------------------------------------------------------------- \\n\" + \\\n \"# Export of stock data of \\\"Tesla Inc.\\\" for current year. The dataset\\n\" + \\\n \"# consists of selected key stock exchange figures on a daily basis. \\n\" + \\\n \"# The data can be recreated at any time with the \\\"load_data.py\\\"-script.\\n\" + \\\n \"# The data record contains one record sorted per trading day. 
\\n\" + \\\n \"#\\n\" + \\\n \"# The data is restricted to the NASDAQ symbol \\\"TSLA\\\" which represents \\n\" + \\\n \"# the company Tesla Inc. The stock information was limited to the period \\n\" + \\\n \"# from 1st January to the current day of the year. \\n\" + \\\n \"#\\n\" + \\\n \"# Extracted via Yahoo-Finance API, https://pypi.org/project/yahoo-finance/ \\n\" + \\\n \"# December, 26, 2018, Marco Romanutti \\n\" + \\\n \"#\\n\" + \\\n \"#\\n\" + \\\n \"{}\"\"\"\n\n with open('../data/yahoodata.csv', 'w', encoding='utf-8') as fp:\n fp.write(template.format(self.data.to_csv(index=True, encoding='utf-8')))\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from mtots.parser import base
from mtots.parser import combinator
from mtots.parser.combinator import All
from mtots.parser.combinator import Any
from mtots.parser.combinator import AnyTokenBut
from mtots.parser.combinator import Forward
from mtots.parser.combinator import Peek
from mtots.parser.combinator import Required
from mtots.parser.combinator import Token
|
normal
|
{
"blob_id": "f9edbef46494cc2993c6a633fe35406524dbbf67",
"index": 1199,
"step-1": "<mask token>\n",
"step-2": "from mtots.parser import base\nfrom mtots.parser import combinator\nfrom mtots.parser.combinator import All\nfrom mtots.parser.combinator import Any\nfrom mtots.parser.combinator import AnyTokenBut\nfrom mtots.parser.combinator import Forward\nfrom mtots.parser.combinator import Peek\nfrom mtots.parser.combinator import Required\nfrom mtots.parser.combinator import Token\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# Generated by Django 3.0.4 on 2020-04-04 11:07
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 3.0.4, 2020-04-04).
    # Tightens constraints on the ``product`` app: unique name fields,
    # PROTECT on product.author, and empty-string defaults for the
    # description/link columns.

    # Must apply after the (swappable) user model and product.0003_cost.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('product', '0003_cost'),
    ]

    operations = [
        # Cost names must now be unique.
        migrations.AlterField(
            model_name='cost',
            name='name',
            field=models.CharField(max_length=50, unique=True),
        ),
        # PROTECT blocks deleting a user who still owns products.
        migrations.AlterField(
            model_name='product',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
        ),
        # Description becomes non-null with an empty-string default.
        migrations.AlterField(
            model_name='product',
            name='description',
            field=models.TextField(default=''),
        ),
        # Product names must now be unique.
        migrations.AlterField(
            model_name='product',
            name='name',
            field=models.CharField(max_length=100, unique=True),
        ),
        # Link fields become non-null with empty-string defaults.
        migrations.AlterField(
            model_name='product',
            name='passport_link',
            field=models.CharField(default='', max_length=200),
        ),
        migrations.AlterField(
            model_name='product',
            name='site_link',
            field=models.CharField(default='', max_length=200),
        ),
    ]
|
normal
|
{
"blob_id": "a4f2ca3155f2bb4c17be5bb56dd889abb5d20293",
"index": 3791,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('product', '0003_cost')]\n operations = [migrations.AlterField(model_name='cost', name='name',\n field=models.CharField(max_length=50, unique=True)), migrations.\n AlterField(model_name='product', name='author', field=models.\n ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings\n .AUTH_USER_MODEL)), migrations.AlterField(model_name='product',\n name='description', field=models.TextField(default='')), migrations\n .AlterField(model_name='product', name='name', field=models.\n CharField(max_length=100, unique=True)), migrations.AlterField(\n model_name='product', name='passport_link', field=models.CharField(\n default='', max_length=200)), migrations.AlterField(model_name=\n 'product', name='site_link', field=models.CharField(default='',\n max_length=200))]\n",
"step-4": "from django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('product', '0003_cost')]\n operations = [migrations.AlterField(model_name='cost', name='name',\n field=models.CharField(max_length=50, unique=True)), migrations.\n AlterField(model_name='product', name='author', field=models.\n ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings\n .AUTH_USER_MODEL)), migrations.AlterField(model_name='product',\n name='description', field=models.TextField(default='')), migrations\n .AlterField(model_name='product', name='name', field=models.\n CharField(max_length=100, unique=True)), migrations.AlterField(\n model_name='product', name='passport_link', field=models.CharField(\n default='', max_length=200)), migrations.AlterField(model_name=\n 'product', name='site_link', field=models.CharField(default='',\n max_length=200))]\n",
"step-5": "# Generated by Django 3.0.4 on 2020-04-04 11:07\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ('product', '0003_cost'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='cost',\n name='name',\n field=models.CharField(max_length=50, unique=True),\n ),\n migrations.AlterField(\n model_name='product',\n name='author',\n field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),\n ),\n migrations.AlterField(\n model_name='product',\n name='description',\n field=models.TextField(default=''),\n ),\n migrations.AlterField(\n model_name='product',\n name='name',\n field=models.CharField(max_length=100, unique=True),\n ),\n migrations.AlterField(\n model_name='product',\n name='passport_link',\n field=models.CharField(default='', max_length=200),\n ),\n migrations.AlterField(\n model_name='product',\n name='site_link',\n field=models.CharField(default='', max_length=200),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Solution(object):
    def rotate(self, nums, k):
        """
        Rotate the list *nums* to the right by *k* steps, in place.

        :type nums: List[int]
        :type k: int
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        # Guard the empty list: ``k % 0`` would raise ZeroDivisionError.
        if not nums:
            return
        # Rotating by the length (or any multiple of it) is a no-op.
        k %= len(nums)
        # Right rotation: the last k elements move to the front.  Slice
        # assignment mutates the caller's list object rather than rebinding.
        # Note k == 0 is safe: nums[-0:] is the whole list, nums[:-0] == [].
        nums[:] = nums[-k:] + nums[:-k]
|
normal
|
{
"blob_id": "a3ccd526b70db2061566274852a7fc0c249c165a",
"index": 6931,
"step-1": "class Solution(object):\n def rotate(self, nums, k):\n \"\"\"\n :type nums: List[int]\n :type k: int\n :rtype: void Do not return anything, modify nums in-place instead.\n \"\"\"\n ln=len(nums)\n k=k%ln\n nums[:]=nums+nums[:ln-k]\n \tdel nums[0:ln-k]\n\n\n#menthod 2自己输入[1,2],k=1通过了测试但是leetcode的output和自己的不一样,怎么都不pass。What's wrong?\n#后来发现了原因nums后面要加[:] \n# ln=len(nums)\n# k=k%ln\n# lastnum=nums[0:ln-k]\n# nums[:]=nums[ln-k:]\n# nums[:]=nums+lastnum\n\n\n#method 1:\n# ln=len(nums)\n# k=k%ln\n# for i in range(ln-k):\n# nums.append(nums[i])\n# del nums[0:ln-k]",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#---------------------------------------------
# File name: phase2app.py
# Description: Launches GUI for Twitter User Timeline Sentiment Analysis program
# Author: Gilbert Yap ([email protected])
# Date: October 03, 2020
#---------------------------------------------
from PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout, QMessageBox
from PySide2.QtCore import Qt, QFile, QRegExp
from PySide2.QtGui import QRegExpValidator
from phase2GUI import Ui_Dialog
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
import configparser, csv, datetime, sys
sys.path.insert(1, '..\\SharedFiles\\')
import matplotlib.pyplot as plt
import helper, phase2Functions
SETTINGS_FILE = '..\\SharedFiles\\settings.ini'
class Ui_Window(QDialog):
    """Main dialog for the Twitter-timeline sentiment analyzer.

    Collects a Twitter username and a date range, plots sentiment
    score/magnitude over time for the user's tweets, and exports the
    underlying data to timestamped CSV files.
    """

    def __init__(self):
        super(Ui_Window, self).__init__()
        self.ui = Ui_Dialog()
        self.ui.setupUi(self)

        # Restrict the username field to word characters (Twitter handles).
        # Raw string: "\w" is an invalid escape in a plain literal.
        regex = QRegExp(r"\w+")
        validator = QRegExpValidator(regex)
        self.ui.usernameLineEdit.setValidator(validator)

        # Default the end date to today.
        today = datetime.datetime.now()
        self.ui.endMonthSpinBox.setValue(today.month)
        self.ui.endDaySpinBox.setValue(today.day)
        self.ui.endYearSpinBox.setValue(today.year)

        # Embed a matplotlib canvas (plus toolbar) inside plotDisplayGroupBox.
        self.figure = plt.figure()
        self.canvas = FigureCanvas(self.figure)
        self.toolbar = NavigationToolbar(self.canvas, self)
        layout = QVBoxLayout()
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        self.ui.plotDisplayGroupBox.setLayout(layout)

        # Wire up the buttons.
        self.ui.processDatesPushButton.clicked.connect(self.plotSentiment)
        self.ui.exportPushButton.clicked.connect(self.exportValues)

        # Initialize the Twitter and Google clients from settings.ini and
        # bail out of the app entirely if that fails.
        settings = configparser.ConfigParser()
        settings.read(SETTINGS_FILE)
        helper.print_with_stars('Initializing APIs')
        (twitterApi, googleClient, errors) = phase2Functions.init_apis(
            settings['KEYS']['api_key'], settings['KEYS']['api_secret_key'])
        if len(errors) > 0:
            self.printMessages(errors)
            sys.exit(1)
        else:
            self.twitterApi = twitterApi
            self.googleClient = googleClient

        self.show()

    def plotSentiment(self):
        """Fetch tweets for the entered username/date range, run sentiment
        analysis, and plot score and magnitude against date."""
        # Validate the dates BEFORE switching to the wait cursor: the
        # original set the cursor first and returned early on a bad date,
        # leaving the busy cursor stuck for the rest of the session.
        startDate = self.get_start_date()
        endDate = self.get_end_date()
        if (startDate is None) or (endDate is None):
            return

        # Data collection can take a while; show a busy cursor meanwhile.
        QApplication.setOverrideCursor(Qt.WaitCursor)
        (dateList, scoreList, magnitudeList, tweetList, errors) = \
            phase2Functions.generate_data_lists(
                self.twitterApi, self.googleClient, self.get_username(),
                startDate, endDate)
        QApplication.restoreOverrideCursor()

        if len(errors) > 0:
            self.printMessages(errors)
            return

        # Cache the series for CSV export, then redraw the plot.
        self.plotData = (dateList, scoreList, magnitudeList)
        self.tweetList = tweetList

        self.figure.clear()
        ax = self.figure.add_subplot(111)
        self.figure.subplots_adjust(top=0.88, bottom=0.255, left=0.17,
                                    right=0.9, hspace=0.2, wspace=0.2)
        ax.set_title("Sentiment Analysis of @{}'s tweets".format(self.get_username()))
        ax.set_xlabel("Date")
        ax.set_ylabel("Sentiment Value")
        ax.xaxis.set_major_locator(plt.MaxNLocator(10))
        for tick in ax.get_xticklabels():
            tick.set_rotation(45)
        ax.plot(self.plotData[0], self.plotData[1], "-bo", label='Sentiment Score')
        ax.plot(self.plotData[0], self.plotData[2], "-ro", label='Sentiment Magnitude')
        ax.legend(loc="lower right")
        self.canvas.draw()
        self.enableExport()

    def get_username(self):
        """Return the Twitter username typed into the line edit."""
        return self.ui.usernameLineEdit.text()

    def _read_date(self, yearBox, monthBox, dayBox, label):
        """Build a datetime from three spin boxes.

        Shows an error box and returns None when the combination is not a
        real calendar date (datetime raises ValueError, e.g. Feb 30).
        """
        try:
            return datetime.datetime(yearBox.value(), monthBox.value(),
                                     dayBox.value())
        except ValueError:
            self.printMessages(['{} date is improperly set. Check to see '
                                'that the date is correct/exists.'.format(label)])
            return None

    def get_start_date(self):
        """Return the start date from the spin boxes, or None if invalid."""
        return self._read_date(self.ui.startYearSpinBox,
                               self.ui.startMonthSpinBox,
                               self.ui.startDaySpinBox, 'Start')

    def get_end_date(self):
        """Return the end date from the spin boxes, or None if invalid."""
        return self._read_date(self.ui.endYearSpinBox,
                               self.ui.endMonthSpinBox,
                               self.ui.endDaySpinBox, 'End')

    def enableExport(self):
        """Enable the CSV export button (valid plot data is available)."""
        self.ui.exportPushButton.setEnabled(True)

    def _write_series_csv(self, path, values):
        """Write (date, value, tweet text) rows for one series to *path*."""
        with open(path, mode='w') as csv_file:
            writer = csv.writer(csv_file)
            for i in range(len(self.plotData[0])):
                writer.writerow([str(self.plotData[0][i]), values[i],
                                 self.tweetList[i].full_text.encode(
                                     encoding='UTF-8', errors='replace')])

    def exportValues(self):
        """Export score and magnitude series (with tweet text) to two
        timestamped CSV files and confirm with a message box."""
        now = datetime.datetime.now()
        # Keep the historical unpadded "Y-M-D-H-M-S" stamp for file names.
        stamp = '{0.year}-{0.month}-{0.day}-{0.hour}-{0.minute}-{0.second}'.format(now)
        base = stamp + '_' + self.get_username()
        self._write_series_csv(base + '_score.csv', self.plotData[1])
        self._write_series_csv(base + '_magnitude.csv', self.plotData[2])

        msgBox = QMessageBox()
        msgBox.setText('CSV files exported!')
        msgBox.exec()

    def printMessages(self, messageList):
        """Show every string in *messageList* in one critical message box."""
        msgBox = QMessageBox()
        msgBox.setIcon(QMessageBox.Critical)
        msgBox.setWindowTitle('Errors occurred!')
        msgBox.setText(''.join(message + '\n' for message in messageList))
        msgBox.exec()
# Script entry point: create the Qt application, show the main dialog,
# and exit with the event loop's return code.
if __name__ == "__main__":
    app = QApplication(sys.argv)
    window = Ui_Window()
    window.show()
    sys.exit(app.exec_())
|
normal
|
{
"blob_id": "8cabacb64f3b193b957c61d6e1ca21f2046e52d1",
"index": 8199,
"step-1": "<mask token>\n\n\nclass Ui_Window(QDialog):\n\n def __init__(self):\n super(Ui_Window, self).__init__()\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n regex = QRegExp('\\\\w+')\n validator = QRegExpValidator(regex)\n self.ui.usernameLineEdit.setValidator(validator)\n self.ui.endMonthSpinBox.setValue(datetime.datetime.now().month)\n self.ui.endDaySpinBox.setValue(datetime.datetime.now().day)\n self.ui.endYearSpinBox.setValue(datetime.datetime.now().year)\n self.figure = plt.figure()\n self.canvas = FigureCanvas(self.figure)\n self.toolbar = NavigationToolbar(self.canvas, self)\n layout = QVBoxLayout()\n layout.addWidget(self.toolbar)\n layout.addWidget(self.canvas)\n self.ui.plotDisplayGroupBox.setLayout(layout)\n self.ui.processDatesPushButton.clicked.connect(self.plotSentiment)\n self.ui.exportPushButton.clicked.connect(self.exportValues)\n settings = configparser.ConfigParser()\n settings.read(SETTINGS_FILE)\n helper.print_with_stars('Initializing APIs')\n twitterApi, googleClient, errors = phase2Functions.init_apis(settings\n ['KEYS']['api_key'], settings['KEYS']['api_secret_key'])\n if len(errors) > 0:\n self.printMessages(errors)\n sys.exit(1)\n else:\n self.twitterApi = twitterApi\n self.googleClient = googleClient\n self.show()\n <mask token>\n\n def plotSentiment(self):\n QApplication.setOverrideCursor(Qt.WaitCursor)\n startDate = self.get_start_date()\n endDate = self.get_end_date()\n if startDate is None or endDate is None:\n return\n dateList, scoreList, magnitudeList, tweetList, errors = (\n phase2Functions.generate_data_lists(self.twitterApi, self.\n googleClient, self.get_username(), startDate, endDate))\n QApplication.restoreOverrideCursor()\n if len(errors) > 0:\n self.printMessages(errors)\n else:\n self.plotData = dateList, scoreList, magnitudeList\n self.tweetList = tweetList\n self.figure.clear()\n ax = self.figure.add_subplot(111)\n self.figure.subplots_adjust(top=0.88, bottom=0.255, left=0.17,\n right=0.9, hspace=0.2, 
wspace=0.2)\n ax.set_title(\"Sentiment Analysis of @{}'s tweets\".format(self.\n get_username()))\n ax.set_xlabel('Date')\n ax.set_ylabel('Sentiment Value')\n ax.xaxis.set_major_locator(plt.MaxNLocator(10))\n for tick in ax.get_xticklabels():\n tick.set_rotation(45)\n ax.plot(self.plotData[0], self.plotData[1], '-bo', label=\n 'Sentiment Score')\n ax.plot(self.plotData[0], self.plotData[2], '-ro', label=\n 'Sentiment Magnitude')\n ax.legend(loc='lower right')\n self.canvas.draw()\n self.enableExport()\n <mask token>\n\n def get_username(self):\n return self.ui.usernameLineEdit.text()\n <mask token>\n\n def get_start_date(self):\n start_month = self.ui.startMonthSpinBox.value()\n start_day = self.ui.startDaySpinBox.value()\n start_year = self.ui.startYearSpinBox.value()\n try:\n startDate = datetime.datetime(start_year, start_month, start_day)\n except:\n self.printMessages([\n 'Start date is improperly set. Check to see that the date is correct/exists.'\n ])\n return None\n return startDate\n <mask token>\n\n def get_end_date(self):\n end_month = self.ui.endMonthSpinBox.value()\n end_day = self.ui.endDaySpinBox.value()\n end_year = self.ui.endYearSpinBox.value()\n try:\n endDate = datetime.datetime(end_year, end_month, end_day)\n except:\n self.printMessages([\n 'End date is improperly set. 
Check to see that the date is correct/exists.'\n ])\n return None\n return endDate\n <mask token>\n\n def enableExport(self):\n self.ui.exportPushButton.setEnabled(True)\n <mask token>\n\n def exportValues(self):\n currentTimeDate = datetime.datetime.now()\n currentTimeDate = str(currentTimeDate.year) + '-' + str(currentTimeDate\n .month) + '-' + str(currentTimeDate.day) + '-' + str(\n currentTimeDate.hour) + '-' + str(currentTimeDate.minute\n ) + '-' + str(currentTimeDate.second)\n with open(currentTimeDate + '_' + self.get_username() +\n '_score.csv', mode='w') as score_file:\n writer = csv.writer(score_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[1]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n with open(currentTimeDate + '_' + self.get_username() +\n '_magnitude.csv', mode='w') as magnitude_file:\n writer = csv.writer(magnitude_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[2]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n msgBox = QMessageBox()\n msgBox.setText('CSV files exported!')\n msgBox.exec()\n <mask token>\n\n def printMessages(self, messageList):\n msgBox = QMessageBox()\n msgBox.setIcon(QMessageBox.Critical)\n msgBox.setWindowTitle('Errors occured!')\n tempString = ''\n for message in messageList:\n tempString += message + '\\n'\n msgBox.setText(tempString)\n msgBox.exec()\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.insert(1, '..\\\\SharedFiles\\\\')\n<mask token>\n\n\nclass Ui_Window(QDialog):\n\n def __init__(self):\n super(Ui_Window, self).__init__()\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n regex = QRegExp('\\\\w+')\n validator = QRegExpValidator(regex)\n self.ui.usernameLineEdit.setValidator(validator)\n self.ui.endMonthSpinBox.setValue(datetime.datetime.now().month)\n self.ui.endDaySpinBox.setValue(datetime.datetime.now().day)\n self.ui.endYearSpinBox.setValue(datetime.datetime.now().year)\n self.figure = plt.figure()\n self.canvas = FigureCanvas(self.figure)\n self.toolbar = NavigationToolbar(self.canvas, self)\n layout = QVBoxLayout()\n layout.addWidget(self.toolbar)\n layout.addWidget(self.canvas)\n self.ui.plotDisplayGroupBox.setLayout(layout)\n self.ui.processDatesPushButton.clicked.connect(self.plotSentiment)\n self.ui.exportPushButton.clicked.connect(self.exportValues)\n settings = configparser.ConfigParser()\n settings.read(SETTINGS_FILE)\n helper.print_with_stars('Initializing APIs')\n twitterApi, googleClient, errors = phase2Functions.init_apis(settings\n ['KEYS']['api_key'], settings['KEYS']['api_secret_key'])\n if len(errors) > 0:\n self.printMessages(errors)\n sys.exit(1)\n else:\n self.twitterApi = twitterApi\n self.googleClient = googleClient\n self.show()\n \"\"\"\n Plot the sentiment score\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def plotSentiment(self):\n QApplication.setOverrideCursor(Qt.WaitCursor)\n startDate = self.get_start_date()\n endDate = self.get_end_date()\n if startDate is None or endDate is None:\n return\n dateList, scoreList, magnitudeList, tweetList, errors = (\n phase2Functions.generate_data_lists(self.twitterApi, self.\n googleClient, self.get_username(), startDate, endDate))\n QApplication.restoreOverrideCursor()\n if len(errors) > 0:\n self.printMessages(errors)\n else:\n self.plotData = dateList, scoreList, magnitudeList\n self.tweetList = tweetList\n self.figure.clear()\n ax = 
self.figure.add_subplot(111)\n self.figure.subplots_adjust(top=0.88, bottom=0.255, left=0.17,\n right=0.9, hspace=0.2, wspace=0.2)\n ax.set_title(\"Sentiment Analysis of @{}'s tweets\".format(self.\n get_username()))\n ax.set_xlabel('Date')\n ax.set_ylabel('Sentiment Value')\n ax.xaxis.set_major_locator(plt.MaxNLocator(10))\n for tick in ax.get_xticklabels():\n tick.set_rotation(45)\n ax.plot(self.plotData[0], self.plotData[1], '-bo', label=\n 'Sentiment Score')\n ax.plot(self.plotData[0], self.plotData[2], '-ro', label=\n 'Sentiment Magnitude')\n ax.legend(loc='lower right')\n self.canvas.draw()\n self.enableExport()\n \"\"\"\n Gets username from text field\n Input - self:Ui_Window\n Output - string\n \"\"\"\n\n def get_username(self):\n return self.ui.usernameLineEdit.text()\n \"\"\"\n Gets start date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n \"\"\"\n\n def get_start_date(self):\n start_month = self.ui.startMonthSpinBox.value()\n start_day = self.ui.startDaySpinBox.value()\n start_year = self.ui.startYearSpinBox.value()\n try:\n startDate = datetime.datetime(start_year, start_month, start_day)\n except:\n self.printMessages([\n 'Start date is improperly set. Check to see that the date is correct/exists.'\n ])\n return None\n return startDate\n \"\"\"\n Gets end date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n \"\"\"\n\n def get_end_date(self):\n end_month = self.ui.endMonthSpinBox.value()\n end_day = self.ui.endDaySpinBox.value()\n end_year = self.ui.endYearSpinBox.value()\n try:\n endDate = datetime.datetime(end_year, end_month, end_day)\n except:\n self.printMessages([\n 'End date is improperly set. 
Check to see that the date is correct/exists.'\n ])\n return None\n return endDate\n \"\"\"\n Toggles the export button.\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def enableExport(self):\n self.ui.exportPushButton.setEnabled(True)\n \"\"\"\n Exports date, score/magntitude, and tweet text to csv and pops up a window when done\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def exportValues(self):\n currentTimeDate = datetime.datetime.now()\n currentTimeDate = str(currentTimeDate.year) + '-' + str(currentTimeDate\n .month) + '-' + str(currentTimeDate.day) + '-' + str(\n currentTimeDate.hour) + '-' + str(currentTimeDate.minute\n ) + '-' + str(currentTimeDate.second)\n with open(currentTimeDate + '_' + self.get_username() +\n '_score.csv', mode='w') as score_file:\n writer = csv.writer(score_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[1]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n with open(currentTimeDate + '_' + self.get_username() +\n '_magnitude.csv', mode='w') as magnitude_file:\n writer = csv.writer(magnitude_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[2]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n msgBox = QMessageBox()\n msgBox.setText('CSV files exported!')\n msgBox.exec()\n \"\"\"\n Prints out messages in a pop up window\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def printMessages(self, messageList):\n msgBox = QMessageBox()\n msgBox.setIcon(QMessageBox.Critical)\n msgBox.setWindowTitle('Errors occured!')\n tempString = ''\n for message in messageList:\n tempString += message + '\\n'\n msgBox.setText(tempString)\n msgBox.exec()\n\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n window = Ui_Window()\n window.show()\n sys.exit(app.exec_())\n",
"step-3": "<mask token>\nsys.path.insert(1, '..\\\\SharedFiles\\\\')\n<mask token>\nSETTINGS_FILE = '..\\\\SharedFiles\\\\settings.ini'\n\n\nclass Ui_Window(QDialog):\n\n def __init__(self):\n super(Ui_Window, self).__init__()\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n regex = QRegExp('\\\\w+')\n validator = QRegExpValidator(regex)\n self.ui.usernameLineEdit.setValidator(validator)\n self.ui.endMonthSpinBox.setValue(datetime.datetime.now().month)\n self.ui.endDaySpinBox.setValue(datetime.datetime.now().day)\n self.ui.endYearSpinBox.setValue(datetime.datetime.now().year)\n self.figure = plt.figure()\n self.canvas = FigureCanvas(self.figure)\n self.toolbar = NavigationToolbar(self.canvas, self)\n layout = QVBoxLayout()\n layout.addWidget(self.toolbar)\n layout.addWidget(self.canvas)\n self.ui.plotDisplayGroupBox.setLayout(layout)\n self.ui.processDatesPushButton.clicked.connect(self.plotSentiment)\n self.ui.exportPushButton.clicked.connect(self.exportValues)\n settings = configparser.ConfigParser()\n settings.read(SETTINGS_FILE)\n helper.print_with_stars('Initializing APIs')\n twitterApi, googleClient, errors = phase2Functions.init_apis(settings\n ['KEYS']['api_key'], settings['KEYS']['api_secret_key'])\n if len(errors) > 0:\n self.printMessages(errors)\n sys.exit(1)\n else:\n self.twitterApi = twitterApi\n self.googleClient = googleClient\n self.show()\n \"\"\"\n Plot the sentiment score\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def plotSentiment(self):\n QApplication.setOverrideCursor(Qt.WaitCursor)\n startDate = self.get_start_date()\n endDate = self.get_end_date()\n if startDate is None or endDate is None:\n return\n dateList, scoreList, magnitudeList, tweetList, errors = (\n phase2Functions.generate_data_lists(self.twitterApi, self.\n googleClient, self.get_username(), startDate, endDate))\n QApplication.restoreOverrideCursor()\n if len(errors) > 0:\n self.printMessages(errors)\n else:\n self.plotData = dateList, scoreList, magnitudeList\n 
self.tweetList = tweetList\n self.figure.clear()\n ax = self.figure.add_subplot(111)\n self.figure.subplots_adjust(top=0.88, bottom=0.255, left=0.17,\n right=0.9, hspace=0.2, wspace=0.2)\n ax.set_title(\"Sentiment Analysis of @{}'s tweets\".format(self.\n get_username()))\n ax.set_xlabel('Date')\n ax.set_ylabel('Sentiment Value')\n ax.xaxis.set_major_locator(plt.MaxNLocator(10))\n for tick in ax.get_xticklabels():\n tick.set_rotation(45)\n ax.plot(self.plotData[0], self.plotData[1], '-bo', label=\n 'Sentiment Score')\n ax.plot(self.plotData[0], self.plotData[2], '-ro', label=\n 'Sentiment Magnitude')\n ax.legend(loc='lower right')\n self.canvas.draw()\n self.enableExport()\n \"\"\"\n Gets username from text field\n Input - self:Ui_Window\n Output - string\n \"\"\"\n\n def get_username(self):\n return self.ui.usernameLineEdit.text()\n \"\"\"\n Gets start date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n \"\"\"\n\n def get_start_date(self):\n start_month = self.ui.startMonthSpinBox.value()\n start_day = self.ui.startDaySpinBox.value()\n start_year = self.ui.startYearSpinBox.value()\n try:\n startDate = datetime.datetime(start_year, start_month, start_day)\n except:\n self.printMessages([\n 'Start date is improperly set. Check to see that the date is correct/exists.'\n ])\n return None\n return startDate\n \"\"\"\n Gets end date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n \"\"\"\n\n def get_end_date(self):\n end_month = self.ui.endMonthSpinBox.value()\n end_day = self.ui.endDaySpinBox.value()\n end_year = self.ui.endYearSpinBox.value()\n try:\n endDate = datetime.datetime(end_year, end_month, end_day)\n except:\n self.printMessages([\n 'End date is improperly set. 
Check to see that the date is correct/exists.'\n ])\n return None\n return endDate\n \"\"\"\n Toggles the export button.\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def enableExport(self):\n self.ui.exportPushButton.setEnabled(True)\n \"\"\"\n Exports date, score/magntitude, and tweet text to csv and pops up a window when done\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def exportValues(self):\n currentTimeDate = datetime.datetime.now()\n currentTimeDate = str(currentTimeDate.year) + '-' + str(currentTimeDate\n .month) + '-' + str(currentTimeDate.day) + '-' + str(\n currentTimeDate.hour) + '-' + str(currentTimeDate.minute\n ) + '-' + str(currentTimeDate.second)\n with open(currentTimeDate + '_' + self.get_username() +\n '_score.csv', mode='w') as score_file:\n writer = csv.writer(score_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[1]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n with open(currentTimeDate + '_' + self.get_username() +\n '_magnitude.csv', mode='w') as magnitude_file:\n writer = csv.writer(magnitude_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[2]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n msgBox = QMessageBox()\n msgBox.setText('CSV files exported!')\n msgBox.exec()\n \"\"\"\n Prints out messages in a pop up window\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def printMessages(self, messageList):\n msgBox = QMessageBox()\n msgBox.setIcon(QMessageBox.Critical)\n msgBox.setWindowTitle('Errors occured!')\n tempString = ''\n for message in messageList:\n tempString += message + '\\n'\n msgBox.setText(tempString)\n msgBox.exec()\n\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n window = Ui_Window()\n window.show()\n sys.exit(app.exec_())\n",
"step-4": "from PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout, QMessageBox\nfrom PySide2.QtCore import Qt, QFile, QRegExp\nfrom PySide2.QtGui import QRegExpValidator\nfrom phase2GUI import Ui_Dialog\nfrom matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas\nfrom matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar\nimport configparser, csv, datetime, sys\nsys.path.insert(1, '..\\\\SharedFiles\\\\')\nimport matplotlib.pyplot as plt\nimport helper, phase2Functions\nSETTINGS_FILE = '..\\\\SharedFiles\\\\settings.ini'\n\n\nclass Ui_Window(QDialog):\n\n def __init__(self):\n super(Ui_Window, self).__init__()\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n regex = QRegExp('\\\\w+')\n validator = QRegExpValidator(regex)\n self.ui.usernameLineEdit.setValidator(validator)\n self.ui.endMonthSpinBox.setValue(datetime.datetime.now().month)\n self.ui.endDaySpinBox.setValue(datetime.datetime.now().day)\n self.ui.endYearSpinBox.setValue(datetime.datetime.now().year)\n self.figure = plt.figure()\n self.canvas = FigureCanvas(self.figure)\n self.toolbar = NavigationToolbar(self.canvas, self)\n layout = QVBoxLayout()\n layout.addWidget(self.toolbar)\n layout.addWidget(self.canvas)\n self.ui.plotDisplayGroupBox.setLayout(layout)\n self.ui.processDatesPushButton.clicked.connect(self.plotSentiment)\n self.ui.exportPushButton.clicked.connect(self.exportValues)\n settings = configparser.ConfigParser()\n settings.read(SETTINGS_FILE)\n helper.print_with_stars('Initializing APIs')\n twitterApi, googleClient, errors = phase2Functions.init_apis(settings\n ['KEYS']['api_key'], settings['KEYS']['api_secret_key'])\n if len(errors) > 0:\n self.printMessages(errors)\n sys.exit(1)\n else:\n self.twitterApi = twitterApi\n self.googleClient = googleClient\n self.show()\n \"\"\"\n Plot the sentiment score\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def plotSentiment(self):\n QApplication.setOverrideCursor(Qt.WaitCursor)\n 
startDate = self.get_start_date()\n endDate = self.get_end_date()\n if startDate is None or endDate is None:\n return\n dateList, scoreList, magnitudeList, tweetList, errors = (\n phase2Functions.generate_data_lists(self.twitterApi, self.\n googleClient, self.get_username(), startDate, endDate))\n QApplication.restoreOverrideCursor()\n if len(errors) > 0:\n self.printMessages(errors)\n else:\n self.plotData = dateList, scoreList, magnitudeList\n self.tweetList = tweetList\n self.figure.clear()\n ax = self.figure.add_subplot(111)\n self.figure.subplots_adjust(top=0.88, bottom=0.255, left=0.17,\n right=0.9, hspace=0.2, wspace=0.2)\n ax.set_title(\"Sentiment Analysis of @{}'s tweets\".format(self.\n get_username()))\n ax.set_xlabel('Date')\n ax.set_ylabel('Sentiment Value')\n ax.xaxis.set_major_locator(plt.MaxNLocator(10))\n for tick in ax.get_xticklabels():\n tick.set_rotation(45)\n ax.plot(self.plotData[0], self.plotData[1], '-bo', label=\n 'Sentiment Score')\n ax.plot(self.plotData[0], self.plotData[2], '-ro', label=\n 'Sentiment Magnitude')\n ax.legend(loc='lower right')\n self.canvas.draw()\n self.enableExport()\n \"\"\"\n Gets username from text field\n Input - self:Ui_Window\n Output - string\n \"\"\"\n\n def get_username(self):\n return self.ui.usernameLineEdit.text()\n \"\"\"\n Gets start date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n \"\"\"\n\n def get_start_date(self):\n start_month = self.ui.startMonthSpinBox.value()\n start_day = self.ui.startDaySpinBox.value()\n start_year = self.ui.startYearSpinBox.value()\n try:\n startDate = datetime.datetime(start_year, start_month, start_day)\n except:\n self.printMessages([\n 'Start date is improperly set. 
Check to see that the date is correct/exists.'\n ])\n return None\n return startDate\n \"\"\"\n Gets end date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n \"\"\"\n\n def get_end_date(self):\n end_month = self.ui.endMonthSpinBox.value()\n end_day = self.ui.endDaySpinBox.value()\n end_year = self.ui.endYearSpinBox.value()\n try:\n endDate = datetime.datetime(end_year, end_month, end_day)\n except:\n self.printMessages([\n 'End date is improperly set. Check to see that the date is correct/exists.'\n ])\n return None\n return endDate\n \"\"\"\n Toggles the export button.\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def enableExport(self):\n self.ui.exportPushButton.setEnabled(True)\n \"\"\"\n Exports date, score/magntitude, and tweet text to csv and pops up a window when done\n Input - self:Ui_Window\n Output - None\n \"\"\"\n\n def exportValues(self):\n currentTimeDate = datetime.datetime.now()\n currentTimeDate = str(currentTimeDate.year) + '-' + str(currentTimeDate\n .month) + '-' + str(currentTimeDate.day) + '-' + str(\n currentTimeDate.hour) + '-' + str(currentTimeDate.minute\n ) + '-' + str(currentTimeDate.second)\n with open(currentTimeDate + '_' + self.get_username() +\n '_score.csv', mode='w') as score_file:\n writer = csv.writer(score_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[1]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n with open(currentTimeDate + '_' + self.get_username() +\n '_magnitude.csv', mode='w') as magnitude_file:\n writer = csv.writer(magnitude_file)\n for i in range(len(self.plotData[0])):\n writer.writerow([str(self.plotData[0][i]), self.plotData[2]\n [i], self.tweetList[i].full_text.encode(encoding=\n 'UTF-8', errors='replace')])\n msgBox = QMessageBox()\n msgBox.setText('CSV files exported!')\n msgBox.exec()\n \"\"\"\n Prints out messages in a pop up window\n Input - self:Ui_Window\n Output - None\n 
\"\"\"\n\n def printMessages(self, messageList):\n msgBox = QMessageBox()\n msgBox.setIcon(QMessageBox.Critical)\n msgBox.setWindowTitle('Errors occured!')\n tempString = ''\n for message in messageList:\n tempString += message + '\\n'\n msgBox.setText(tempString)\n msgBox.exec()\n\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n window = Ui_Window()\n window.show()\n sys.exit(app.exec_())\n",
"step-5": "#---------------------------------------------\n# File name: phase2app.py\n# Description: Launches GUI for Twitter User Timeline Sentiment Analysis program\n# Author: Gilbert Yap ([email protected])\n# Date: October 03, 2020\n#---------------------------------------------\n\nfrom PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout, QMessageBox\nfrom PySide2.QtCore import Qt, QFile, QRegExp\nfrom PySide2.QtGui import QRegExpValidator\nfrom phase2GUI import Ui_Dialog\n\nfrom matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas\nfrom matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar\n\nimport configparser, csv, datetime, sys\nsys.path.insert(1, '..\\\\SharedFiles\\\\')\nimport matplotlib.pyplot as plt\nimport helper, phase2Functions\n\nSETTINGS_FILE = '..\\\\SharedFiles\\\\settings.ini'\n\nclass Ui_Window(QDialog):\n def __init__(self):\n super(Ui_Window, self).__init__()\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n\n # Set regex validator for the username\n regex = QRegExp(\"\\w+\")\n validator = QRegExpValidator(regex)\n self.ui.usernameLineEdit.setValidator(validator)\n\n # Set the end date to today by default\n self.ui.endMonthSpinBox.setValue(datetime.datetime.now().month)\n self.ui.endDaySpinBox.setValue(datetime.datetime.now().day)\n self.ui.endYearSpinBox.setValue(datetime.datetime.now().year)\n \n # Place a plot inside of plotDisplayGroupBox\n self.figure = plt.figure()\n self.canvas = FigureCanvas(self.figure)\n self.toolbar = NavigationToolbar(self.canvas, self)\n layout = QVBoxLayout()\n layout.addWidget(self.toolbar)\n layout.addWidget(self.canvas)\n self.ui.plotDisplayGroupBox.setLayout(layout)\n\n # Set up signals\n self.ui.processDatesPushButton.clicked.connect(self.plotSentiment)\n self.ui.exportPushButton.clicked.connect(self.exportValues)\n\n # Init APIs\n settings = configparser.ConfigParser()\n settings.read(SETTINGS_FILE)\n\n helper.print_with_stars('Initializing 
APIs')\n (twitterApi, googleClient, errors) = phase2Functions.init_apis(settings['KEYS']['api_key'], settings['KEYS']['api_secret_key'])\n\n if(len(errors) > 0):\n self.printMessages(errors)\n sys.exit(1)\n else:\n self.twitterApi = twitterApi\n self.googleClient = googleClient\n self.show()\n\n '''\n Plot the sentiment score\n Input - self:Ui_Window\n Output - None\n '''\n def plotSentiment(self):\n QApplication.setOverrideCursor(Qt.WaitCursor)\n # Get the sentiment data\n startDate = self.get_start_date()\n endDate = self.get_end_date()\n \n if (startDate is None) or (endDate is None):\n return\n \n (dateList, scoreList, magnitudeList, tweetList, errors) = phase2Functions.generate_data_lists(self.twitterApi, self.googleClient, self.get_username(), startDate, endDate)\n QApplication.restoreOverrideCursor()\n \n # If there were any errors, print them out\n if(len(errors) > 0):\n self.printMessages(errors)\n else:\n # If there are no errors, format and plot out the data\n self.plotData = (dateList, scoreList, magnitudeList)\n self.tweetList = tweetList\n self.figure.clear()\n ax = self.figure.add_subplot(111)\n self.figure.subplots_adjust(top=0.88,\n bottom=0.255,\n left=0.17,\n right=0.9,\n hspace=0.2,\n wspace=0.2)\n\n ax.set_title(\"Sentiment Analysis of @{}'s tweets\".format(self.get_username(),)) \n ax.set_xlabel(\"Date\") \n ax.set_ylabel(\"Sentiment Value\") \n ax.xaxis.set_major_locator(plt.MaxNLocator(10))\n \n for tick in ax.get_xticklabels():\n tick.set_rotation(45)\n\n ax.plot(self.plotData[0],self.plotData[1],\"-bo\",label='Sentiment Score') \n ax.plot(self.plotData[0],self.plotData[2], \"-ro\",label='Sentiment Magnitude')\n ax.legend(loc=\"lower right\")\n self.canvas.draw()\n self.enableExport()\n\n\n '''\n Gets username from text field\n Input - self:Ui_Window\n Output - string\n '''\n def get_username(self):\n return (self.ui.usernameLineEdit.text())\n\n '''\n Gets start date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n 
'''\n def get_start_date(self):\n start_month = self.ui.startMonthSpinBox.value()\n start_day = self.ui.startDaySpinBox.value()\n start_year = self.ui.startYearSpinBox.value()\n \n try:\n startDate = datetime.datetime(start_year, start_month,start_day)\n except:\n self.printMessages(['Start date is improperly set. Check to see that the date is correct/exists.'])\n return None\n \n return startDate\n\n '''\n Gets end date from spin boxes\n Input - self:Ui_Window\n Output - datetime.datetime\n '''\n def get_end_date(self):\n end_month = self.ui.endMonthSpinBox.value()\n end_day = self.ui.endDaySpinBox.value()\n end_year = self.ui.endYearSpinBox.value()\n \n try:\n endDate = datetime.datetime(end_year, end_month,end_day)\n except:\n self.printMessages(['End date is improperly set. Check to see that the date is correct/exists.'])\n return None\n \n return endDate\n\n '''\n Toggles the export button.\n Input - self:Ui_Window\n Output - None\n '''\n def enableExport(self):\n self.ui.exportPushButton.setEnabled(True)\n\n '''\n Exports date, score/magntitude, and tweet text to csv and pops up a window when done\n Input - self:Ui_Window\n Output - None\n '''\n def exportValues(self):\n currentTimeDate = datetime.datetime.now()\n currentTimeDate = str(currentTimeDate.year)+'-'+str(currentTimeDate.month)+'-'+str(currentTimeDate.day)+'-'+str(currentTimeDate.hour)+'-'+str(currentTimeDate.minute)+'-'+str(currentTimeDate.second)\n\n with open(currentTimeDate+'_'+self.get_username()+'_score.csv', mode='w') as score_file:\n writer = csv.writer(score_file)\n for i in range(len(self.plotData[0])):\n writer.writerow( [ str(self.plotData[0][i]), self.plotData[1][i], \n self.tweetList[i].full_text.encode(encoding='UTF-8', errors='replace') ] )\n\n with open(currentTimeDate+'_'+self.get_username()+'_magnitude.csv', mode='w') as magnitude_file:\n writer = csv.writer(magnitude_file)\n for i in range(len(self.plotData[0])):\n writer.writerow( [ str(self.plotData[0][i]), self.plotData[2][i], 
\n self.tweetList[i].full_text.encode(encoding='UTF-8', errors='replace') ] )\n\n msgBox = QMessageBox()\n msgBox.setText('CSV files exported!')\n msgBox.exec()\n\n '''\n Prints out messages in a pop up window\n Input - self:Ui_Window\n Output - None\n '''\n def printMessages(self, messageList):\n msgBox = QMessageBox()\n msgBox.setIcon(QMessageBox.Critical)\n msgBox.setWindowTitle('Errors occured!')\n tempString = ''\n\n for message in messageList:\n tempString += (message + '\\n')\n msgBox.setText(tempString)\n msgBox.exec()\n\nif __name__ == \"__main__\":\n app = QApplication(sys.argv)\n\n window = Ui_Window()\n window.show()\n\n sys.exit(app.exec_())",
"step-ids": [
9,
11,
12,
13,
14
]
}
|
[
9,
11,
12,
13,
14
] |
# -*- coding:utf-8 -*-
import datetime
import json
import os
import urllib
import requests
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import properties
from time import sleep
from appium import webdriver
def logPrint(logstr):
    """Print *logstr* with a timestamp and append it to the script's run log.

    The log file lives at ./log/<scriptname>-runlog.log; the directory is
    created on demand so the function works even before the main block's
    setup has run. Returns the path of the log file that was written to
    (new, backward-compatible: previous version implicitly returned None).
    """
    # os.path handles both '/' and '\' separators; the old
    # split(".py")[0].split("/") chain broke on Windows-style paths and on
    # any path containing ".py" elsewhere than the extension.
    script_name = os.path.splitext(os.path.basename(str(__file__)))[0]
    log_dir = os.path.join(".", "log")
    os.makedirs(log_dir, exist_ok=True)
    filepath = os.path.join(log_dir, script_name + "-runlog.log")
    line = str(datetime.datetime.now()) + " " + logstr
    with open(filepath, "a", encoding="utf-8") as f:
        print(line)
        # Keep the original record format: message + tab + newline.
        f.write(line + "\t\n")
    return filepath
def isElementExist(driver, xpath):
    """Return True if an element matching *xpath* exists on the current screen.

    Wraps the driver lookup so that a "no such element" error becomes a
    simple boolean instead of propagating to the caller.
    """
    try:
        driver.find_element_by_xpath(xpath)
    except Exception:
        # The lookup raises when nothing matches; a bare `except:` would also
        # swallow KeyboardInterrupt/SystemExit, so catch Exception instead.
        return False
    return True
def find_toast(driver, contains_message):
    """Return True if a toast whose text contains *contains_message* appears.

    Waits up to 2 seconds for a matching element to become present.
    """
    locat = ("xpath", '//*[contains(@text,"' + contains_message + '")]')
    try:
        WebDriverWait(driver, 2).until(EC.presence_of_element_located(locat))
    except Exception:
        # TimeoutException (or any driver error) means the toast never showed
        # up; a bare `except:` would also hide KeyboardInterrupt/SystemExit.
        return False
    return True
def restart_app(driver):
    """Broadcast the inbox-core restart intent on the device via ADB shell."""
    payload = {
        'command': 'am broadcast -a',
        'args': ['com.inhand.intent.INBOXCORE_RESTART_APP'],
    }
    driver.execute_script("mobile: shell", payload)
def wifi_disable(driver):
    """Switch the device Wi-Fi off by running `svc wifi disable` as root."""
    shell_args = {'command': 'su 0', 'args': ['svc wifi disable']}
    driver.execute_script("mobile: shell", shell_args)
def wifi_enable(driver):
    """Switch the device Wi-Fi on by running `svc wifi enable` as root."""
    shell_args = {'command': 'su 0', 'args': ['svc wifi enable']}
    driver.execute_script("mobile: shell", shell_args)
if __name__ == '__main__':
    # ---- logging setup -------------------------------------------------
    # Create ./log and delete any run log left over from a previous run.
    # NOTE(review): Windows-style "\\log" paths — on POSIX these become odd
    # literal filenames; confirm the script is only run on Windows hosts.
    try:
        logpath = os.getcwd() + "\\log"
        os.mkdir(logpath)
    except:
        pass
    pyfileName = str(__file__).split(".py")[0].split("/")[-1]
    logfilepath = ".\\log\\" + pyfileName + '-runlog.log'
    try:
        os.remove(logfilepath)
    except:
        pass
    # ---- platform login ------------------------------------------------
    # Fetch an OAuth2 access token from the management platform.
    # NOTE(review): client secret and user password are hard-coded here.
    host = 'http://182.150.21.232:10081'
    requesturl = "/oauth2/access_token"
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'
    }
    get_token_value = {
        "client_id": "000017953450251798098136",
        "client_secret": "08E9EC6793345759456CB8BAE52615F3",
        "grant_type": "password",
        "username": "[email protected]",
        "password": "czz123456",
        "password_type": "1",
        "language": "2"
    }
    data = urllib.parse.urlencode(get_token_value).encode('utf-8')
    url = host + requesturl
    request = urllib.request.Request(url, data, headers)
    token_response = urllib.request.urlopen(request).read().decode('utf-8')
    logPrint(token_response)
    access_token = json.loads(token_response)['access_token']

    # Query the platform's goods list; 'total' is the expected number of
    # products the device should report after a successful sync.
    requesturl = "/api/goods/list?cursor=0&limit=30&name=&access_token=" + access_token
    url = host + requesturl
    response = requests.get(url=url, headers={'Content-Type': 'application/json'})
    goods_count = json.loads(response.text)['total']
    print(goods_count)

    # ---- device setup --------------------------------------------------
    # Connect to the local Appium server, make sure Wi-Fi is on, and wipe
    # the cached product pictures so the sync downloads them fresh.
    driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.desired_caps)
    sleep(0.5)
    wifi_enable(driver)
    sleep(0.5)
    opts1 = {'command': 'rm -rf',
             'args': ['/sdcard/inbox/data/picture']}
    redata = driver.execute_script("mobile: shell", opts1)

    # ---- case 1: normal sync from platform -----------------------------
    # Open slot configuration, trigger "sync goods from platform", and
    # check the progress dialog shows the total goods count from the API.
    driver.find_element_by_xpath("//android.widget.TextView[@text='货道配置']").click()
    driver.find_element_by_xpath("//android.widget.TextView[@text='同步商品(从平台)']").click()
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()
    try:
        xpath = "//android.widget.TextView[contains(@text,'总商品数 " + str(goods_count) + "')]"
        logPrint(xpath)
        WebDriverWait(driver, 2, 0.5).until(lambda x: x.find_element_by_xpath(xpath))
        progressFlag = True
    except Exception as e:
        print(e)
        progressFlag = False
    if progressFlag:
        logPrint("同步过程:PASS")
    else:
        logPrint("同步过程:FAIL!!")
    # Wait (up to 3 minutes) for the progress bar to disappear, then expect
    # an "operation succeeded" dialog.
    loadmasklocator = ("xpath", "//android.widget.ProgressBar")
    try:
        WebDriverWait(driver, 180).until_not(EC.presence_of_element_located(loadmasklocator))
        completeFlag = True
    except Exception as e:
        completeFlag = False
    if completeFlag:
        logPrint("同步结果出现:PASS")
    else:
        logPrint("同步结果出现:FAIL!!")
    if isElementExist(driver, "//android.widget.TextView[contains(@text,'操作成功')]"):
        logPrint("同步成功:PASS")
    else:
        logPrint("同步成功:FAIL!!")
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()

    # ---- case 2: re-sync with no changes -------------------------------
    # Reconnect (the app appears to restart after a sync — TODO confirm)
    # and sync again; the app should report "already latest configuration".
    sleep(20)
    driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.desired_caps)
    driver.find_element_by_xpath("//android.widget.TextView[@text='货道配置']").click()
    driver.find_element_by_xpath("//android.widget.TextView[@text='同步商品(从平台)']").click()
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()
    try:
        WebDriverWait(driver, 180).until_not(EC.presence_of_element_located(loadmasklocator))
        completeFlag = True
    except Exception as e:
        completeFlag = False
    if completeFlag:
        logPrint("同步结果出现:PASS")
    else:
        logPrint("同步结果出现:FAIL!!")
    if isElementExist(driver, "//android.widget.TextView[contains(@text,'已经是最新配置')]"):
        logPrint("已经是最新配置:PASS")
    else:
        logPrint("已经是最新配置:FAIL!!")
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()

    # ---- case 3: sync while offline ------------------------------------
    # Disable Wi-Fi first; starting a sync should fail with an
    # "operation failed" dialog within a few seconds.
    wifi_disable(driver)
    driver.find_element_by_xpath("//android.widget.TextView[@text='同步商品(从平台)']").click()
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()
    okdialoglocator = ("xpath", "//android.widget.TextView[contains(@text,'操作失败')]")
    try:
        WebDriverWait(driver, 3).until(EC.presence_of_element_located(okdialoglocator))
        failFlag = True
    except Exception as e:
        failFlag = False
    if failFlag:
        logPrint("断网同步,操作失败:PASS")
    else:
        logPrint("断网同步,操作失败:FAIL!!")
    wifi_enable(driver)
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()

    # ---- case 4: connection lost mid-sync ------------------------------
    # Wipe cached pictures again, start a sync, then cut Wi-Fi 5 s in; the
    # sync is still expected to finish successfully (presumably from data
    # already received — TODO confirm intended behavior).
    opts1 = {'command': 'rm -rf',
             'args': ['/sdcard/inbox/data/picture']}
    redata = driver.execute_script("mobile: shell", opts1)
    sleep(10)
    driver.find_element_by_xpath("//android.widget.TextView[@text='同步商品(从平台)']").click()
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()
    sleep(5)
    wifi_disable(driver)
    loadmasklocator = ("xpath", "//android.widget.ProgressBar")
    try:
        WebDriverWait(driver, 180).until_not(EC.presence_of_element_located(loadmasklocator))
        completeFlag = True
    except Exception as e:
        completeFlag = False
    if completeFlag:
        logPrint("同步结果出现:PASS")
    else:
        logPrint("同步结果出现:FAIL!!")
    if isElementExist(driver, "//android.widget.TextView[contains(@text,'操作成功')]"):
        logPrint("断网结束同步:PASS")
    else:
        logPrint("断网结束同步:FAIL!!")
    driver.find_element_by_xpath("//android.widget.Button[@text='确定']").click()

    # ---- case 5: sync with no platform connection ----------------------
    # Still offline after the app restart: triggering a sync should only
    # show a "not connected to platform" toast. Restore Wi-Fi at the end.
    sleep(12)
    driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.desired_caps)
    driver.find_element_by_xpath("//android.widget.TextView[@text='货道配置']").click()
    driver.find_element_by_xpath("//android.widget.TextView[@text='同步商品(从平台)']").click()
    noNetFlag = find_toast(driver, "平台")
    if noNetFlag:
        logPrint("未与平台建立连接:PASS")
    else:
        logPrint("未与平台建立连接:FAIL!!")
    wifi_enable(driver)
|
normal
|
{
"blob_id": "2465a73d958d88dcd27cfac75a4e7b1fcd6a884e",
"index": 3389,
"step-1": "<mask token>\n\n\ndef logPrint(logstr):\n pyfileName = str(__file__).split('.py')[0].split('/')[-1]\n filepath = '.\\\\log\\\\' + pyfileName + '-runlog.log'\n now = str(datetime.datetime.now())\n logstr = now + ' ' + logstr\n with open(filepath, 'a', encoding='utf-8') as f:\n print(logstr)\n f.write(logstr + '\\t\\n')\n\n\n<mask token>\n\n\ndef find_toast(driver, contains_message):\n \"\"\"判断toast信息\"\"\"\n locat = 'xpath', '//*[contains(@text,\"' + contains_message + '\")]'\n try:\n element = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located(locat))\n return True\n except:\n return False\n\n\n<mask token>\n\n\ndef wifi_disable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi disable']}\n driver.execute_script('mobile: shell', opts)\n\n\ndef wifi_enable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi enable']}\n driver.execute_script('mobile: shell', opts)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef logPrint(logstr):\n pyfileName = str(__file__).split('.py')[0].split('/')[-1]\n filepath = '.\\\\log\\\\' + pyfileName + '-runlog.log'\n now = str(datetime.datetime.now())\n logstr = now + ' ' + logstr\n with open(filepath, 'a', encoding='utf-8') as f:\n print(logstr)\n f.write(logstr + '\\t\\n')\n\n\n<mask token>\n\n\ndef find_toast(driver, contains_message):\n \"\"\"判断toast信息\"\"\"\n locat = 'xpath', '//*[contains(@text,\"' + contains_message + '\")]'\n try:\n element = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located(locat))\n return True\n except:\n return False\n\n\ndef restart_app(driver):\n optsRestartAPP = {'command': 'am broadcast -a', 'args': [\n 'com.inhand.intent.INBOXCORE_RESTART_APP']}\n driver.execute_script('mobile: shell', optsRestartAPP)\n\n\ndef wifi_disable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi disable']}\n driver.execute_script('mobile: shell', opts)\n\n\ndef wifi_enable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi enable']}\n driver.execute_script('mobile: shell', opts)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef logPrint(logstr):\n pyfileName = str(__file__).split('.py')[0].split('/')[-1]\n filepath = '.\\\\log\\\\' + pyfileName + '-runlog.log'\n now = str(datetime.datetime.now())\n logstr = now + ' ' + logstr\n with open(filepath, 'a', encoding='utf-8') as f:\n print(logstr)\n f.write(logstr + '\\t\\n')\n\n\ndef isElementExist(driver, xpath):\n try:\n driver.find_element_by_xpath(xpath)\n return True\n except:\n return False\n\n\ndef find_toast(driver, contains_message):\n \"\"\"判断toast信息\"\"\"\n locat = 'xpath', '//*[contains(@text,\"' + contains_message + '\")]'\n try:\n element = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located(locat))\n return True\n except:\n return False\n\n\ndef restart_app(driver):\n optsRestartAPP = {'command': 'am broadcast -a', 'args': [\n 'com.inhand.intent.INBOXCORE_RESTART_APP']}\n driver.execute_script('mobile: shell', optsRestartAPP)\n\n\ndef wifi_disable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi disable']}\n driver.execute_script('mobile: shell', opts)\n\n\ndef wifi_enable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi enable']}\n driver.execute_script('mobile: shell', opts)\n\n\nif __name__ == '__main__':\n try:\n logpath = os.getcwd() + '\\\\log'\n os.mkdir(logpath)\n except:\n pass\n pyfileName = str(__file__).split('.py')[0].split('/')[-1]\n logfilepath = '.\\\\log\\\\' + pyfileName + '-runlog.log'\n try:\n os.remove(logfilepath)\n except:\n pass\n host = 'http://182.150.21.232:10081'\n requesturl = '/oauth2/access_token'\n headers = {'Content-Type': 'application/x-www-form-urlencoded',\n 'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'\n }\n get_token_value = {'client_id': '000017953450251798098136',\n 'client_secret': '08E9EC6793345759456CB8BAE52615F3',\n 'grant_type': 'password', 'username': '[email protected]',\n 'password': 'czz123456', 'password_type': '1', 'language': '2'}\n 
data = urllib.parse.urlencode(get_token_value).encode('utf-8')\n url = host + requesturl\n request = urllib.request.Request(url, data, headers)\n token_response = urllib.request.urlopen(request).read().decode('utf-8')\n logPrint(token_response)\n access_token = json.loads(token_response)['access_token']\n requesturl = ('/api/goods/list?cursor=0&limit=30&name=&access_token=' +\n access_token)\n url = host + requesturl\n response = requests.get(url=url, headers={'Content-Type':\n 'application/json'})\n goods_count = json.loads(response.text)['total']\n print(goods_count)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.\n desired_caps)\n sleep(0.5)\n wifi_enable(driver)\n sleep(0.5)\n opts1 = {'command': 'rm -rf', 'args': ['/sdcard/inbox/data/picture']}\n redata = driver.execute_script('mobile: shell', opts1)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n try:\n xpath = \"//android.widget.TextView[contains(@text,'总商品数 \" + str(\n goods_count) + \"')]\"\n logPrint(xpath)\n WebDriverWait(driver, 2, 0.5).until(lambda x: x.\n find_element_by_xpath(xpath))\n progressFlag = True\n except Exception as e:\n print(e)\n progressFlag = False\n if progressFlag:\n logPrint('同步过程:PASS')\n else:\n logPrint('同步过程:FAIL!!')\n loadmasklocator = 'xpath', '//android.widget.ProgressBar'\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located\n (loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint('同步结果出现:PASS')\n else:\n logPrint('同步结果出现:FAIL!!')\n if isElementExist(driver,\n \"//android.widget.TextView[contains(@text,'操作成功')]\"):\n logPrint('同步成功:PASS')\n else:\n logPrint('同步成功:FAIL!!')\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n 
sleep(20)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.\n desired_caps)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located\n (loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint('同步结果出现:PASS')\n else:\n logPrint('同步结果出现:FAIL!!')\n if isElementExist(driver,\n \"//android.widget.TextView[contains(@text,'已经是最新配置')]\"):\n logPrint('已经是最新配置:PASS')\n else:\n logPrint('已经是最新配置:FAIL!!')\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n wifi_disable(driver)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n okdialoglocator = ('xpath',\n \"//android.widget.TextView[contains(@text,'操作失败')]\")\n try:\n WebDriverWait(driver, 3).until(EC.presence_of_element_located(\n okdialoglocator))\n failFlag = True\n except Exception as e:\n failFlag = False\n if failFlag:\n logPrint('断网同步,操作失败:PASS')\n else:\n logPrint('断网同步,操作失败:FAIL!!')\n wifi_enable(driver)\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n opts1 = {'command': 'rm -rf', 'args': ['/sdcard/inbox/data/picture']}\n redata = driver.execute_script('mobile: shell', opts1)\n sleep(10)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(5)\n wifi_disable(driver)\n loadmasklocator = 'xpath', '//android.widget.ProgressBar'\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located\n (loadmasklocator))\n completeFlag = True\n except 
Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint('同步结果出现:PASS')\n else:\n logPrint('同步结果出现:FAIL!!')\n if isElementExist(driver,\n \"//android.widget.TextView[contains(@text,'操作成功')]\"):\n logPrint('断网结束同步:PASS')\n else:\n logPrint('断网结束同步:FAIL!!')\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(12)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.\n desired_caps)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n noNetFlag = find_toast(driver, '平台')\n if noNetFlag:\n logPrint('未与平台建立连接:PASS')\n else:\n logPrint('未与平台建立连接:FAIL!!')\n wifi_enable(driver)\n",
"step-4": "import datetime\nimport json\nimport os\nimport urllib\nimport requests\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nimport properties\nfrom time import sleep\nfrom appium import webdriver\n\n\ndef logPrint(logstr):\n pyfileName = str(__file__).split('.py')[0].split('/')[-1]\n filepath = '.\\\\log\\\\' + pyfileName + '-runlog.log'\n now = str(datetime.datetime.now())\n logstr = now + ' ' + logstr\n with open(filepath, 'a', encoding='utf-8') as f:\n print(logstr)\n f.write(logstr + '\\t\\n')\n\n\ndef isElementExist(driver, xpath):\n try:\n driver.find_element_by_xpath(xpath)\n return True\n except:\n return False\n\n\ndef find_toast(driver, contains_message):\n \"\"\"判断toast信息\"\"\"\n locat = 'xpath', '//*[contains(@text,\"' + contains_message + '\")]'\n try:\n element = WebDriverWait(driver, 2).until(EC.\n presence_of_element_located(locat))\n return True\n except:\n return False\n\n\ndef restart_app(driver):\n optsRestartAPP = {'command': 'am broadcast -a', 'args': [\n 'com.inhand.intent.INBOXCORE_RESTART_APP']}\n driver.execute_script('mobile: shell', optsRestartAPP)\n\n\ndef wifi_disable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi disable']}\n driver.execute_script('mobile: shell', opts)\n\n\ndef wifi_enable(driver):\n opts = {'command': 'su 0', 'args': ['svc wifi enable']}\n driver.execute_script('mobile: shell', opts)\n\n\nif __name__ == '__main__':\n try:\n logpath = os.getcwd() + '\\\\log'\n os.mkdir(logpath)\n except:\n pass\n pyfileName = str(__file__).split('.py')[0].split('/')[-1]\n logfilepath = '.\\\\log\\\\' + pyfileName + '-runlog.log'\n try:\n os.remove(logfilepath)\n except:\n pass\n host = 'http://182.150.21.232:10081'\n requesturl = '/oauth2/access_token'\n headers = {'Content-Type': 'application/x-www-form-urlencoded',\n 'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 
(KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'\n }\n get_token_value = {'client_id': '000017953450251798098136',\n 'client_secret': '08E9EC6793345759456CB8BAE52615F3',\n 'grant_type': 'password', 'username': '[email protected]',\n 'password': 'czz123456', 'password_type': '1', 'language': '2'}\n data = urllib.parse.urlencode(get_token_value).encode('utf-8')\n url = host + requesturl\n request = urllib.request.Request(url, data, headers)\n token_response = urllib.request.urlopen(request).read().decode('utf-8')\n logPrint(token_response)\n access_token = json.loads(token_response)['access_token']\n requesturl = ('/api/goods/list?cursor=0&limit=30&name=&access_token=' +\n access_token)\n url = host + requesturl\n response = requests.get(url=url, headers={'Content-Type':\n 'application/json'})\n goods_count = json.loads(response.text)['total']\n print(goods_count)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.\n desired_caps)\n sleep(0.5)\n wifi_enable(driver)\n sleep(0.5)\n opts1 = {'command': 'rm -rf', 'args': ['/sdcard/inbox/data/picture']}\n redata = driver.execute_script('mobile: shell', opts1)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n try:\n xpath = \"//android.widget.TextView[contains(@text,'总商品数 \" + str(\n goods_count) + \"')]\"\n logPrint(xpath)\n WebDriverWait(driver, 2, 0.5).until(lambda x: x.\n find_element_by_xpath(xpath))\n progressFlag = True\n except Exception as e:\n print(e)\n progressFlag = False\n if progressFlag:\n logPrint('同步过程:PASS')\n else:\n logPrint('同步过程:FAIL!!')\n loadmasklocator = 'xpath', '//android.widget.ProgressBar'\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located\n (loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if 
completeFlag:\n logPrint('同步结果出现:PASS')\n else:\n logPrint('同步结果出现:FAIL!!')\n if isElementExist(driver,\n \"//android.widget.TextView[contains(@text,'操作成功')]\"):\n logPrint('同步成功:PASS')\n else:\n logPrint('同步成功:FAIL!!')\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(20)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.\n desired_caps)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located\n (loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint('同步结果出现:PASS')\n else:\n logPrint('同步结果出现:FAIL!!')\n if isElementExist(driver,\n \"//android.widget.TextView[contains(@text,'已经是最新配置')]\"):\n logPrint('已经是最新配置:PASS')\n else:\n logPrint('已经是最新配置:FAIL!!')\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n wifi_disable(driver)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n okdialoglocator = ('xpath',\n \"//android.widget.TextView[contains(@text,'操作失败')]\")\n try:\n WebDriverWait(driver, 3).until(EC.presence_of_element_located(\n okdialoglocator))\n failFlag = True\n except Exception as e:\n failFlag = False\n if failFlag:\n logPrint('断网同步,操作失败:PASS')\n else:\n logPrint('断网同步,操作失败:FAIL!!')\n wifi_enable(driver)\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n opts1 = {'command': 'rm -rf', 'args': ['/sdcard/inbox/data/picture']}\n redata = driver.execute_script('mobile: shell', opts1)\n sleep(10)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n 
driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(5)\n wifi_disable(driver)\n loadmasklocator = 'xpath', '//android.widget.ProgressBar'\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located\n (loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint('同步结果出现:PASS')\n else:\n logPrint('同步结果出现:FAIL!!')\n if isElementExist(driver,\n \"//android.widget.TextView[contains(@text,'操作成功')]\"):\n logPrint('断网结束同步:PASS')\n else:\n logPrint('断网结束同步:FAIL!!')\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(12)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.\n desired_caps)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\"\n ).click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\"\n ).click()\n noNetFlag = find_toast(driver, '平台')\n if noNetFlag:\n logPrint('未与平台建立连接:PASS')\n else:\n logPrint('未与平台建立连接:FAIL!!')\n wifi_enable(driver)\n",
"step-5": "# -*- coding:utf-8 -*-\nimport datetime\nimport json\nimport os\nimport urllib\nimport requests\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nimport properties\nfrom time import sleep\nfrom appium import webdriver\n\n\ndef logPrint(logstr):\n pyfileName = str(__file__).split(\".py\")[0].split(\"/\")[-1]\n filepath = \".\\\\log\\\\\" + pyfileName + '-runlog.log'\n now = str(datetime.datetime.now())\n logstr = now + ' ' + logstr\n with open(filepath, 'a', encoding='utf-8') as f:\n print(logstr)\n f.write(logstr + '\\t\\n')\n\n\ndef isElementExist(driver, xpath):\n try:\n driver.find_element_by_xpath(xpath)\n return True\n except:\n return False\n\n\ndef find_toast(driver, contains_message):\n '''判断toast信息'''\n locat = (\"xpath\", '//*[contains(@text,\"' + contains_message + '\")]')\n try:\n element = WebDriverWait(driver, 2).until(EC.presence_of_element_located(locat))\n return True\n except:\n return False\n\n\ndef restart_app(driver):\n optsRestartAPP = {'command': 'am broadcast -a',\n 'args': ['com.inhand.intent.INBOXCORE_RESTART_APP']}\n driver.execute_script(\"mobile: shell\", optsRestartAPP)\n\n\ndef wifi_disable(driver):\n opts = {'command': 'su 0',\n 'args': ['svc wifi disable']}\n driver.execute_script(\"mobile: shell\", opts)\n\ndef wifi_enable(driver):\n opts = {'command': 'su 0',\n 'args': ['svc wifi enable']}\n driver.execute_script(\"mobile: shell\", opts)\n\nif __name__ == '__main__':\n try:\n logpath = os.getcwd() + \"\\\\log\"\n # print(logpath)\n os.mkdir(logpath)\n except:\n pass\n pyfileName = str(__file__).split(\".py\")[0].split(\"/\")[-1]\n logfilepath = \".\\\\log\\\\\" + pyfileName + '-runlog.log'\n try:\n os.remove(logfilepath)\n except:\n pass\n host = 'http://182.150.21.232:10081'\n requesturl = \"/oauth2/access_token\"\n headers = {\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n 
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'\n }\n get_token_value = {\n \"client_id\": \"000017953450251798098136\",\n \"client_secret\": \"08E9EC6793345759456CB8BAE52615F3\",\n \"grant_type\": \"password\",\n \"username\": \"[email protected]\",\n \"password\": \"czz123456\",\n \"password_type\": \"1\",\n \"language\": \"2\"\n }\n data = urllib.parse.urlencode(get_token_value).encode('utf-8')\n url = host + requesturl\n request = urllib.request.Request(url, data, headers)\n token_response = urllib.request.urlopen(request).read().decode('utf-8')\n logPrint(token_response)\n access_token = json.loads(token_response)['access_token']\n\n requesturl = \"/api/goods/list?cursor=0&limit=30&name=&access_token=\" + access_token\n url = host + requesturl\n response = requests.get(url=url, headers={'Content-Type': 'application/json'})\n goods_count = json.loads(response.text)['total']\n print(goods_count)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.desired_caps)\n sleep(0.5)\n wifi_enable(driver)\n sleep(0.5)\n opts1 = {'command': 'rm -rf',\n 'args': ['/sdcard/inbox/data/picture']}\n redata = driver.execute_script(\"mobile: shell\", opts1)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\").click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\").click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n try:\n xpath = \"//android.widget.TextView[contains(@text,'总商品数 \" + str(goods_count) + \"')]\"\n logPrint(xpath)\n WebDriverWait(driver, 2, 0.5).until(lambda x: x.find_element_by_xpath(xpath))\n progressFlag = True\n except Exception as e:\n print(e)\n progressFlag = False\n if progressFlag:\n logPrint(\"同步过程:PASS\")\n else:\n logPrint(\"同步过程:FAIL!!\")\n loadmasklocator = (\"xpath\", \"//android.widget.ProgressBar\")\n try:\n WebDriverWait(driver, 
180).until_not(EC.presence_of_element_located(loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint(\"同步结果出现:PASS\")\n else:\n logPrint(\"同步结果出现:FAIL!!\")\n if isElementExist(driver, \"//android.widget.TextView[contains(@text,'操作成功')]\"):\n logPrint(\"同步成功:PASS\")\n else:\n logPrint(\"同步成功:FAIL!!\")\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(20)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.desired_caps)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\").click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\").click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located(loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint(\"同步结果出现:PASS\")\n else:\n logPrint(\"同步结果出现:FAIL!!\")\n if isElementExist(driver, \"//android.widget.TextView[contains(@text,'已经是最新配置')]\"):\n logPrint(\"已经是最新配置:PASS\")\n else:\n logPrint(\"已经是最新配置:FAIL!!\")\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n wifi_disable(driver)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\").click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n okdialoglocator = (\"xpath\", \"//android.widget.TextView[contains(@text,'操作失败')]\")\n try:\n WebDriverWait(driver, 3).until(EC.presence_of_element_located(okdialoglocator))\n failFlag = True\n except Exception as e:\n failFlag = False\n if failFlag:\n logPrint(\"断网同步,操作失败:PASS\")\n else:\n logPrint(\"断网同步,操作失败:FAIL!!\")\n wifi_enable(driver)\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n opts1 = {'command': 'rm -rf',\n 'args': ['/sdcard/inbox/data/picture']}\n redata = 
driver.execute_script(\"mobile: shell\", opts1)\n sleep(10)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\").click()\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(5)\n wifi_disable(driver)\n loadmasklocator = (\"xpath\", \"//android.widget.ProgressBar\")\n try:\n WebDriverWait(driver, 180).until_not(EC.presence_of_element_located(loadmasklocator))\n completeFlag = True\n except Exception as e:\n completeFlag = False\n if completeFlag:\n logPrint(\"同步结果出现:PASS\")\n else:\n logPrint(\"同步结果出现:FAIL!!\")\n if isElementExist(driver, \"//android.widget.TextView[contains(@text,'操作成功')]\"):\n logPrint(\"断网结束同步:PASS\")\n else:\n logPrint(\"断网结束同步:FAIL!!\")\n driver.find_element_by_xpath(\"//android.widget.Button[@text='确定']\").click()\n sleep(12)\n driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', properties.desired_caps)\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='货道配置']\").click()\n driver.find_element_by_xpath(\"//android.widget.TextView[@text='同步商品(从平台)']\").click()\n noNetFlag = find_toast(driver, \"平台\")\n if noNetFlag:\n logPrint(\"未与平台建立连接:PASS\")\n else:\n logPrint(\"未与平台建立连接:FAIL!!\")\n wifi_enable(driver)\n",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
from utils import *
from wordEmbedding import *
print("bat dau")
def predict(text, phobert, tokenizer):
model = load_model('model.h5')
X_test = word2vec(text, phobert, tokenizer)
x_test_tensor = tf.convert_to_tensor(X_test)
X_tests = []
X_tests.append(x_test_tensor)
X_tests = tf.convert_to_tensor(X_tests)
y = model.predict(X_tests)
y_predict = np.argmax(y, axis=-1)
print(y_predict+1)
if __name__ == "__main__":
print("1 Chỗ này hơi lâu bạn đợi tí")
phobert = AutoModel.from_pretrained("vinai/phobert-base")
print("2")
tokenizer = AutoTokenizer.from_pretrained("vinai/phobert-base", use_fast=False)
print("3")
predict("tôi làm giấy X ở đâu", phobert, tokenizer)
print("4")
predict("tôi làm giấy X ở đâu", phobert, tokenizer)
print("5")
predict("tôi làm giấy X cần những gì", phobert, tokenizer)
|
normal
|
{
"blob_id": "d2c9ee64472c74767812d842d2c49eec962e28c6",
"index": 4451,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef predict(text, phobert, tokenizer):\n model = load_model('model.h5')\n X_test = word2vec(text, phobert, tokenizer)\n x_test_tensor = tf.convert_to_tensor(X_test)\n X_tests = []\n X_tests.append(x_test_tensor)\n X_tests = tf.convert_to_tensor(X_tests)\n y = model.predict(X_tests)\n y_predict = np.argmax(y, axis=-1)\n print(y_predict + 1)\n\n\n<mask token>\n",
"step-3": "<mask token>\nprint('bat dau')\n\n\ndef predict(text, phobert, tokenizer):\n model = load_model('model.h5')\n X_test = word2vec(text, phobert, tokenizer)\n x_test_tensor = tf.convert_to_tensor(X_test)\n X_tests = []\n X_tests.append(x_test_tensor)\n X_tests = tf.convert_to_tensor(X_tests)\n y = model.predict(X_tests)\n y_predict = np.argmax(y, axis=-1)\n print(y_predict + 1)\n\n\nif __name__ == '__main__':\n print('1 Chỗ này hơi lâu bạn đợi tí')\n phobert = AutoModel.from_pretrained('vinai/phobert-base')\n print('2')\n tokenizer = AutoTokenizer.from_pretrained('vinai/phobert-base',\n use_fast=False)\n print('3')\n predict('tôi làm giấy X ở đâu', phobert, tokenizer)\n print('4')\n predict('tôi làm giấy X ở đâu', phobert, tokenizer)\n print('5')\n predict('tôi làm giấy X cần những gì', phobert, tokenizer)\n",
"step-4": "from utils import *\nfrom wordEmbedding import *\nprint('bat dau')\n\n\ndef predict(text, phobert, tokenizer):\n model = load_model('model.h5')\n X_test = word2vec(text, phobert, tokenizer)\n x_test_tensor = tf.convert_to_tensor(X_test)\n X_tests = []\n X_tests.append(x_test_tensor)\n X_tests = tf.convert_to_tensor(X_tests)\n y = model.predict(X_tests)\n y_predict = np.argmax(y, axis=-1)\n print(y_predict + 1)\n\n\nif __name__ == '__main__':\n print('1 Chỗ này hơi lâu bạn đợi tí')\n phobert = AutoModel.from_pretrained('vinai/phobert-base')\n print('2')\n tokenizer = AutoTokenizer.from_pretrained('vinai/phobert-base',\n use_fast=False)\n print('3')\n predict('tôi làm giấy X ở đâu', phobert, tokenizer)\n print('4')\n predict('tôi làm giấy X ở đâu', phobert, tokenizer)\n print('5')\n predict('tôi làm giấy X cần những gì', phobert, tokenizer)\n",
"step-5": "from utils import *\nfrom wordEmbedding import *\nprint(\"bat dau\")\n\ndef predict(text, phobert, tokenizer):\n model = load_model('model.h5')\n X_test = word2vec(text, phobert, tokenizer)\n x_test_tensor = tf.convert_to_tensor(X_test)\n\n X_tests = []\n X_tests.append(x_test_tensor)\n\n X_tests = tf.convert_to_tensor(X_tests)\n y = model.predict(X_tests)\n y_predict = np.argmax(y, axis=-1)\n\n print(y_predict+1)\n\nif __name__ == \"__main__\":\n print(\"1 Chỗ này hơi lâu bạn đợi tí\")\n phobert = AutoModel.from_pretrained(\"vinai/phobert-base\")\n print(\"2\")\n tokenizer = AutoTokenizer.from_pretrained(\"vinai/phobert-base\", use_fast=False)\n print(\"3\")\n predict(\"tôi làm giấy X ở đâu\", phobert, tokenizer)\n print(\"4\")\n predict(\"tôi làm giấy X ở đâu\", phobert, tokenizer)\n print(\"5\")\n predict(\"tôi làm giấy X cần những gì\", phobert, tokenizer)\n \n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import base64
import string
def hexStringtoBytes(hexstring):
byteArray = bytes.fromhex(hexstring)
return byteArray
def xorBytes(bytes1, bytes2):
xored = bytes([x^bytes2[i] for i,x in enumerate(bytes1)])
return xored
def xorAgainstCharacter(byteArray, character):
str2 = [ord(character)] * len(byteArray)
return xorBytes(byteArray,str2)
def scoreString(input):
arr = [(chr(x) in string.printable) for x in input]
return arr.count(True)
if __name__ == "__main__":
hexstring = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
bytes1 = hexStringtoBytes(hexstring)
scores = []
for x in string.printable:
temp = xorAgainstCharacter(bytes1, x)
print(str(x), str(temp))
scores.append(scoreString(temp))
|
normal
|
{
"blob_id": "a32fb683f8d46f901e8dcd2d075ace22ee81e076",
"index": 451,
"step-1": "<mask token>\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\n<mask token>\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\ndef xorAgainstCharacter(byteArray, character):\n str2 = [ord(character)] * len(byteArray)\n return xorBytes(byteArray, str2)\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\ndef xorAgainstCharacter(byteArray, character):\n str2 = [ord(character)] * len(byteArray)\n return xorBytes(byteArray, str2)\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\nif __name__ == '__main__':\n hexstring = (\n '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736')\n bytes1 = hexStringtoBytes(hexstring)\n scores = []\n for x in string.printable:\n temp = xorAgainstCharacter(bytes1, x)\n print(str(x), str(temp))\n scores.append(scoreString(temp))\n",
"step-4": "import base64\nimport string\n\n\ndef hexStringtoBytes(hexstring):\n byteArray = bytes.fromhex(hexstring)\n return byteArray\n\n\ndef xorBytes(bytes1, bytes2):\n xored = bytes([(x ^ bytes2[i]) for i, x in enumerate(bytes1)])\n return xored\n\n\ndef xorAgainstCharacter(byteArray, character):\n str2 = [ord(character)] * len(byteArray)\n return xorBytes(byteArray, str2)\n\n\ndef scoreString(input):\n arr = [(chr(x) in string.printable) for x in input]\n return arr.count(True)\n\n\nif __name__ == '__main__':\n hexstring = (\n '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736')\n bytes1 = hexStringtoBytes(hexstring)\n scores = []\n for x in string.printable:\n temp = xorAgainstCharacter(bytes1, x)\n print(str(x), str(temp))\n scores.append(scoreString(temp))\n",
"step-5": "import base64\r\nimport string\r\ndef hexStringtoBytes(hexstring):\r\n byteArray = bytes.fromhex(hexstring)\r\n return byteArray\r\n\r\ndef xorBytes(bytes1, bytes2):\r\n xored = bytes([x^bytes2[i] for i,x in enumerate(bytes1)])\r\n return xored\r\n\r\ndef xorAgainstCharacter(byteArray, character):\r\n str2 = [ord(character)] * len(byteArray)\r\n return xorBytes(byteArray,str2)\r\n\r\ndef scoreString(input):\r\n arr = [(chr(x) in string.printable) for x in input]\r\n return arr.count(True)\r\n\r\nif __name__ == \"__main__\":\r\n hexstring = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'\r\n bytes1 = hexStringtoBytes(hexstring)\r\n scores = []\r\n for x in string.printable:\r\n temp = xorAgainstCharacter(bytes1, x)\r\n print(str(x), str(temp))\r\n scores.append(scoreString(temp))\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import mechanicalsoup
from bs4 import BeautifulSoup
import re
import json
def extract_title(page):
return page.find("header").find("h1").contents[0]
def extract_colours(page):
color_list = page.find("ul")
return list(dict.fromkeys(re.findall("#\w+", str(color_list.contents))))
def get_colours_from_page(browser, baseurl, target_page):
response = browser.open(baseurl + target_page)
soup = BeautifulSoup(response.text, 'lxml')
extract = soup.find("section", {"id": "item"})
entity = {"title": extract_title(extract), "colours": extract_colours(extract)}
return entity
def get_links_from_article(articles):
links = []
for article in articles:
links.append(article.find("a").attrs['href'])
return links
def scrape_flag_pagination_page(browser, baseurl, pageCount):
response = browser.open(baseurl + "/flags?page={0}".format(pageCount))
soup = BeautifulSoup(response.text, 'lxml')
flag_articles = soup.findAll("article")
return get_links_from_article(flag_articles)
baseurl = "https://encycolorpedia.com"
browser = mechanicalsoup.StatefulBrowser(raise_on_404=True)
list_of_urls = []
flag_count = 0
pageCount = 1
while(True):
try:
list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount)
except mechanicalsoup.utils.LinkNotFoundError:
break
pageCount += 1
package = []
for url in list_of_urls:
package.append(get_colours_from_page(browser, baseurl, url))
with open('flag_colours.json', 'w', encoding='utf-8') as f:
json.dump(package, f, ensure_ascii=False, indent=4)
|
normal
|
{
"blob_id": "9fd33089a9dc919ef2fb2698059e60a24a0e05e6",
"index": 6118,
"step-1": "<mask token>\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\n<mask token>\nwhile True:\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount\n )\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\n<mask token>\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)\n",
"step-3": "<mask token>\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\nbaseurl = 'https://encycolorpedia.com'\nbrowser = mechanicalsoup.StatefulBrowser(raise_on_404=True)\nlist_of_urls = []\nflag_count = 0\npageCount = 1\nwhile True:\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount\n )\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\npackage = []\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)\n",
"step-4": "import mechanicalsoup\nfrom bs4 import BeautifulSoup\nimport re\nimport json\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\nbaseurl = 'https://encycolorpedia.com'\nbrowser = mechanicalsoup.StatefulBrowser(raise_on_404=True)\nlist_of_urls = []\nflag_count = 0\npageCount = 1\nwhile True:\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount\n )\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\npackage = []\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)\n",
"step-5": "import mechanicalsoup\nfrom bs4 import BeautifulSoup\nimport re\nimport json\n\n\ndef extract_title(page):\n return page.find(\"header\").find(\"h1\").contents[0]\n\n\n\ndef extract_colours(page):\n color_list = page.find(\"ul\")\n return list(dict.fromkeys(re.findall(\"#\\w+\", str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find(\"section\", {\"id\": \"item\"})\n entity = {\"title\": extract_title(extract), \"colours\": extract_colours(extract)}\n return entity\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find(\"a\").attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + \"/flags?page={0}\".format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll(\"article\")\n return get_links_from_article(flag_articles)\n\n\n\nbaseurl = \"https://encycolorpedia.com\"\nbrowser = mechanicalsoup.StatefulBrowser(raise_on_404=True)\nlist_of_urls = []\nflag_count = 0\npageCount = 1\nwhile(True):\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount)\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\npackage = []\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\n\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.