code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
---|---|---|---|
<|reserved_special_token_0|>
@register.inclusion_tag('user/user_list.html')
def user_list():
"""show user name list"""
users = User.objects.all()
return {'users': users}
@register.simple_tag()
def accept_request(pk_login_user, pk_other_user):
RequestFollow.objects.accept_request(pk_login_user, pk_other_user)
return 'accept request'
<|reserved_special_token_0|>
@register.simple_tag()
def count_followers(pk):
""" count followers user"""
followers = Follow.objects.followers(pk)
return len(followers)
@register.simple_tag()
def count_following(pk):
""" count following user"""
following = Follow.objects.following(pk)
return len(following)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register.inclusion_tag('user/user_list.html')
def user_list():
"""show user name list"""
users = User.objects.all()
return {'users': users}
@register.simple_tag()
def accept_request(pk_login_user, pk_other_user):
RequestFollow.objects.accept_request(pk_login_user, pk_other_user)
return 'accept request'
@register.simple_tag()
def delete_request(pk_login_user, pk_other_user):
RequestFollow.objects.delete_request(pk_login_user, pk_other_user)
return 'delete request'
@register.simple_tag()
def count_followers(pk):
""" count followers user"""
followers = Follow.objects.followers(pk)
return len(followers)
@register.simple_tag()
def count_following(pk):
""" count following user"""
following = Follow.objects.following(pk)
return len(following)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
register = template.Library()
@register.inclusion_tag('user/user_list.html')
def user_list():
"""show user name list"""
users = User.objects.all()
return {'users': users}
@register.simple_tag()
def accept_request(pk_login_user, pk_other_user):
RequestFollow.objects.accept_request(pk_login_user, pk_other_user)
return 'accept request'
@register.simple_tag()
def delete_request(pk_login_user, pk_other_user):
RequestFollow.objects.delete_request(pk_login_user, pk_other_user)
return 'delete request'
@register.simple_tag()
def count_followers(pk):
""" count followers user"""
followers = Follow.objects.followers(pk)
return len(followers)
@register.simple_tag()
def count_following(pk):
""" count following user"""
following = Follow.objects.following(pk)
return len(following)
<|reserved_special_token_1|>
from django import template
from apps.account.models import User, Follow, RequestFollow
from apps.post.models import Post
register = template.Library()
@register.inclusion_tag('user/user_list.html')
def user_list():
"""show user name list"""
users = User.objects.all()
return {'users': users}
@register.simple_tag()
def accept_request(pk_login_user, pk_other_user):
RequestFollow.objects.accept_request(pk_login_user, pk_other_user)
return 'accept request'
@register.simple_tag()
def delete_request(pk_login_user, pk_other_user):
RequestFollow.objects.delete_request(pk_login_user, pk_other_user)
return 'delete request'
@register.simple_tag()
def count_followers(pk):
""" count followers user"""
followers = Follow.objects.followers(pk)
return len(followers)
@register.simple_tag()
def count_following(pk):
""" count following user"""
following = Follow.objects.following(pk)
return len(following)
<|reserved_special_token_1|>
from django import template
from apps.account.models import User, Follow, RequestFollow
from apps.post.models import Post
register = template.Library()
@register.inclusion_tag('user/user_list.html')
def user_list():
"""show user name list"""
users = User.objects.all()
return {"users": users}
# @register.inclusion_tag('user/following_post_list.html')
# def following_post_list(pk):
# """show user following
# input:pk user
# output: list following
# """
# following = Follow.objects.following(pk)
# posts = Post.objects.filter(author__email__in=following).values('title', 'author__email')
# return {'posts': posts}
# @register.simple_tag()
# def send_request_follow(pk_login_user, pk_other_user):
# """
# Follow the user
# :param pk_login_user:
# :param pk_other_user:
# :return: message
# """
# return RequestFollow.objects.request_following_user(pk_login_user, pk_other_user)
@register.simple_tag()
def accept_request(pk_login_user, pk_other_user):
RequestFollow.objects.accept_request(pk_login_user, pk_other_user)
return "accept request"
@register.simple_tag()
def delete_request(pk_login_user, pk_other_user):
RequestFollow.objects.delete_request(pk_login_user, pk_other_user)
return "delete request"
@register.simple_tag()
def count_followers(pk):
""" count followers user"""
followers = Follow.objects.followers(pk)
return len(followers)
@register.simple_tag()
def count_following(pk):
""" count following user"""
following = Follow.objects.following(pk)
return len(following)
|
flexible
|
{
"blob_id": "999c19fd760ffc482a15f5a14e188d416fcc5f21",
"index": 7218,
"step-1": "<mask token>\n\n\[email protected]_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\[email protected]_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\n<mask token>\n\n\[email protected]_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\[email protected]_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-2": "<mask token>\n\n\[email protected]_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\[email protected]_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\[email protected]_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return 'delete request'\n\n\[email protected]_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\[email protected]_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-3": "<mask token>\nregister = template.Library()\n\n\[email protected]_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\[email protected]_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\[email protected]_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return 'delete request'\n\n\[email protected]_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\[email protected]_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-4": "from django import template\nfrom apps.account.models import User, Follow, RequestFollow\nfrom apps.post.models import Post\nregister = template.Library()\n\n\[email protected]_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {'users': users}\n\n\[email protected]_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return 'accept request'\n\n\[email protected]_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return 'delete request'\n\n\[email protected]_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\[email protected]_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-5": "from django import template\n\nfrom apps.account.models import User, Follow, RequestFollow\nfrom apps.post.models import Post\n\nregister = template.Library()\n\n\[email protected]_tag('user/user_list.html')\ndef user_list():\n \"\"\"show user name list\"\"\"\n users = User.objects.all()\n return {\"users\": users}\n\n\n# @register.inclusion_tag('user/following_post_list.html')\n# def following_post_list(pk):\n# \"\"\"show user following\n# input:pk user\n# output: list following\n# \"\"\"\n# following = Follow.objects.following(pk)\n# posts = Post.objects.filter(author__email__in=following).values('title', 'author__email')\n# return {'posts': posts}\n\n\n# @register.simple_tag()\n# def send_request_follow(pk_login_user, pk_other_user):\n# \"\"\"\n# Follow the user\n# :param pk_login_user:\n# :param pk_other_user:\n# :return: message\n# \"\"\"\n# return RequestFollow.objects.request_following_user(pk_login_user, pk_other_user)\n\n\[email protected]_tag()\ndef accept_request(pk_login_user, pk_other_user):\n RequestFollow.objects.accept_request(pk_login_user, pk_other_user)\n return \"accept request\"\n\n\[email protected]_tag()\ndef delete_request(pk_login_user, pk_other_user):\n RequestFollow.objects.delete_request(pk_login_user, pk_other_user)\n return \"delete request\"\n\n\[email protected]_tag()\ndef count_followers(pk):\n \"\"\" count followers user\"\"\"\n\n followers = Follow.objects.followers(pk)\n return len(followers)\n\n\[email protected]_tag()\ndef count_following(pk):\n \"\"\" count following user\"\"\"\n following = Follow.objects.following(pk)\n return len(following)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import sys
import pathlib
from matplotlib import pyplot as plt
import matplotlib as mpl
script_name = pathlib.Path(sys.argv[0]).stem
FIGURES_DIR = pathlib.Path(
__file__).parents[2] / "figures" / "simulations" / script_name
FIGURES_DIR.mkdir(exist_ok=True, parents=True)
# mpl.rc("text", usetex=True)
# mpl.rc("font", family="serif")
# mpl.rc(
# "text.latex",
# preamble=r"\usepackage{mathpazo} \usepackage{eulervm} \usepackage{amssymb}"
# r"\usepackage{amsmath} \usepackage{bm} \usepackage{DejaVuSans}",
# )
|
normal
|
{
"blob_id": "fc26574ac8628d7e2896e3e6d055ac61264c7db0",
"index": 1302,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nFIGURES_DIR.mkdir(exist_ok=True, parents=True)\n",
"step-3": "<mask token>\nscript_name = pathlib.Path(sys.argv[0]).stem\nFIGURES_DIR = pathlib.Path(__file__).parents[2\n ] / 'figures' / 'simulations' / script_name\nFIGURES_DIR.mkdir(exist_ok=True, parents=True)\n",
"step-4": "import sys\nimport pathlib\nfrom matplotlib import pyplot as plt\nimport matplotlib as mpl\nscript_name = pathlib.Path(sys.argv[0]).stem\nFIGURES_DIR = pathlib.Path(__file__).parents[2\n ] / 'figures' / 'simulations' / script_name\nFIGURES_DIR.mkdir(exist_ok=True, parents=True)\n",
"step-5": "import sys\nimport pathlib\n\nfrom matplotlib import pyplot as plt\nimport matplotlib as mpl\n\nscript_name = pathlib.Path(sys.argv[0]).stem\nFIGURES_DIR = pathlib.Path(\n __file__).parents[2] / \"figures\" / \"simulations\" / script_name\nFIGURES_DIR.mkdir(exist_ok=True, parents=True)\n\n# mpl.rc(\"text\", usetex=True)\n# mpl.rc(\"font\", family=\"serif\")\n# mpl.rc(\n# \"text.latex\",\n# preamble=r\"\\usepackage{mathpazo} \\usepackage{eulervm} \\usepackage{amssymb}\"\n# r\"\\usepackage{amsmath} \\usepackage{bm} \\usepackage{DejaVuSans}\",\n# )\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Class 1: Flight which contains the flight number(f_id), its origin and destination, the number of stops between the
# origin and destination and the type of airlines(f_type)
class Flight():
# INIT CONSTRUCTOR
def __init__(self, f_id, f_origin, f_destination, no_of_stops, flight_type, p_id, p_type):
self.f_id = f_id
self.origin = f_origin
self.destination = f_destination
self.stops = no_of_stops
self.flight_type = flight_type
self.pid = p_id
self.ptype = p_type
def get_flight_details(self,f_id):
print("Flight No:", f_id)
print("ORG:", self.origin)
print("DEST:", self.destination)
print("Flight Type:", self.flight_type)
# Class2: Person which contains the personID(p_id), their name, phone number, gender, type of person(
# employee/passenger) and it inherits the Flight class to get the flight details.
class Person(Flight):
# INIT CONSTRUCTOR
def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type):
self.name = p_name
self.gender = p_gender
self.p_phonenumber = p_phonenumber
# Here we also use super class to use the parameters from Flight class
super(Person, self).__init__(f_id, f_origin, f_destination, no_of_stops, flight_type, p_id, p_type)
# Here we used MULTIPLE INHERITANCE as the Person is derived from Flight and the Employee and Passenger is derived
# from Person.
# Class3: Employee which is an inherited class from Person, SSN is the private data member, since we cant reveal the
# SSN.
class Employee(Person):
# INIT CONSTRUCTOR
def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, e_SSN, f_origin, f_destination, no_of_stops, flight_type):
super(Employee,self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type)
self.__emp_SSN = e_SSN
# This method is to get the travel details of the employee
def get_travel_details_employee(self):
# print("Travel Details of ", self.emp_SSN)
print("Hello Pilot ", self.name, "Here are your flight details")
print("Flight_ID:", self.f_id)
print("ORG:", self.origin)
print("DEST:", self.destination)
# Class 4:Passenger which is an inherited class from Person, Passport Number is the private data member,
# since we cant reveal it.
class Passenger(Person):
names = []
d = dict()
# INIT CONSTRUCTOR
def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops, flight_type):
super(Passenger, self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type)
self.pno = pno
# This is to get the travellers on the plane into a list, where we have the flightNumber(f_id)
# as the key and the passengername(name) as the value.
if self.f_id in Passenger.d.keys():
Passenger.d[self.f_id].append(self.name)
else:
Passenger.d[self.f_id] = [self.name]
# This method is to get the travel details of the passenger
def get_travel_details_passanger(self):
print("Travel Details of ", self.name)
print("Flight Id:", self.f_id)
print("Flight Type:", self.flight_type)
print("ORG:", self.origin)
print("DEST:", self.destination)
# This method is to print the dictionary where we have stored the passengers list for different flights
def get_travelling_passengers(self):
print("Passengers on the flight", Passenger.d)
class Ticket(Passenger):
def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops,
flight_type, boarding_group_no, row, seat_no):
super(Ticket, self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination,
no_of_stops, flight_type)
self.boarding_group_no = boarding_group_no
self.row = row
self.seat_no = seat_no
print("Your ticket details are below: ")
def get_boarding_pass(self, p_name):
for k, v in Passenger.d.items():
names = v
for i in names:
if i == p_name:
print("Passenger Name:", p_name)
print("Flight Id:", k)
print("Boarding Group and Seat No:", self.boarding_group_no, self.row, self.seat_no)
print("ORG:", self.origin)
print("DEST:", self.destination)
|
normal
|
{
"blob_id": "95a2f5abb37642651316a8954a4289e5b04e4916",
"index": 4357,
"step-1": "<mask token>\n\n\nclass Passenger(Person):\n <mask token>\n <mask token>\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type):\n super(Passenger, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, f_origin, f_destination, no_of_stops,\n flight_type)\n self.pno = pno\n if self.f_id in Passenger.d.keys():\n Passenger.d[self.f_id].append(self.name)\n else:\n Passenger.d[self.f_id] = [self.name]\n <mask token>\n <mask token>\n\n\nclass Ticket(Passenger):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type,\n boarding_group_no, row, seat_no):\n super(Ticket, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops,\n flight_type)\n self.boarding_group_no = boarding_group_no\n self.row = row\n self.seat_no = seat_no\n print('Your ticket details are below: ')\n\n def get_boarding_pass(self, p_name):\n for k, v in Passenger.d.items():\n names = v\n for i in names:\n if i == p_name:\n print('Passenger Name:', p_name)\n print('Flight Id:', k)\n print('Boarding Group and Seat No:', self.\n boarding_group_no, self.row, self.seat_no)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n",
"step-2": "<mask token>\n\n\nclass Passenger(Person):\n <mask token>\n <mask token>\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type):\n super(Passenger, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, f_origin, f_destination, no_of_stops,\n flight_type)\n self.pno = pno\n if self.f_id in Passenger.d.keys():\n Passenger.d[self.f_id].append(self.name)\n else:\n Passenger.d[self.f_id] = [self.name]\n\n def get_travel_details_passanger(self):\n print('Travel Details of ', self.name)\n print('Flight Id:', self.f_id)\n print('Flight Type:', self.flight_type)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n\n def get_travelling_passengers(self):\n print('Passengers on the flight', Passenger.d)\n\n\nclass Ticket(Passenger):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type,\n boarding_group_no, row, seat_no):\n super(Ticket, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops,\n flight_type)\n self.boarding_group_no = boarding_group_no\n self.row = row\n self.seat_no = seat_no\n print('Your ticket details are below: ')\n\n def get_boarding_pass(self, p_name):\n for k, v in Passenger.d.items():\n names = v\n for i in names:\n if i == p_name:\n print('Passenger Name:', p_name)\n print('Flight Id:', k)\n print('Boarding Group and Seat No:', self.\n boarding_group_no, self.row, self.seat_no)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n",
"step-3": "<mask token>\n\n\nclass Person(Flight):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n f_origin, f_destination, no_of_stops, flight_type):\n self.name = p_name\n self.gender = p_gender\n self.p_phonenumber = p_phonenumber\n super(Person, self).__init__(f_id, f_origin, f_destination,\n no_of_stops, flight_type, p_id, p_type)\n\n\nclass Employee(Person):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n e_SSN, f_origin, f_destination, no_of_stops, flight_type):\n super(Employee, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, f_origin, f_destination, no_of_stops,\n flight_type)\n self.__emp_SSN = e_SSN\n\n def get_travel_details_employee(self):\n print('Hello Pilot ', self.name, 'Here are your flight details')\n print('Flight_ID:', self.f_id)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n\n\nclass Passenger(Person):\n names = []\n d = dict()\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type):\n super(Passenger, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, f_origin, f_destination, no_of_stops,\n flight_type)\n self.pno = pno\n if self.f_id in Passenger.d.keys():\n Passenger.d[self.f_id].append(self.name)\n else:\n Passenger.d[self.f_id] = [self.name]\n\n def get_travel_details_passanger(self):\n print('Travel Details of ', self.name)\n print('Flight Id:', self.f_id)\n print('Flight Type:', self.flight_type)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n\n def get_travelling_passengers(self):\n print('Passengers on the flight', Passenger.d)\n\n\nclass Ticket(Passenger):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type,\n boarding_group_no, row, seat_no):\n super(Ticket, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, pno, f_origin, 
f_destination, no_of_stops,\n flight_type)\n self.boarding_group_no = boarding_group_no\n self.row = row\n self.seat_no = seat_no\n print('Your ticket details are below: ')\n\n def get_boarding_pass(self, p_name):\n for k, v in Passenger.d.items():\n names = v\n for i in names:\n if i == p_name:\n print('Passenger Name:', p_name)\n print('Flight Id:', k)\n print('Boarding Group and Seat No:', self.\n boarding_group_no, self.row, self.seat_no)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n",
"step-4": "class Flight:\n\n def __init__(self, f_id, f_origin, f_destination, no_of_stops,\n flight_type, p_id, p_type):\n self.f_id = f_id\n self.origin = f_origin\n self.destination = f_destination\n self.stops = no_of_stops\n self.flight_type = flight_type\n self.pid = p_id\n self.ptype = p_type\n\n def get_flight_details(self, f_id):\n print('Flight No:', f_id)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n print('Flight Type:', self.flight_type)\n\n\nclass Person(Flight):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n f_origin, f_destination, no_of_stops, flight_type):\n self.name = p_name\n self.gender = p_gender\n self.p_phonenumber = p_phonenumber\n super(Person, self).__init__(f_id, f_origin, f_destination,\n no_of_stops, flight_type, p_id, p_type)\n\n\nclass Employee(Person):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n e_SSN, f_origin, f_destination, no_of_stops, flight_type):\n super(Employee, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, f_origin, f_destination, no_of_stops,\n flight_type)\n self.__emp_SSN = e_SSN\n\n def get_travel_details_employee(self):\n print('Hello Pilot ', self.name, 'Here are your flight details')\n print('Flight_ID:', self.f_id)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n\n\nclass Passenger(Person):\n names = []\n d = dict()\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type):\n super(Passenger, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, f_origin, f_destination, no_of_stops,\n flight_type)\n self.pno = pno\n if self.f_id in Passenger.d.keys():\n Passenger.d[self.f_id].append(self.name)\n else:\n Passenger.d[self.f_id] = [self.name]\n\n def get_travel_details_passanger(self):\n print('Travel Details of ', self.name)\n print('Flight Id:', self.f_id)\n print('Flight Type:', self.flight_type)\n 
print('ORG:', self.origin)\n print('DEST:', self.destination)\n\n def get_travelling_passengers(self):\n print('Passengers on the flight', Passenger.d)\n\n\nclass Ticket(Passenger):\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id,\n pno, f_origin, f_destination, no_of_stops, flight_type,\n boarding_group_no, row, seat_no):\n super(Ticket, self).__init__(p_id, p_type, p_gender, p_name,\n p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops,\n flight_type)\n self.boarding_group_no = boarding_group_no\n self.row = row\n self.seat_no = seat_no\n print('Your ticket details are below: ')\n\n def get_boarding_pass(self, p_name):\n for k, v in Passenger.d.items():\n names = v\n for i in names:\n if i == p_name:\n print('Passenger Name:', p_name)\n print('Flight Id:', k)\n print('Boarding Group and Seat No:', self.\n boarding_group_no, self.row, self.seat_no)\n print('ORG:', self.origin)\n print('DEST:', self.destination)\n",
"step-5": "# Class 1: Flight which contains the flight number(f_id), its origin and destination, the number of stops between the\n# origin and destination and the type of airlines(f_type)\nclass Flight():\n # INIT CONSTRUCTOR\n def __init__(self, f_id, f_origin, f_destination, no_of_stops, flight_type, p_id, p_type):\n self.f_id = f_id\n self.origin = f_origin\n self.destination = f_destination\n self.stops = no_of_stops\n self.flight_type = flight_type\n self.pid = p_id\n self.ptype = p_type\n\n def get_flight_details(self,f_id):\n print(\"Flight No:\", f_id)\n print(\"ORG:\", self.origin)\n print(\"DEST:\", self.destination)\n print(\"Flight Type:\", self.flight_type)\n\n# Class2: Person which contains the personID(p_id), their name, phone number, gender, type of person(\n# employee/passenger) and it inherits the Flight class to get the flight details.\n\nclass Person(Flight):\n # INIT CONSTRUCTOR\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type):\n self.name = p_name\n self.gender = p_gender\n self.p_phonenumber = p_phonenumber\n# Here we also use super class to use the parameters from Flight class\n super(Person, self).__init__(f_id, f_origin, f_destination, no_of_stops, flight_type, p_id, p_type)\n\n# Here we used MULTIPLE INHERITANCE as the Person is derived from Flight and the Employee and Passenger is derived\n# from Person.\n\n# Class3: Employee which is an inherited class from Person, SSN is the private data member, since we cant reveal the\n# SSN.\n\nclass Employee(Person):\n # INIT CONSTRUCTOR\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, e_SSN, f_origin, f_destination, no_of_stops, flight_type):\n super(Employee,self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type)\n self.__emp_SSN = e_SSN\n\n# This method is to get the travel details of the employee\n def 
get_travel_details_employee(self):\n # print(\"Travel Details of \", self.emp_SSN)\n print(\"Hello Pilot \", self.name, \"Here are your flight details\")\n print(\"Flight_ID:\", self.f_id)\n print(\"ORG:\", self.origin)\n print(\"DEST:\", self.destination)\n\n# Class 4:Passenger which is an inherited class from Person, Passport Number is the private data member,\n# since we cant reveal it.\nclass Passenger(Person):\n names = []\n d = dict()\n # INIT CONSTRUCTOR\n\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops, flight_type):\n super(Passenger, self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, f_origin, f_destination, no_of_stops, flight_type)\n self.pno = pno\n\n# This is to get the travellers on the plane into a list, where we have the flightNumber(f_id)\n # as the key and the passengername(name) as the value.\n if self.f_id in Passenger.d.keys():\n Passenger.d[self.f_id].append(self.name)\n else:\n Passenger.d[self.f_id] = [self.name]\n\n # This method is to get the travel details of the passenger\n def get_travel_details_passanger(self):\n print(\"Travel Details of \", self.name)\n print(\"Flight Id:\", self.f_id)\n print(\"Flight Type:\", self.flight_type)\n print(\"ORG:\", self.origin)\n print(\"DEST:\", self.destination)\n\n# This method is to print the dictionary where we have stored the passengers list for different flights\n def get_travelling_passengers(self):\n print(\"Passengers on the flight\", Passenger.d)\n\n\nclass Ticket(Passenger):\n def __init__(self, p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination, no_of_stops,\n flight_type, boarding_group_no, row, seat_no):\n super(Ticket, self).__init__(p_id, p_type, p_gender, p_name, p_phonenumber, f_id, pno, f_origin, f_destination,\n no_of_stops, flight_type)\n self.boarding_group_no = boarding_group_no\n self.row = row\n self.seat_no = seat_no\n print(\"Your ticket details are below: 
\")\n\n def get_boarding_pass(self, p_name):\n for k, v in Passenger.d.items():\n names = v\n for i in names:\n if i == p_name:\n print(\"Passenger Name:\", p_name)\n print(\"Flight Id:\", k)\n print(\"Boarding Group and Seat No:\", self.boarding_group_no, self.row, self.seat_no)\n print(\"ORG:\", self.origin)\n print(\"DEST:\", self.destination)\n\n\n\n\n\n",
"step-ids": [
5,
7,
13,
16,
17
]
}
|
[
5,
7,
13,
16,
17
] |
<|reserved_special_token_0|>
class BookRoomThread(threading.Thread):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def inUserCenter(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text.strip() == '个人中心':
result = self.driver.find_element_by_css_selector(
'.orange.uni_trans')
if result.text.strip() == '预约成功':
return True
else:
self.driver.find_element_by_link_text('研究小间').click()
return False
else:
return False
except Exception as e:
print(str(e))
return False
<|reserved_special_token_0|>
def comitBook(self):
try:
dialogtitle = self.driver.find_element_by_class_name(
'ui-dialog-title')
if dialogtitle.text == '预约申请':
st = self.driver.find_elements_by_name('start_time')[2]
et = self.driver.find_elements_by_name('end_time')[2]
Select(st).select_by_value(self.startTime)
Select(et).select_by_value(self.endTime)
self.driver.find_element_by_class_name('submitarea'
).find_element_by_xpath("//input[@value='提交']").click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BookRoomThread(threading.Thread):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, name):
super().__init__()
self.__running = threading.Event()
self.__running.set()
self.name = name
self.isStart = False
def stop(self):
if self.driver:
self.driver.close()
self.__running.clear()
<|reserved_special_token_0|>
def openChrome(self):
option = webdriver.ChromeOptions()
option.add_argument('disable-infobars')
self.driver = webdriver.Chrome(chrome_options=option)
<|reserved_special_token_0|>
def goUserCenter(self):
try:
dialogtitle = self.driver.find_element_by_id('ui-id-3')
if dialogtitle.text == '提醒':
self.driver.find_element_by_class_name('ui-button-text-only'
).click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
<|reserved_special_token_0|>
def inUserCenter(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text.strip() == '个人中心':
result = self.driver.find_element_by_css_selector(
'.orange.uni_trans')
if result.text.strip() == '预约成功':
return True
else:
self.driver.find_element_by_link_text('研究小间').click()
return False
else:
return False
except Exception as e:
print(str(e))
return False
<|reserved_special_token_0|>
def comitBook(self):
try:
dialogtitle = self.driver.find_element_by_class_name(
'ui-dialog-title')
if dialogtitle.text == '预约申请':
st = self.driver.find_elements_by_name('start_time')[2]
et = self.driver.find_elements_by_name('end_time')[2]
Select(st).select_by_value(self.startTime)
Select(et).select_by_value(self.endTime)
self.driver.find_element_by_class_name('submitarea'
).find_element_by_xpath("//input[@value='提交']").click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def book_room(self):
if self.driver.title == 'IC空间管理系统':
if not self.goLogin():
print('not login')
if not self.inUserCenter():
print('not in user center')
if not self.goUserCenter():
print('not go user center')
if not self.comitBook():
print('not go 研究小间')
if not self.changeTheDate():
print('not go commit')
if not self.goBookRoomSelection():
print('not go Date')
else:
print('book success')
self.driver.close()
self.stop()
return
self.start_timer()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BookRoomThread(threading.Thread):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, name):
super().__init__()
self.__running = threading.Event()
self.__running.set()
self.name = name
self.isStart = False
def stop(self):
if self.driver:
self.driver.close()
self.__running.clear()
def run(self):
while self.__running.isSet():
if not self.isStart:
self.openbrowser()
self.isStart = True
time.sleep(1)
def openChrome(self):
option = webdriver.ChromeOptions()
option.add_argument('disable-infobars')
self.driver = webdriver.Chrome(chrome_options=option)
def goLogin(self):
try:
username = self.driver.find_element_by_id('username')
password = self.driver.find_element_by_id('password')
username.send_keys(self.userName)
password.send_keys(self.passWord)
self.driver.find_element_by_class_name('btn-success').click()
return True
except Exception as e:
print(str(e))
return False
def goUserCenter(self):
try:
dialogtitle = self.driver.find_element_by_id('ui-id-3')
if dialogtitle.text == '提醒':
self.driver.find_element_by_class_name('ui-button-text-only'
).click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def goBookRoomSelection(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text == 'Home Page':
self.driver.find_element_by_link_text('研究小间').click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def inUserCenter(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text.strip() == '个人中心':
result = self.driver.find_element_by_css_selector(
'.orange.uni_trans')
if result.text.strip() == '预约成功':
return True
else:
self.driver.find_element_by_link_text('研究小间').click()
return False
else:
return False
except Exception as e:
print(str(e))
return False
def changeTheDate(self):
try:
datetitles = self.driver.find_elements_by_class_name('cld-h-cell')
isFindDateTitle = False
print(self.bookDate)
for i in range(len(datetitles)):
if datetitles[i].get_attribute('date') == str(self.bookDate):
isFindDateTitle = True
if datetitles[i].get_attribute('class').find('cld-d-sel'
) == -1:
datetitles[i].click()
elif self.isSlectDate:
self.isSlectDate = False
if i == 6:
datetitles[5].click()
else:
datetitles[i + 1].click()
else:
self.isSlectDate = True
if not isFindDateTitle:
datetitles[9].click()
else:
roomtitles = self.driver.find_elements_by_class_name(
'cld-obj-qz')
for i in range(len(roomtitles)):
if roomtitles[i].get_attribute('objname') == self.roomName:
if len(roomtitles[i].find_elements_by_class_name(
'cld-ttd')) > 2:
roomtitles[i].find_element_by_class_name(
'cld-ttd-title').click()
break
return True
except Exception as e:
print(str(e))
return False
def comitBook(self):
try:
dialogtitle = self.driver.find_element_by_class_name(
'ui-dialog-title')
if dialogtitle.text == '预约申请':
st = self.driver.find_elements_by_name('start_time')[2]
et = self.driver.find_elements_by_name('end_time')[2]
Select(st).select_by_value(self.startTime)
Select(et).select_by_value(self.endTime)
self.driver.find_element_by_class_name('submitarea'
).find_element_by_xpath("//input[@value='提交']").click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def book_room(self):
if self.driver.title == 'IC空间管理系统':
if not self.goLogin():
print('not login')
if not self.inUserCenter():
print('not in user center')
if not self.goUserCenter():
print('not go user center')
if not self.comitBook():
print('not go 研究小间')
if not self.changeTheDate():
print('not go commit')
if not self.goBookRoomSelection():
print('not go Date')
else:
print('book success')
self.driver.close()
self.stop()
return
self.start_timer()
def operationBook(self):
url = 'http://seatlib.fjtcm.edu.cn'
self.driver.get(url)
while True:
now = datetime.datetime.now()
if (now.hour > self.startHour or now.hour == self.startHour and
now.minute >= self.startMin):
self.driver.refresh()
break
time.sleep(10)
self.start_timer()
def start_timer(self, interval=0.5):
self.timer = threading.Timer(interval, self.book_room)
self.timer.start()
def openbrowser(self):
self.openChrome()
self.operationBook()
<|reserved_special_token_1|>
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import NoSuchElementException, ElementNotVisibleException
from selenium.webdriver.support.select import Select
import time
import threading
import random
import string
from urllib import parse, request
import re
import json
import datetime
import threading
from datetime import timedelta, date
class BookRoomThread(threading.Thread):
startHour = 23
startMin = 58
userName = '2160652004'
passWord = '270749'
roomName = '研究间17'
startTime = '800'
endTime = '2200'
bookDate = date.today() + timedelta(2)
isSlectDate = False
def __init__(self, name):
super().__init__()
self.__running = threading.Event()
self.__running.set()
self.name = name
self.isStart = False
def stop(self):
if self.driver:
self.driver.close()
self.__running.clear()
def run(self):
while self.__running.isSet():
if not self.isStart:
self.openbrowser()
self.isStart = True
time.sleep(1)
def openChrome(self):
option = webdriver.ChromeOptions()
option.add_argument('disable-infobars')
self.driver = webdriver.Chrome(chrome_options=option)
def goLogin(self):
try:
username = self.driver.find_element_by_id('username')
password = self.driver.find_element_by_id('password')
username.send_keys(self.userName)
password.send_keys(self.passWord)
self.driver.find_element_by_class_name('btn-success').click()
return True
except Exception as e:
print(str(e))
return False
def goUserCenter(self):
try:
dialogtitle = self.driver.find_element_by_id('ui-id-3')
if dialogtitle.text == '提醒':
self.driver.find_element_by_class_name('ui-button-text-only'
).click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def goBookRoomSelection(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text == 'Home Page':
self.driver.find_element_by_link_text('研究小间').click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def inUserCenter(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text.strip() == '个人中心':
result = self.driver.find_element_by_css_selector(
'.orange.uni_trans')
if result.text.strip() == '预约成功':
return True
else:
self.driver.find_element_by_link_text('研究小间').click()
return False
else:
return False
except Exception as e:
print(str(e))
return False
def changeTheDate(self):
try:
datetitles = self.driver.find_elements_by_class_name('cld-h-cell')
isFindDateTitle = False
print(self.bookDate)
for i in range(len(datetitles)):
if datetitles[i].get_attribute('date') == str(self.bookDate):
isFindDateTitle = True
if datetitles[i].get_attribute('class').find('cld-d-sel'
) == -1:
datetitles[i].click()
elif self.isSlectDate:
self.isSlectDate = False
if i == 6:
datetitles[5].click()
else:
datetitles[i + 1].click()
else:
self.isSlectDate = True
if not isFindDateTitle:
datetitles[9].click()
else:
roomtitles = self.driver.find_elements_by_class_name(
'cld-obj-qz')
for i in range(len(roomtitles)):
if roomtitles[i].get_attribute('objname') == self.roomName:
if len(roomtitles[i].find_elements_by_class_name(
'cld-ttd')) > 2:
roomtitles[i].find_element_by_class_name(
'cld-ttd-title').click()
break
return True
except Exception as e:
print(str(e))
return False
def comitBook(self):
try:
dialogtitle = self.driver.find_element_by_class_name(
'ui-dialog-title')
if dialogtitle.text == '预约申请':
st = self.driver.find_elements_by_name('start_time')[2]
et = self.driver.find_elements_by_name('end_time')[2]
Select(st).select_by_value(self.startTime)
Select(et).select_by_value(self.endTime)
self.driver.find_element_by_class_name('submitarea'
).find_element_by_xpath("//input[@value='提交']").click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def book_room(self):
if self.driver.title == 'IC空间管理系统':
if not self.goLogin():
print('not login')
if not self.inUserCenter():
print('not in user center')
if not self.goUserCenter():
print('not go user center')
if not self.comitBook():
print('not go 研究小间')
if not self.changeTheDate():
print('not go commit')
if not self.goBookRoomSelection():
print('not go Date')
else:
print('book success')
self.driver.close()
self.stop()
return
self.start_timer()
def operationBook(self):
url = 'http://seatlib.fjtcm.edu.cn'
self.driver.get(url)
while True:
now = datetime.datetime.now()
if (now.hour > self.startHour or now.hour == self.startHour and
now.minute >= self.startMin):
self.driver.refresh()
break
time.sleep(10)
self.start_timer()
def start_timer(self, interval=0.5):
self.timer = threading.Timer(interval, self.book_room)
self.timer.start()
def openbrowser(self):
self.openChrome()
self.operationBook()
<|reserved_special_token_1|>
# encoding=utf8
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import NoSuchElementException, ElementNotVisibleException
from selenium.webdriver.support.select import Select
import time
import threading
import random
import string
from urllib import parse, request
import re
import json
import datetime
import threading
from datetime import timedelta, date
class BookRoomThread(threading.Thread):
startHour = 23
startMin = 58
userName = '2160652004'
passWord = '270749'
roomName = '研究间17'
startTime = '800'
endTime = '2200'
bookDate = date.today()+timedelta(2)
isSlectDate = False
def __init__(self, name):
super().__init__()
self.__running = threading.Event() # 用于停止线程的标识
self.__running.set() # 将running设置为True
self.name = name
self.isStart = False
    def stop(self):
        """Close the browser window (if one was opened) and clear the run
        flag so the run() loop terminates."""
        if self.driver:
            self.driver.close()
        self.__running.clear()  # cleared event == stop requested
def run(self): # 固定名字run !!!必须用固定名
while self.__running.isSet():
if not self.isStart:
self.openbrowser()
self.isStart = True
time.sleep(1)
# 前台开启浏览器模式
def openChrome(self):
# 加启动配置
option = webdriver.ChromeOptions()
option.add_argument('disable-infobars')
self.driver = webdriver.Chrome(chrome_options=option)
# 打开chrome浏览器
# driver = webdriver.Chrome()
def goLogin(self):
try:
username = self.driver.find_element_by_id("username")
password = self.driver.find_element_by_id("password")
username.send_keys(self.userName)
password.send_keys(self.passWord)
self.driver.find_element_by_class_name('btn-success').click()
return True
except Exception as e:
print(str(e))
return False
def goUserCenter(self):
try:
dialogtitle = self.driver.find_element_by_id('ui-id-3')
if dialogtitle.text == '提醒':
self.driver.find_element_by_class_name('ui-button-text-only').click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def goBookRoomSelection(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text == 'Home Page':
self.driver.find_element_by_link_text('研究小间').click()
return True
else:
return False
except Exception as e:
print(str(e))
return False
def inUserCenter(self):
try:
title = self.driver.find_element_by_class_name('h_title')
if title.text.strip() == '个人中心':
result = self.driver.find_element_by_css_selector('.orange.uni_trans')
if result.text.strip() == '预约成功':
return True
else:
self.driver.find_element_by_link_text('研究小间').click()
return False
else:
return False
except Exception as e:
print(str(e))
return False
    def changeTheDate(self):
        """Select the target booking date in the calendar, then open the
        desired room's booking cell.

        Walks the calendar header cells ('cld-h-cell'): clicks the one whose
        ``date`` attribute matches ``bookDate`` unless it is already selected
        ('cld-d-sel'); when already selected, the ``isSlectDate`` flip-flop
        clicks a neighbouring cell on alternating passes — presumably to
        force the calendar to re-render; verify against the live page.
        If the date is absent, clicks the tenth header cell (likely a
        page-forward control — TODO confirm); otherwise clicks the title of
        ``roomName`` when it still shows more than two slot cells
        ('cld-ttd').

        Returns True unless an exception occurred (then prints it and
        returns False).
        """
        try:
            datetitles = self.driver.find_elements_by_class_name('cld-h-cell')
            isFindDateTitle = False
            print(self.bookDate)  # debug trace of the target date
            for i in range(len(datetitles)):
                if datetitles[i].get_attribute('date') == str(self.bookDate):
                    isFindDateTitle = True
                    # -1 => the matching cell is not yet selected
                    if datetitles[i].get_attribute('class').find('cld-d-sel') == -1:
                        datetitles[i].click()
                    else:
                        if self.isSlectDate:
                            self.isSlectDate = False
                            # i == 6 is the last regular day column — TODO confirm
                            if i == 6:
                                datetitles[5].click()
                            else:
                                datetitles[i+1].click()
                        else:
                            self.isSlectDate = True
            if not isFindDateTitle:
                datetitles[9].click()
            else:
                roomtitles = self.driver.find_elements_by_class_name('cld-obj-qz')
                for i in range(len(roomtitles)):
                    if roomtitles[i].get_attribute('objname') == self.roomName:
                        if len(roomtitles[i].find_elements_by_class_name('cld-ttd')) > 2:
                            roomtitles[i].find_element_by_class_name('cld-ttd-title').click()
                            break
            return True
        except Exception as e:
            print(str(e))
            return False
    def comitBook(self):
        """Fill in and submit the booking-request ('预约申请') dialog.

        Sets the start/end time <select> elements to ``startTime`` /
        ``endTime`` and clicks the submit ('提交') button. Returns True on
        submission, False when the dialog is not showing or a lookup
        raises (error printed).
        """
        try:
            dialogtitle = self.driver.find_element_by_class_name('ui-dialog-title')
            if dialogtitle.text == '预约申请':
                # index [2]: the page holds several elements named
                # start_time/end_time; the dialog's pair appears to be the
                # third — TODO confirm against the live page
                st = self.driver.find_elements_by_name('start_time')[2]
                et = self.driver.find_elements_by_name('end_time')[2]
                Select(st).select_by_value(self.startTime)
                Select(et).select_by_value(self.endTime)
                self.driver.find_element_by_class_name('submitarea').find_element_by_xpath(
                    "//input[@value='提交']").click()
                return True
            else:
                return False
        except Exception as e:
            print(str(e))
            return False
    def book_room(self):
        """One polling pass of the booking state machine.

        Each helper returns True only when its page/dialog is the one
        currently showing, so the nested cascade effectively dispatches on
        the current page: login form -> personal centre -> reminder dialog
        -> booking dialog -> calendar -> room list. When the final step
        succeeds the browser is closed and the thread stopped; otherwise
        the timer is re-armed for another pass.

        NOTE(review): several 'not ...' log strings look mismatched with
        the step they follow (e.g. 'not go 研究小间' after comitBook) —
        presumably copy-paste drift; verify before trusting the logs.
        """
        if self.driver.title == "IC空间管理系统":
            if not self.goLogin():
                print('not login')
                if not self.inUserCenter():
                    print('not in user center')
                    if not self.goUserCenter():
                        print('not go user center')
                        if not self.comitBook():
                            print('not go 研究小间')
                            if not self.changeTheDate():
                                print('not go commit')
                                if not self.goBookRoomSelection():
                                    print('not go Date')
                                else:
                                    print('book success')
                                    self.driver.close()
                                    self.stop()
                                    return
        # re-arm the poll timer for the next pass
        self.start_timer()
    # Drive the initial navigation and wait for the booking window to open.
    def operationBook(self):
        """Open the seat-library site, poll until the release time
        (``startHour``:``startMin``), refresh the page, then start the
        booking-poll timer.

        NOTE(review): this busy-waits on the wall clock in the calling
        thread, checking every 10 seconds.
        """
        url = "http://seatlib.fjtcm.edu.cn"
        self.driver.get(url)
        while True:
            now = datetime.datetime.now()
            if now.hour > self.startHour or (now.hour == self.startHour and now.minute >= self.startMin):
                self.driver.refresh()
                break
            # check again every 10 seconds
            time.sleep(10)
        self.start_timer()
def start_timer(self, interval=0.5):
self.timer = threading.Timer(interval, self.book_room)
self.timer.start()
    def openbrowser(self):
        """Launch Chrome and begin the timed booking sequence."""
        self.openChrome()
        self.operationBook()
|
flexible
|
{
"blob_id": "ae775e25179546156485e15d05491e010cf5daca",
"index": 9360,
"step-1": "<mask token>\n\n\nclass BookRoomThread(threading.Thread):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def inUserCenter(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text.strip() == '个人中心':\n result = self.driver.find_element_by_css_selector(\n '.orange.uni_trans')\n if result.text.strip() == '预约成功':\n return True\n else:\n self.driver.find_element_by_link_text('研究小间').click()\n return False\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n <mask token>\n\n def comitBook(self):\n try:\n dialogtitle = self.driver.find_element_by_class_name(\n 'ui-dialog-title')\n if dialogtitle.text == '预约申请':\n st = self.driver.find_elements_by_name('start_time')[2]\n et = self.driver.find_elements_by_name('end_time')[2]\n Select(st).select_by_value(self.startTime)\n Select(et).select_by_value(self.endTime)\n self.driver.find_element_by_class_name('submitarea'\n ).find_element_by_xpath(\"//input[@value='提交']\").click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass BookRoomThread(threading.Thread):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, name):\n super().__init__()\n self.__running = threading.Event()\n self.__running.set()\n self.name = name\n self.isStart = False\n\n def stop(self):\n if self.driver:\n self.driver.close()\n self.__running.clear()\n <mask token>\n\n def openChrome(self):\n option = webdriver.ChromeOptions()\n option.add_argument('disable-infobars')\n self.driver = webdriver.Chrome(chrome_options=option)\n <mask token>\n\n def goUserCenter(self):\n try:\n dialogtitle = self.driver.find_element_by_id('ui-id-3')\n if dialogtitle.text == '提醒':\n self.driver.find_element_by_class_name('ui-button-text-only'\n ).click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n <mask token>\n\n def inUserCenter(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text.strip() == '个人中心':\n result = self.driver.find_element_by_css_selector(\n '.orange.uni_trans')\n if result.text.strip() == '预约成功':\n return True\n else:\n self.driver.find_element_by_link_text('研究小间').click()\n return False\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n <mask token>\n\n def comitBook(self):\n try:\n dialogtitle = self.driver.find_element_by_class_name(\n 'ui-dialog-title')\n if dialogtitle.text == '预约申请':\n st = self.driver.find_elements_by_name('start_time')[2]\n et = self.driver.find_elements_by_name('end_time')[2]\n Select(st).select_by_value(self.startTime)\n Select(et).select_by_value(self.endTime)\n self.driver.find_element_by_class_name('submitarea'\n ).find_element_by_xpath(\"//input[@value='提交']\").click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def book_room(self):\n if self.driver.title == 'IC空间管理系统':\n if not 
self.goLogin():\n print('not login')\n if not self.inUserCenter():\n print('not in user center')\n if not self.goUserCenter():\n print('not go user center')\n if not self.comitBook():\n print('not go 研究小间')\n if not self.changeTheDate():\n print('not go commit')\n if not self.goBookRoomSelection():\n print('not go Date')\n else:\n print('book success')\n self.driver.close()\n self.stop()\n return\n self.start_timer()\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass BookRoomThread(threading.Thread):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, name):\n super().__init__()\n self.__running = threading.Event()\n self.__running.set()\n self.name = name\n self.isStart = False\n\n def stop(self):\n if self.driver:\n self.driver.close()\n self.__running.clear()\n\n def run(self):\n while self.__running.isSet():\n if not self.isStart:\n self.openbrowser()\n self.isStart = True\n time.sleep(1)\n\n def openChrome(self):\n option = webdriver.ChromeOptions()\n option.add_argument('disable-infobars')\n self.driver = webdriver.Chrome(chrome_options=option)\n\n def goLogin(self):\n try:\n username = self.driver.find_element_by_id('username')\n password = self.driver.find_element_by_id('password')\n username.send_keys(self.userName)\n password.send_keys(self.passWord)\n self.driver.find_element_by_class_name('btn-success').click()\n return True\n except Exception as e:\n print(str(e))\n return False\n\n def goUserCenter(self):\n try:\n dialogtitle = self.driver.find_element_by_id('ui-id-3')\n if dialogtitle.text == '提醒':\n self.driver.find_element_by_class_name('ui-button-text-only'\n ).click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def goBookRoomSelection(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text == 'Home Page':\n self.driver.find_element_by_link_text('研究小间').click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def inUserCenter(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text.strip() == '个人中心':\n result = self.driver.find_element_by_css_selector(\n '.orange.uni_trans')\n if result.text.strip() == '预约成功':\n return True\n else:\n self.driver.find_element_by_link_text('研究小间').click()\n return False\n 
else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def changeTheDate(self):\n try:\n datetitles = self.driver.find_elements_by_class_name('cld-h-cell')\n isFindDateTitle = False\n print(self.bookDate)\n for i in range(len(datetitles)):\n if datetitles[i].get_attribute('date') == str(self.bookDate):\n isFindDateTitle = True\n if datetitles[i].get_attribute('class').find('cld-d-sel'\n ) == -1:\n datetitles[i].click()\n elif self.isSlectDate:\n self.isSlectDate = False\n if i == 6:\n datetitles[5].click()\n else:\n datetitles[i + 1].click()\n else:\n self.isSlectDate = True\n if not isFindDateTitle:\n datetitles[9].click()\n else:\n roomtitles = self.driver.find_elements_by_class_name(\n 'cld-obj-qz')\n for i in range(len(roomtitles)):\n if roomtitles[i].get_attribute('objname') == self.roomName:\n if len(roomtitles[i].find_elements_by_class_name(\n 'cld-ttd')) > 2:\n roomtitles[i].find_element_by_class_name(\n 'cld-ttd-title').click()\n break\n return True\n except Exception as e:\n print(str(e))\n return False\n\n def comitBook(self):\n try:\n dialogtitle = self.driver.find_element_by_class_name(\n 'ui-dialog-title')\n if dialogtitle.text == '预约申请':\n st = self.driver.find_elements_by_name('start_time')[2]\n et = self.driver.find_elements_by_name('end_time')[2]\n Select(st).select_by_value(self.startTime)\n Select(et).select_by_value(self.endTime)\n self.driver.find_element_by_class_name('submitarea'\n ).find_element_by_xpath(\"//input[@value='提交']\").click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def book_room(self):\n if self.driver.title == 'IC空间管理系统':\n if not self.goLogin():\n print('not login')\n if not self.inUserCenter():\n print('not in user center')\n if not self.goUserCenter():\n print('not go user center')\n if not self.comitBook():\n print('not go 研究小间')\n if not self.changeTheDate():\n print('not go commit')\n if not self.goBookRoomSelection():\n print('not go 
Date')\n else:\n print('book success')\n self.driver.close()\n self.stop()\n return\n self.start_timer()\n\n def operationBook(self):\n url = 'http://seatlib.fjtcm.edu.cn'\n self.driver.get(url)\n while True:\n now = datetime.datetime.now()\n if (now.hour > self.startHour or now.hour == self.startHour and\n now.minute >= self.startMin):\n self.driver.refresh()\n break\n time.sleep(10)\n self.start_timer()\n\n def start_timer(self, interval=0.5):\n self.timer = threading.Timer(interval, self.book_room)\n self.timer.start()\n\n def openbrowser(self):\n self.openChrome()\n self.operationBook()\n",
"step-4": "from selenium import webdriver\nfrom selenium.webdriver.common.desired_capabilities import DesiredCapabilities\nfrom selenium.common.exceptions import NoSuchElementException, ElementNotVisibleException\nfrom selenium.webdriver.support.select import Select\nimport time\nimport threading\nimport random\nimport string\nfrom urllib import parse, request\nimport re\nimport json\nimport datetime\nimport threading\nfrom datetime import timedelta, date\n\n\nclass BookRoomThread(threading.Thread):\n startHour = 23\n startMin = 58\n userName = '2160652004'\n passWord = '270749'\n roomName = '研究间17'\n startTime = '800'\n endTime = '2200'\n bookDate = date.today() + timedelta(2)\n isSlectDate = False\n\n def __init__(self, name):\n super().__init__()\n self.__running = threading.Event()\n self.__running.set()\n self.name = name\n self.isStart = False\n\n def stop(self):\n if self.driver:\n self.driver.close()\n self.__running.clear()\n\n def run(self):\n while self.__running.isSet():\n if not self.isStart:\n self.openbrowser()\n self.isStart = True\n time.sleep(1)\n\n def openChrome(self):\n option = webdriver.ChromeOptions()\n option.add_argument('disable-infobars')\n self.driver = webdriver.Chrome(chrome_options=option)\n\n def goLogin(self):\n try:\n username = self.driver.find_element_by_id('username')\n password = self.driver.find_element_by_id('password')\n username.send_keys(self.userName)\n password.send_keys(self.passWord)\n self.driver.find_element_by_class_name('btn-success').click()\n return True\n except Exception as e:\n print(str(e))\n return False\n\n def goUserCenter(self):\n try:\n dialogtitle = self.driver.find_element_by_id('ui-id-3')\n if dialogtitle.text == '提醒':\n self.driver.find_element_by_class_name('ui-button-text-only'\n ).click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def goBookRoomSelection(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text 
== 'Home Page':\n self.driver.find_element_by_link_text('研究小间').click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def inUserCenter(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text.strip() == '个人中心':\n result = self.driver.find_element_by_css_selector(\n '.orange.uni_trans')\n if result.text.strip() == '预约成功':\n return True\n else:\n self.driver.find_element_by_link_text('研究小间').click()\n return False\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def changeTheDate(self):\n try:\n datetitles = self.driver.find_elements_by_class_name('cld-h-cell')\n isFindDateTitle = False\n print(self.bookDate)\n for i in range(len(datetitles)):\n if datetitles[i].get_attribute('date') == str(self.bookDate):\n isFindDateTitle = True\n if datetitles[i].get_attribute('class').find('cld-d-sel'\n ) == -1:\n datetitles[i].click()\n elif self.isSlectDate:\n self.isSlectDate = False\n if i == 6:\n datetitles[5].click()\n else:\n datetitles[i + 1].click()\n else:\n self.isSlectDate = True\n if not isFindDateTitle:\n datetitles[9].click()\n else:\n roomtitles = self.driver.find_elements_by_class_name(\n 'cld-obj-qz')\n for i in range(len(roomtitles)):\n if roomtitles[i].get_attribute('objname') == self.roomName:\n if len(roomtitles[i].find_elements_by_class_name(\n 'cld-ttd')) > 2:\n roomtitles[i].find_element_by_class_name(\n 'cld-ttd-title').click()\n break\n return True\n except Exception as e:\n print(str(e))\n return False\n\n def comitBook(self):\n try:\n dialogtitle = self.driver.find_element_by_class_name(\n 'ui-dialog-title')\n if dialogtitle.text == '预约申请':\n st = self.driver.find_elements_by_name('start_time')[2]\n et = self.driver.find_elements_by_name('end_time')[2]\n Select(st).select_by_value(self.startTime)\n Select(et).select_by_value(self.endTime)\n self.driver.find_element_by_class_name('submitarea'\n 
).find_element_by_xpath(\"//input[@value='提交']\").click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def book_room(self):\n if self.driver.title == 'IC空间管理系统':\n if not self.goLogin():\n print('not login')\n if not self.inUserCenter():\n print('not in user center')\n if not self.goUserCenter():\n print('not go user center')\n if not self.comitBook():\n print('not go 研究小间')\n if not self.changeTheDate():\n print('not go commit')\n if not self.goBookRoomSelection():\n print('not go Date')\n else:\n print('book success')\n self.driver.close()\n self.stop()\n return\n self.start_timer()\n\n def operationBook(self):\n url = 'http://seatlib.fjtcm.edu.cn'\n self.driver.get(url)\n while True:\n now = datetime.datetime.now()\n if (now.hour > self.startHour or now.hour == self.startHour and\n now.minute >= self.startMin):\n self.driver.refresh()\n break\n time.sleep(10)\n self.start_timer()\n\n def start_timer(self, interval=0.5):\n self.timer = threading.Timer(interval, self.book_room)\n self.timer.start()\n\n def openbrowser(self):\n self.openChrome()\n self.operationBook()\n",
"step-5": "# encoding=utf8\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.desired_capabilities import DesiredCapabilities\nfrom selenium.common.exceptions import NoSuchElementException, ElementNotVisibleException\nfrom selenium.webdriver.support.select import Select\nimport time\nimport threading\nimport random\nimport string\nfrom urllib import parse, request\nimport re\nimport json\nimport datetime\nimport threading\nfrom datetime import timedelta, date\n\n\nclass BookRoomThread(threading.Thread):\n\n startHour = 23\n startMin = 58\n userName = '2160652004'\n passWord = '270749'\n roomName = '研究间17'\n startTime = '800'\n endTime = '2200'\n bookDate = date.today()+timedelta(2)\n isSlectDate = False\n\n def __init__(self, name):\n super().__init__()\n self.__running = threading.Event() # 用于停止线程的标识\n self.__running.set() # 将running设置为True\n self.name = name\n self.isStart = False\n\n def stop(self):\n if self.driver:\n self.driver.close()\n self.__running.clear() # 设置为False\n\n def run(self): # 固定名字run !!!必须用固定名\n while self.__running.isSet():\n if not self.isStart:\n self.openbrowser()\n self.isStart = True\n time.sleep(1)\n\n # 前台开启浏览器模式\n def openChrome(self):\n # 加启动配置\n option = webdriver.ChromeOptions()\n option.add_argument('disable-infobars')\n self.driver = webdriver.Chrome(chrome_options=option)\n # 打开chrome浏览器\n # driver = webdriver.Chrome()\n\n def goLogin(self):\n try:\n username = self.driver.find_element_by_id(\"username\")\n password = self.driver.find_element_by_id(\"password\")\n username.send_keys(self.userName)\n password.send_keys(self.passWord)\n self.driver.find_element_by_class_name('btn-success').click()\n return True\n except Exception as e:\n print(str(e))\n return False\n\n def goUserCenter(self):\n try:\n dialogtitle = self.driver.find_element_by_id('ui-id-3')\n if dialogtitle.text == '提醒':\n self.driver.find_element_by_class_name('ui-button-text-only').click()\n return True\n else:\n return False\n except Exception as 
e:\n print(str(e))\n return False\n\n def goBookRoomSelection(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text == 'Home Page':\n self.driver.find_element_by_link_text('研究小间').click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def inUserCenter(self):\n try:\n title = self.driver.find_element_by_class_name('h_title')\n if title.text.strip() == '个人中心':\n result = self.driver.find_element_by_css_selector('.orange.uni_trans')\n if result.text.strip() == '预约成功':\n return True\n else: \n self.driver.find_element_by_link_text('研究小间').click()\n return False\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def changeTheDate(self):\n try:\n datetitles = self.driver.find_elements_by_class_name('cld-h-cell')\n isFindDateTitle = False\n print(self.bookDate)\n for i in range(len(datetitles)):\n if datetitles[i].get_attribute('date') == str(self.bookDate):\n isFindDateTitle = True\n if datetitles[i].get_attribute('class').find('cld-d-sel') == -1:\n datetitles[i].click()\n else:\n if self.isSlectDate:\n self.isSlectDate = False\n if i == 6:\n datetitles[5].click()\n else:\n datetitles[i+1].click()\n else:\n self.isSlectDate = True\n \n if not isFindDateTitle:\n datetitles[9].click()\n else:\n roomtitles = self.driver.find_elements_by_class_name('cld-obj-qz')\n for i in range(len(roomtitles)):\n if roomtitles[i].get_attribute('objname') == self.roomName:\n if len(roomtitles[i].find_elements_by_class_name('cld-ttd')) > 2:\n roomtitles[i].find_element_by_class_name('cld-ttd-title').click()\n break\n\n return True\n except Exception as e:\n print(str(e))\n return False\n\n def comitBook(self):\n try:\n dialogtitle = self.driver.find_element_by_class_name('ui-dialog-title')\n if dialogtitle.text == '预约申请':\n st = self.driver.find_elements_by_name('start_time')[2]\n et = self.driver.find_elements_by_name('end_time')[2]\n 
Select(st).select_by_value(self.startTime)\n Select(et).select_by_value(self.endTime)\n self.driver.find_element_by_class_name('submitarea').find_element_by_xpath(\n \"//input[@value='提交']\").click()\n return True\n else:\n return False\n except Exception as e:\n print(str(e))\n return False\n\n def book_room(self):\n if self.driver.title == \"IC空间管理系统\":\n if not self.goLogin():\n print('not login')\n if not self.inUserCenter():\n print('not in user center')\n if not self.goUserCenter():\n print('not go user center')\n if not self.comitBook():\n print('not go 研究小间')\n if not self.changeTheDate():\n print('not go commit')\n if not self.goBookRoomSelection():\n print('not go Date')\n else:\n print('book success')\n self.driver.close()\n self.stop()\n return\n\n\n self.start_timer()\n\n # 注册操作\n def operationBook(self):\n url = \"http://seatlib.fjtcm.edu.cn\"\n self.driver.get(url)\n\n while True:\n now = datetime.datetime.now()\n if now.hour > self.startHour or (now.hour == self.startHour and now.minute >= self.startMin):\n self.driver.refresh()\n break\n # 每隔10秒检测一次\n time.sleep(10)\n self.start_timer()\n\n def start_timer(self, interval=0.5):\n self.timer = threading.Timer(interval, self.book_room)\n self.timer.start()\n\n def openbrowser(self):\n self.openChrome()\n self.operationBook()\n",
"step-ids": [
3,
8,
15,
17,
18
]
}
|
[
3,
8,
15,
17,
18
] |
<|reserved_special_token_0|>
class Solution:
def __new__(self, p):
nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,
p)
inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)
replaced_by_ = p.replace(' ', '-')
combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'
combined_queries += str(self.reverse_words(inversed)) + '::'
combined_queries += str(replaced_by_) + '::' + str(replaced)
return combined_queries
<|reserved_special_token_0|>
def reverse_words(word):
list_string = word.split(' ')
list_string.reverse()
string = ' '.join(list_string)
return string
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def __new__(self, p):
nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,
p)
inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)
replaced_by_ = p.replace(' ', '-')
combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'
combined_queries += str(self.reverse_words(inversed)) + '::'
combined_queries += str(replaced_by_) + '::' + str(replaced)
return combined_queries
def count_vowels_consonants(self, text):
vowels_list = ['A', 'E', 'I', 'O', 'U']
consonants = 0
vowels = 0
string = ''
for character in text:
if character.isalpha():
if character.upper() in vowels_list:
vowels += 1
string += 'pv'
else:
consonants += 1
string += character
return vowels, consonants, string
def reverse_words(word):
list_string = word.split(' ')
list_string.reverse()
string = ' '.join(list_string)
return string
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def __new__(self, p):
nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,
p)
inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)
replaced_by_ = p.replace(' ', '-')
combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'
combined_queries += str(self.reverse_words(inversed)) + '::'
combined_queries += str(replaced_by_) + '::' + str(replaced)
return combined_queries
def count_vowels_consonants(self, text):
vowels_list = ['A', 'E', 'I', 'O', 'U']
consonants = 0
vowels = 0
string = ''
for character in text:
if character.isalpha():
if character.upper() in vowels_list:
vowels += 1
string += 'pv'
else:
consonants += 1
string += character
return vowels, consonants, string
def reverse_words(word):
list_string = word.split(' ')
list_string.reverse()
string = ' '.join(list_string)
return string
if __name__ == '__main__':
solutions = Solution('The iterator is just clutter')
print(solutions)
<|reserved_special_token_1|>
import re
class Solution:
def __new__(self, p):
nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,
p)
inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)
replaced_by_ = p.replace(' ', '-')
combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'
combined_queries += str(self.reverse_words(inversed)) + '::'
combined_queries += str(replaced_by_) + '::' + str(replaced)
return combined_queries
def count_vowels_consonants(self, text):
vowels_list = ['A', 'E', 'I', 'O', 'U']
consonants = 0
vowels = 0
string = ''
for character in text:
if character.isalpha():
if character.upper() in vowels_list:
vowels += 1
string += 'pv'
else:
consonants += 1
string += character
return vowels, consonants, string
def reverse_words(word):
list_string = word.split(' ')
list_string.reverse()
string = ' '.join(list_string)
return string
if __name__ == '__main__':
solutions = Solution('The iterator is just clutter')
print(solutions)
<|reserved_special_token_1|>
#!/usr/bin/env python
import re
class Solution:
def __new__(self, p):
nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self, p)
inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)
replaced_by_ = p.replace(' ' ,'-')
combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'
combined_queries += str(self.reverse_words(inversed)) + '::'
combined_queries += str(replaced_by_ )+ '::' + str(replaced)
return combined_queries
def count_vowels_consonants(self, text):
vowels_list = ['A', 'E', 'I', 'O', 'U']
consonants = 0
vowels = 0
string = ''
for character in text:
if character.isalpha():
if character.upper() in vowels_list:
vowels += 1
string += 'pv'
else:
consonants += 1
string += character
return (vowels, consonants, string)
def reverse_words(word):
list_string = word.split(' ')
list_string.reverse()
string = ' '.join(list_string)
return string
if __name__ == '__main__':
solutions = Solution('The iterator is just clutter')
# solutions = Solution('The')
print(solutions)
|
flexible
|
{
"blob_id": "ec9de8d54113806ab327f05e077edefa74258adb",
"index": 2662,
"step-1": "<mask token>\n\n\nclass Solution:\n\n def __new__(self, p):\n nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,\n p)\n inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)\n replaced_by_ = p.replace(' ', '-')\n combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'\n combined_queries += str(self.reverse_words(inversed)) + '::'\n combined_queries += str(replaced_by_) + '::' + str(replaced)\n return combined_queries\n <mask token>\n\n def reverse_words(word):\n list_string = word.split(' ')\n list_string.reverse()\n string = ' '.join(list_string)\n return string\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def __new__(self, p):\n nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,\n p)\n inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)\n replaced_by_ = p.replace(' ', '-')\n combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'\n combined_queries += str(self.reverse_words(inversed)) + '::'\n combined_queries += str(replaced_by_) + '::' + str(replaced)\n return combined_queries\n\n def count_vowels_consonants(self, text):\n vowels_list = ['A', 'E', 'I', 'O', 'U']\n consonants = 0\n vowels = 0\n string = ''\n for character in text:\n if character.isalpha():\n if character.upper() in vowels_list:\n vowels += 1\n string += 'pv'\n else:\n consonants += 1\n string += character\n return vowels, consonants, string\n\n def reverse_words(word):\n list_string = word.split(' ')\n list_string.reverse()\n string = ' '.join(list_string)\n return string\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def __new__(self, p):\n nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,\n p)\n inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)\n replaced_by_ = p.replace(' ', '-')\n combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'\n combined_queries += str(self.reverse_words(inversed)) + '::'\n combined_queries += str(replaced_by_) + '::' + str(replaced)\n return combined_queries\n\n def count_vowels_consonants(self, text):\n vowels_list = ['A', 'E', 'I', 'O', 'U']\n consonants = 0\n vowels = 0\n string = ''\n for character in text:\n if character.isalpha():\n if character.upper() in vowels_list:\n vowels += 1\n string += 'pv'\n else:\n consonants += 1\n string += character\n return vowels, consonants, string\n\n def reverse_words(word):\n list_string = word.split(' ')\n list_string.reverse()\n string = ' '.join(list_string)\n return string\n\n\nif __name__ == '__main__':\n solutions = Solution('The iterator is just clutter')\n print(solutions)\n",
"step-4": "import re\n\n\nclass Solution:\n\n def __new__(self, p):\n nr_counts, nr_consonants, replaced = self.count_vowels_consonants(self,\n p)\n inversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)\n replaced_by_ = p.replace(' ', '-')\n combined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::'\n combined_queries += str(self.reverse_words(inversed)) + '::'\n combined_queries += str(replaced_by_) + '::' + str(replaced)\n return combined_queries\n\n def count_vowels_consonants(self, text):\n vowels_list = ['A', 'E', 'I', 'O', 'U']\n consonants = 0\n vowels = 0\n string = ''\n for character in text:\n if character.isalpha():\n if character.upper() in vowels_list:\n vowels += 1\n string += 'pv'\n else:\n consonants += 1\n string += character\n return vowels, consonants, string\n\n def reverse_words(word):\n list_string = word.split(' ')\n list_string.reverse()\n string = ' '.join(list_string)\n return string\n\n\nif __name__ == '__main__':\n solutions = Solution('The iterator is just clutter')\n print(solutions)\n",
"step-5": "#!/usr/bin/env python\n\nimport re \n\n\n\nclass Solution:\n\n\tdef __new__(self, p):\n\t\tnr_counts, nr_consonants, replaced = self.count_vowels_consonants(self, p)\n\t\tinversed = ''.join(c.lower() if c.isupper() else c.upper() for c in p)\n\t\treplaced_by_ = p.replace(' ' ,'-')\n\t\tcombined_queries = str(nr_counts) + ' ' + str(nr_consonants) + '::' \n\t\tcombined_queries += str(self.reverse_words(inversed)) + '::' \n\t\tcombined_queries += str(replaced_by_ )+ '::' + str(replaced)\n\t\treturn combined_queries\n\n\tdef count_vowels_consonants(self, text):\n\t\tvowels_list = ['A', 'E', 'I', 'O', 'U']\n\t\tconsonants = 0\n\t\tvowels = 0\n\t\tstring = ''\n\t\tfor character in text:\n\t\t if character.isalpha():\n\t\t \tif character.upper() in vowels_list:\n\t\t\t \tvowels += 1\n\t\t\t \tstring += 'pv'\n\t\t \telse:\n\t\t \t\tconsonants += 1\n\t\t string += character\n\t\treturn (vowels, consonants, string)\n\n\tdef reverse_words(word):\n\t\tlist_string = word.split(' ')\n\t\tlist_string.reverse()\n\t\tstring = ' '.join(list_string) \n\t\treturn string\t\n\nif __name__ == '__main__':\n\tsolutions = Solution('The iterator is just clutter')\n\t# solutions = Solution('The')\n\tprint(solutions)",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# Generated by Django 2.0.13 on 2019-05-23 14:12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0001_initial'),
('users', '0003_user_projects'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='projects',
),
migrations.AddField(
model_name='user',
name='projects',
field=models.ManyToManyField(related_name='projects', to='projects.Project'),
),
]
|
normal
|
{
"blob_id": "547935a67fb079e551534126534234ceb96ed0dd",
"index": 7648,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('projects', '0001_initial'), ('users',\n '0003_user_projects')]\n operations = [migrations.RemoveField(model_name='user', name='projects'\n ), migrations.AddField(model_name='user', name='projects', field=\n models.ManyToManyField(related_name='projects', to='projects.Project'))\n ]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('projects', '0001_initial'), ('users',\n '0003_user_projects')]\n operations = [migrations.RemoveField(model_name='user', name='projects'\n ), migrations.AddField(model_name='user', name='projects', field=\n models.ManyToManyField(related_name='projects', to='projects.Project'))\n ]\n",
"step-5": "# Generated by Django 2.0.13 on 2019-05-23 14:12\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('projects', '0001_initial'),\n ('users', '0003_user_projects'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='user',\n name='projects',\n ),\n migrations.AddField(\n model_name='user',\n name='projects',\n field=models.ManyToManyField(related_name='projects', to='projects.Project'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def update_customer(first_name, surname, cid, customer_repository):
customer = customer_repository.fetch_by_id(cid)
customer.first_name = first_name
customer.surname = surname
customer_repository.store(customer)
return customer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_customer(customer_id, customer_repository):
return customer_repository.fetch_by_id(customer_id)
<|reserved_special_token_0|>
def update_customer(first_name, surname, cid, customer_repository):
customer = customer_repository.fetch_by_id(cid)
customer.first_name = first_name
customer.surname = surname
customer_repository.store(customer)
return customer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_customer(customer_id, customer_repository):
return customer_repository.fetch_by_id(customer_id)
def create_customer(first_name, surname, customer_repository):
customer = Customer(first_name=first_name, surname=surname)
customer_repository.store(customer)
return customer.customer_id
def update_customer(first_name, surname, cid, customer_repository):
customer = customer_repository.fetch_by_id(cid)
customer.first_name = first_name
customer.surname = surname
customer_repository.store(customer)
return customer
<|reserved_special_token_1|>
from customer_service.model.customer import Customer
def get_customer(customer_id, customer_repository):
return customer_repository.fetch_by_id(customer_id)
def create_customer(first_name, surname, customer_repository):
customer = Customer(first_name=first_name, surname=surname)
customer_repository.store(customer)
return customer.customer_id
def update_customer(first_name, surname, cid, customer_repository):
customer = customer_repository.fetch_by_id(cid)
customer.first_name = first_name
customer.surname = surname
customer_repository.store(customer)
return customer
|
flexible
|
{
"blob_id": "f5e60f2d384242b9675e756f67391ea09afcc262",
"index": 5408,
"step-1": "<mask token>\n\n\ndef update_customer(first_name, surname, cid, customer_repository):\n customer = customer_repository.fetch_by_id(cid)\n customer.first_name = first_name\n customer.surname = surname\n customer_repository.store(customer)\n return customer\n",
"step-2": "<mask token>\n\n\ndef get_customer(customer_id, customer_repository):\n return customer_repository.fetch_by_id(customer_id)\n\n\n<mask token>\n\n\ndef update_customer(first_name, surname, cid, customer_repository):\n customer = customer_repository.fetch_by_id(cid)\n customer.first_name = first_name\n customer.surname = surname\n customer_repository.store(customer)\n return customer\n",
"step-3": "<mask token>\n\n\ndef get_customer(customer_id, customer_repository):\n return customer_repository.fetch_by_id(customer_id)\n\n\ndef create_customer(first_name, surname, customer_repository):\n customer = Customer(first_name=first_name, surname=surname)\n customer_repository.store(customer)\n return customer.customer_id\n\n\ndef update_customer(first_name, surname, cid, customer_repository):\n customer = customer_repository.fetch_by_id(cid)\n customer.first_name = first_name\n customer.surname = surname\n customer_repository.store(customer)\n return customer\n",
"step-4": "from customer_service.model.customer import Customer\n\n\ndef get_customer(customer_id, customer_repository):\n return customer_repository.fetch_by_id(customer_id)\n\n\ndef create_customer(first_name, surname, customer_repository):\n customer = Customer(first_name=first_name, surname=surname)\n customer_repository.store(customer)\n return customer.customer_id\n\n\ndef update_customer(first_name, surname, cid, customer_repository):\n customer = customer_repository.fetch_by_id(cid)\n customer.first_name = first_name\n customer.surname = surname\n customer_repository.store(customer)\n return customer\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
from django.shortcuts import render
from django.http import HttpResponseRedirect
from .forms import PostForm
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from .models import Post
from django.contrib import messages
# Create your views here.
@login_required
def post_create(request):
"""
This makes sure that the form accpets a POST requests (of some data) or Nothing.
Without this the form would even accept empty data.
"""
form = PostForm(request.POST or None, request.FILES or None)
if request.POST:
if form.is_valid():
instance = form.save(commit=False)
instance.user = request.user
instance.save()
messages.success(request, "Post created!")
return HttpResponseRedirect(instance.get_absolute_url())
else:
messages.error(request, "Sorry! Something went wrong.", extra_tags="")
context = {
'title': "Create Post",
'form' : form,
}
return render(request, 'post/create.html', context)
def post_view(request, slug):
instance = get_object_or_404(Post, slug=slug)
context = {
'instance' : instance
}
return render(request, 'post/view.html', context)
|
normal
|
{
"blob_id": "4a2437d3d6ba549910bc30a67bf391b9bbafd25f",
"index": 6210,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@login_required\ndef post_create(request):\n \"\"\"\n\t\tThis makes sure that the form accpets a POST requests (of some data) or Nothing.\n\t\tWithout this the form would even accept empty data.\n\t\"\"\"\n form = PostForm(request.POST or None, request.FILES or None)\n if request.POST:\n if form.is_valid():\n instance = form.save(commit=False)\n instance.user = request.user\n instance.save()\n messages.success(request, 'Post created!')\n return HttpResponseRedirect(instance.get_absolute_url())\n else:\n messages.error(request, 'Sorry! Something went wrong.',\n extra_tags='')\n context = {'title': 'Create Post', 'form': form}\n return render(request, 'post/create.html', context)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@login_required\ndef post_create(request):\n \"\"\"\n\t\tThis makes sure that the form accpets a POST requests (of some data) or Nothing.\n\t\tWithout this the form would even accept empty data.\n\t\"\"\"\n form = PostForm(request.POST or None, request.FILES or None)\n if request.POST:\n if form.is_valid():\n instance = form.save(commit=False)\n instance.user = request.user\n instance.save()\n messages.success(request, 'Post created!')\n return HttpResponseRedirect(instance.get_absolute_url())\n else:\n messages.error(request, 'Sorry! Something went wrong.',\n extra_tags='')\n context = {'title': 'Create Post', 'form': form}\n return render(request, 'post/create.html', context)\n\n\ndef post_view(request, slug):\n instance = get_object_or_404(Post, slug=slug)\n context = {'instance': instance}\n return render(request, 'post/view.html', context)\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponseRedirect\nfrom .forms import PostForm\nfrom django.contrib.auth.decorators import login_required\nfrom django.shortcuts import get_object_or_404\nfrom .models import Post\nfrom django.contrib import messages\n\n\n@login_required\ndef post_create(request):\n \"\"\"\n\t\tThis makes sure that the form accpets a POST requests (of some data) or Nothing.\n\t\tWithout this the form would even accept empty data.\n\t\"\"\"\n form = PostForm(request.POST or None, request.FILES or None)\n if request.POST:\n if form.is_valid():\n instance = form.save(commit=False)\n instance.user = request.user\n instance.save()\n messages.success(request, 'Post created!')\n return HttpResponseRedirect(instance.get_absolute_url())\n else:\n messages.error(request, 'Sorry! Something went wrong.',\n extra_tags='')\n context = {'title': 'Create Post', 'form': form}\n return render(request, 'post/create.html', context)\n\n\ndef post_view(request, slug):\n instance = get_object_or_404(Post, slug=slug)\n context = {'instance': instance}\n return render(request, 'post/view.html', context)\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponseRedirect\nfrom .forms import PostForm\nfrom django.contrib.auth.decorators import login_required\nfrom django.shortcuts import get_object_or_404\nfrom .models import Post\nfrom django.contrib import messages\n# Create your views here.\n@login_required\ndef post_create(request):\n\t\"\"\"\n\t\tThis makes sure that the form accpets a POST requests (of some data) or Nothing.\n\t\tWithout this the form would even accept empty data.\n\t\"\"\"\n\tform = PostForm(request.POST or None, request.FILES or None)\n\tif request.POST:\n\t\tif form.is_valid():\n\t\t\tinstance = form.save(commit=False)\n\t\t\tinstance.user = request.user\n\t\t\tinstance.save()\n\t\t\tmessages.success(request, \"Post created!\")\n\t\t\treturn HttpResponseRedirect(instance.get_absolute_url())\n\t\telse:\n\t\t\tmessages.error(request, \"Sorry! Something went wrong.\", extra_tags=\"\")\n\tcontext = {\n\t\t'title': \"Create Post\",\n\t\t'form' : form,\n\t}\n\treturn render(request, 'post/create.html', context)\n\n\ndef post_view(request, slug):\n\tinstance = get_object_or_404(Post, slug=slug)\n\n\tcontext = {\n\t\t'instance' : instance\t\n\t}\n\treturn render(request, 'post/view.html', context)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
a=float.input('Valor da conta')
print('Valor da conta com 10%: R$',(a))
|
normal
|
{
"blob_id": "d1ce6c081dce2e4bdb6087cd61d7f857dbb1348d",
"index": 8781,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Valor da conta com 10%: R$', a)\n",
"step-3": "a = float.input('Valor da conta')\nprint('Valor da conta com 10%: R$', a)\n",
"step-4": "a=float.input('Valor da conta')\nprint('Valor da conta com 10%: R$',(a))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class BasicBlock(layers.Layer):
<|reserved_special_token_0|>
def call(self, inputs, training=None):
out = self.conv1(inputs)
out = self.bn1(out)
out = self.relu1(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu2(out)
identity = self.downsample(inputs)
output = layers.add([out, identity])
output = tf.nn.relu(output)
return output
class ResNet(keras.Model):
def __init__(self, layer_dims, num_classes=4):
super(ResNet, self).__init__()
self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),
layers.BatchNormalization(), layers.Activation('relu')])
self.layer1 = self.build_resblock(16, layer_dims[0])
self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,
strides=4)
self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,
strides=4)
self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)
self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)
self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)
self.avgpool = layers.GlobalAveragePooling1D()
self.fc = layers.Dense(num_classes)
def call(self, inputs, training=None):
x = self.stem(inputs)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.layer5(x)
x = self.layer6(x)
x = self.avgpool(x)
x = self.fc(x)
return x
def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):
res_blocks = Sequential()
res_blocks.add(BasicBlock(filter_num, kernel_size, strides))
for _ in range(1, blocks):
res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))
return res_blocks
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BasicBlock(layers.Layer):
def __init__(self, filter_num, kernel_size, strides=1):
super(BasicBlock, self).__init__()
self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,
padding='same')
self.bn1 = layers.BatchNormalization()
self.relu1 = layers.Activation('relu')
self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,
padding='same')
self.bn2 = layers.BatchNormalization()
self.relu2 = layers.Activation('relu')
if strides != 1:
self.downsample = Sequential()
self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))
else:
self.downsample = lambda x: x
def call(self, inputs, training=None):
out = self.conv1(inputs)
out = self.bn1(out)
out = self.relu1(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu2(out)
identity = self.downsample(inputs)
output = layers.add([out, identity])
output = tf.nn.relu(output)
return output
class ResNet(keras.Model):
def __init__(self, layer_dims, num_classes=4):
super(ResNet, self).__init__()
self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),
layers.BatchNormalization(), layers.Activation('relu')])
self.layer1 = self.build_resblock(16, layer_dims[0])
self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,
strides=4)
self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,
strides=4)
self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)
self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)
self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)
self.avgpool = layers.GlobalAveragePooling1D()
self.fc = layers.Dense(num_classes)
def call(self, inputs, training=None):
x = self.stem(inputs)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.layer5(x)
x = self.layer6(x)
x = self.avgpool(x)
x = self.fc(x)
return x
def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):
res_blocks = Sequential()
res_blocks.add(BasicBlock(filter_num, kernel_size, strides))
for _ in range(1, blocks):
res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))
return res_blocks
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
set_session(sess)
print("""
Tensorflow GPU installed: """ + str(tf.test.is_built_with_cuda()))
print('Is Tensorflow using GPU: \n' + str(tf.test.is_gpu_available()))
class BasicBlock(layers.Layer):
def __init__(self, filter_num, kernel_size, strides=1):
super(BasicBlock, self).__init__()
self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,
padding='same')
self.bn1 = layers.BatchNormalization()
self.relu1 = layers.Activation('relu')
self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,
padding='same')
self.bn2 = layers.BatchNormalization()
self.relu2 = layers.Activation('relu')
if strides != 1:
self.downsample = Sequential()
self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))
else:
self.downsample = lambda x: x
def call(self, inputs, training=None):
out = self.conv1(inputs)
out = self.bn1(out)
out = self.relu1(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu2(out)
identity = self.downsample(inputs)
output = layers.add([out, identity])
output = tf.nn.relu(output)
return output
class ResNet(keras.Model):
def __init__(self, layer_dims, num_classes=4):
super(ResNet, self).__init__()
self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),
layers.BatchNormalization(), layers.Activation('relu')])
self.layer1 = self.build_resblock(16, layer_dims[0])
self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,
strides=4)
self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,
strides=4)
self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)
self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)
self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)
self.avgpool = layers.GlobalAveragePooling1D()
self.fc = layers.Dense(num_classes)
def call(self, inputs, training=None):
x = self.stem(inputs)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.layer5(x)
x = self.layer6(x)
x = self.avgpool(x)
x = self.fc(x)
return x
def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):
res_blocks = Sequential()
res_blocks.add(BasicBlock(filter_num, kernel_size, strides))
for _ in range(1, blocks):
res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))
return res_blocks
<|reserved_special_token_0|>
model.build(input_shape=(512, 512, 1))
<|reserved_special_token_0|>
for epoch in range(500):
for step, (x, y) in enumerate(train_db):
with tf.GradientTape() as tape:
logits = model(x, training=True)
y_onehot = tf.one_hot(y, depth=4)
loss = tf.losses.categorical_crossentropy(y_onehot, logits,
from_logits=True)
loss = tf.reduce_mean(loss)
grads = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))
if step % 100 == 0:
print(epoch, step, 'loss:', float(loss))
train_loss.append(loss)
total_num = 0
total_correct = 0
for x, y in test_db:
logits = model(x)
prob = tf.nn.softmax(logits, axis=1)
pred = tf.argmax(prob, axis=1)
pred = tf.cast(pred, dtype=tf.int32)
correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)
correct = tf.reduce_sum(correct)
total_num += x.shape[0]
total_correct += int(correct)
acc = total_correct / total_num
test_acc.append(acc)
print(epoch, 'acc:', acc)
if acc > acc_max:
acc_max = acc
model.save_weights('ResNet/weights.ckpt')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
<|reserved_special_token_0|>
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True
config.log_device_placement = True
sess = tf.compat.v1.Session(config=config)
set_session(sess)
print("""
Tensorflow GPU installed: """ + str(tf.test.is_built_with_cuda()))
print('Is Tensorflow using GPU: \n' + str(tf.test.is_gpu_available()))
class BasicBlock(layers.Layer):
def __init__(self, filter_num, kernel_size, strides=1):
super(BasicBlock, self).__init__()
self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,
padding='same')
self.bn1 = layers.BatchNormalization()
self.relu1 = layers.Activation('relu')
self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,
padding='same')
self.bn2 = layers.BatchNormalization()
self.relu2 = layers.Activation('relu')
if strides != 1:
self.downsample = Sequential()
self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))
else:
self.downsample = lambda x: x
def call(self, inputs, training=None):
out = self.conv1(inputs)
out = self.bn1(out)
out = self.relu1(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu2(out)
identity = self.downsample(inputs)
output = layers.add([out, identity])
output = tf.nn.relu(output)
return output
class ResNet(keras.Model):
def __init__(self, layer_dims, num_classes=4):
super(ResNet, self).__init__()
self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),
layers.BatchNormalization(), layers.Activation('relu')])
self.layer1 = self.build_resblock(16, layer_dims[0])
self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,
strides=4)
self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,
strides=4)
self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)
self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)
self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)
self.avgpool = layers.GlobalAveragePooling1D()
self.fc = layers.Dense(num_classes)
def call(self, inputs, training=None):
x = self.stem(inputs)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.layer5(x)
x = self.layer6(x)
x = self.avgpool(x)
x = self.fc(x)
return x
def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):
res_blocks = Sequential()
res_blocks.add(BasicBlock(filter_num, kernel_size, strides))
for _ in range(1, blocks):
res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))
return res_blocks
x_train = np.loadtxt('/content/drive/My Drive/Data/x_train').reshape(-1, 512, 1
).astype(np.float32)
y_train = np.loadtxt('/content/drive/My Drive/Data/y_train').astype(np.int32)
x_test = np.loadtxt('/content/drive/My Drive/Data/x_test').reshape(-1, 512, 1
).astype(np.float32)
y_test = np.loadtxt('/content/drive/My Drive/Data/y_test').astype(np.int32)
train_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(512)
test_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(512)
model = ResNet([2, 2, 2, 2, 2, 2])
model.build(input_shape=(512, 512, 1))
optimizer = optimizers.Adam(lr=0.001)
train_loss = []
test_acc = []
acc_max = 0
for epoch in range(500):
for step, (x, y) in enumerate(train_db):
with tf.GradientTape() as tape:
logits = model(x, training=True)
y_onehot = tf.one_hot(y, depth=4)
loss = tf.losses.categorical_crossentropy(y_onehot, logits,
from_logits=True)
loss = tf.reduce_mean(loss)
grads = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))
if step % 100 == 0:
print(epoch, step, 'loss:', float(loss))
train_loss.append(loss)
total_num = 0
total_correct = 0
for x, y in test_db:
logits = model(x)
prob = tf.nn.softmax(logits, axis=1)
pred = tf.argmax(prob, axis=1)
pred = tf.cast(pred, dtype=tf.int32)
correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)
correct = tf.reduce_sum(correct)
total_num += x.shape[0]
total_correct += int(correct)
acc = total_correct / total_num
test_acc.append(acc)
print(epoch, 'acc:', acc)
if acc > acc_max:
acc_max = acc
model.save_weights('ResNet/weights.ckpt')
<|reserved_special_token_1|>
import tensorflow as tf
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, Sequential, optimizers
import numpy as np
from tensorflow.compat.v1.keras.backend import set_session
# Configure the TF1-compat session: grow GPU memory on demand instead of
# grabbing it all, log op placement, then install it as the Keras session.
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True  # dynamically grow the memory used on the GPU
config.log_device_placement = True  # to log device placement (on which device the operation ran)
sess = tf.compat.v1.Session(config=config)
set_session(sess)
print('\nTensorflow GPU installed: ' + str(tf.test.is_built_with_cuda()))
print('Is Tensorflow using GPU: \n' + str(tf.test.is_gpu_available()))
class BasicBlock(layers.Layer):
    """Residual block for 1-D inputs: two Conv1D+BN+ReLU units plus a skip path.

    When `strides != 1` the skip path uses a 1x1 Conv1D so its length matches
    the downsampled main path; otherwise the input passes through unchanged.
    """

    def __init__(self, filter_num, kernel_size, strides=1):
        super(BasicBlock, self).__init__()
        # First convolution unit (may downsample via `strides`).
        self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides, padding='same')
        self.bn1 = layers.BatchNormalization()
        self.relu1 = layers.Activation('relu')
        # Second convolution unit (always stride 1).
        self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1, padding='same')
        self.bn2 = layers.BatchNormalization()
        self.relu2 = layers.Activation('relu')

        if strides != 1:
            # Project the identity branch so its length matches the main path.
            self.downsample = Sequential()
            self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))
        else:
            self.downsample = lambda x: x

    def call(self, inputs, training=None):
        # First unit. Pass `training` explicitly so BatchNormalization uses
        # batch statistics while training and moving averages at inference,
        # instead of relying on Keras' implicit call-context propagation.
        out = self.conv1(inputs)
        out = self.bn1(out, training=training)
        out = self.relu1(out)
        # Second unit.
        out = self.conv2(out)
        out = self.bn2(out, training=training)
        out = self.relu2(out)
        # Identity / projection shortcut.
        identity = self.downsample(inputs)
        # Sum the two paths, then apply the final activation.
        output = layers.add([out, identity])
        output = tf.nn.relu(output)
        return output
class ResNet(keras.Model):
    """1-D ResNet: conv stem, six residual stages, global average pooling,
    and a dense classification head (default 4 classes)."""

    def __init__(self, layer_dims, num_classes=4):
        # layer_dims: number of BasicBlocks per stage, e.g. [2, 2, 2, 2, 2, 2].
        super(ResNet, self).__init__()
        # Stem: one conv unit applied before the residual stages.
        self.stem = Sequential([
            layers.Conv1D(16, kernel_size=3, strides=1),
            layers.BatchNormalization(),
            layers.Activation('relu'),
        ])
        # (filters, blocks, kernel_size, strides) per stage; strides > 1
        # shrink the sequence length (512 -> 128 -> 32 -> 16 -> 8 -> 4).
        stage_specs = [
            (16, layer_dims[0], 3, 1),
            (32, layer_dims[1], 5, 4),
            (64, layer_dims[2], 5, 4),
            (128, layer_dims[3], 3, 2),
            (256, layer_dims[4], 3, 2),
            (512, layer_dims[5], 3, 2),
        ]
        for stage, (filters, blocks, ksize, stride) in enumerate(stage_specs, start=1):
            setattr(self, 'layer%d' % stage,
                    self.build_resblock(filters, blocks, kernel_size=ksize, strides=stride))
        self.avgpool = layers.GlobalAveragePooling1D()  # -> one 512-vector per sample
        self.fc = layers.Dense(num_classes)

    def call(self, inputs, training=None):
        x = self.stem(inputs)
        for stage in range(1, 7):
            x = getattr(self, 'layer%d' % stage)(x)
        x = self.avgpool(x)
        return self.fc(x)

    def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):
        """Stack `blocks` BasicBlocks; only the first one may downsample."""
        stack = Sequential()
        stack.add(BasicBlock(filter_num, kernel_size, strides))
        for _ in range(blocks - 1):
            stack.add(BasicBlock(filter_num, kernel_size, strides=1))
        return stack
# Load the pre-split dataset: each sample is a length-512 single-channel series.
x_train = np.loadtxt(r'/content/drive/My Drive/Data/x_train').reshape(-1, 512, 1).astype(np.float32)
y_train = np.loadtxt(r'/content/drive/My Drive/Data/y_train').astype(np.int32)
x_test = np.loadtxt(r'/content/drive/My Drive/Data/x_test').reshape(-1, 512, 1).astype(np.float32)
y_test = np.loadtxt(r'/content/drive/My Drive/Data/y_test').astype(np.int32)
train_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(512)
test_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(512)
# sample = next(iter(train_db))
# print(sample)

# Six residual stages with two BasicBlocks each.
model = ResNet([2,2,2,2,2,2])
model.build(input_shape=(512,512,1))
# conv_net.summary()
# fc_net.summary()
optimizer = optimizers.Adam(lr=1e-3)

train_loss = []
test_acc = []
acc_max = 0
# Train for 500 epochs; evaluate on the test set after each epoch and
# checkpoint the weights whenever test accuracy reaches a new best.
for epoch in range(500):
    for step, (x, y) in enumerate(train_db):
        with tf.GradientTape() as tape:
            # [b,512,1]=>[b,4]
            logits = model(x, training=True)

            y_onehot = tf.one_hot(y, depth=4)
            loss = tf.losses.categorical_crossentropy(y_onehot, logits, from_logits=True)
            loss = tf.reduce_mean(loss)
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))
        if step % 100 == 0:
            print(epoch, step, 'loss:', float(loss))
            train_loss.append(loss)
    # Evaluation: argmax over softmax probabilities, accuracy over all batches.
    total_num = 0
    total_correct = 0
    for x, y in test_db:
        logits = model(x)
        prob = tf.nn.softmax(logits, axis=1)
        pred = tf.argmax(prob, axis=1)
        pred = tf.cast(pred, dtype=tf.int32)

        correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)
        correct = tf.reduce_sum(correct)

        total_num += x.shape[0]
        total_correct += int(correct)

    acc = total_correct / total_num
    test_acc.append(acc)
    print(epoch, 'acc:', acc)
    if acc > acc_max:
        acc_max = acc
        model.save_weights(r'ResNet/weights.ckpt')
|
flexible
|
{
"blob_id": "e626a7f3f9241db8684c3b8c1bd79ea49e03490d",
"index": 8141,
"step-1": "<mask token>\n\n\nclass BasicBlock(layers.Layer):\n <mask token>\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return res_blocks\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass BasicBlock(layers.Layer):\n\n def __init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,\n padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,\n padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, 
filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return res_blocks\n\n\n<mask token>\n",
"step-3": "<mask token>\nset_session(sess)\nprint(\"\"\"\nTensorflow GPU installed: \"\"\" + str(tf.test.is_built_with_cuda()))\nprint('Is Tensorflow using GPU: \\n' + str(tf.test.is_gpu_available()))\n\n\nclass BasicBlock(layers.Layer):\n\n def __init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,\n padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,\n padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = 
self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return res_blocks\n\n\n<mask token>\nmodel.build(input_shape=(512, 512, 1))\n<mask token>\nfor epoch in range(500):\n for step, (x, y) in enumerate(train_db):\n with tf.GradientTape() as tape:\n logits = model(x, training=True)\n y_onehot = tf.one_hot(y, depth=4)\n loss = tf.losses.categorical_crossentropy(y_onehot, logits,\n from_logits=True)\n loss = tf.reduce_mean(loss)\n grads = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(grads, model.trainable_variables))\n if step % 100 == 0:\n print(epoch, step, 'loss:', float(loss))\n train_loss.append(loss)\n total_num = 0\n total_correct = 0\n for x, y in test_db:\n logits = model(x)\n prob = tf.nn.softmax(logits, axis=1)\n pred = tf.argmax(prob, axis=1)\n pred = tf.cast(pred, dtype=tf.int32)\n correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)\n correct = tf.reduce_sum(correct)\n total_num += x.shape[0]\n total_correct += int(correct)\n acc = total_correct / total_num\n test_acc.append(acc)\n print(epoch, 'acc:', acc)\n if acc > acc_max:\n acc_max = acc\n model.save_weights('ResNet/weights.ckpt')\n",
"step-4": "<mask token>\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\n<mask token>\nconfig = tf.compat.v1.ConfigProto()\nconfig.gpu_options.allow_growth = True\nconfig.log_device_placement = True\nsess = tf.compat.v1.Session(config=config)\nset_session(sess)\nprint(\"\"\"\nTensorflow GPU installed: \"\"\" + str(tf.test.is_built_with_cuda()))\nprint('Is Tensorflow using GPU: \\n' + str(tf.test.is_gpu_available()))\n\n\nclass BasicBlock(layers.Layer):\n\n def __init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,\n padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,\n padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, 
layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return res_blocks\n\n\nx_train = np.loadtxt('/content/drive/My Drive/Data/x_train').reshape(-1, 512, 1\n ).astype(np.float32)\ny_train = np.loadtxt('/content/drive/My Drive/Data/y_train').astype(np.int32)\nx_test = np.loadtxt('/content/drive/My Drive/Data/x_test').reshape(-1, 512, 1\n ).astype(np.float32)\ny_test = np.loadtxt('/content/drive/My Drive/Data/y_test').astype(np.int32)\ntrain_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(512)\ntest_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(512)\nmodel = ResNet([2, 2, 2, 2, 2, 2])\nmodel.build(input_shape=(512, 512, 1))\noptimizer = optimizers.Adam(lr=0.001)\ntrain_loss = []\ntest_acc = []\nacc_max = 0\nfor epoch in range(500):\n for step, (x, y) in enumerate(train_db):\n with tf.GradientTape() as tape:\n logits = model(x, training=True)\n y_onehot = tf.one_hot(y, depth=4)\n loss = tf.losses.categorical_crossentropy(y_onehot, logits,\n from_logits=True)\n loss = tf.reduce_mean(loss)\n grads = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(grads, model.trainable_variables))\n if step % 100 == 0:\n print(epoch, step, 'loss:', float(loss))\n train_loss.append(loss)\n total_num = 0\n total_correct = 0\n for x, y in test_db:\n logits = model(x)\n prob = tf.nn.softmax(logits, axis=1)\n pred = tf.argmax(prob, axis=1)\n pred = tf.cast(pred, 
dtype=tf.int32)\n correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)\n correct = tf.reduce_sum(correct)\n total_num += x.shape[0]\n total_correct += int(correct)\n acc = total_correct / total_num\n test_acc.append(acc)\n print(epoch, 'acc:', acc)\n if acc > acc_max:\n acc_max = acc\n model.save_weights('ResNet/weights.ckpt')\n",
"step-5": "import tensorflow as tf\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras import layers, Sequential, optimizers\nimport numpy as np\nfrom tensorflow.compat.v1.keras.backend import set_session\n\nconfig = tf.compat.v1.ConfigProto()\nconfig.gpu_options.allow_growth = True # dynamically grow the memory used on the GPU\nconfig.log_device_placement = True # to log device placement (on which device the operation ran)\nsess = tf.compat.v1.Session(config=config)\nset_session(sess)\nprint('\\nTensorflow GPU installed: ' + str(tf.test.is_built_with_cuda()))\nprint('Is Tensorflow using GPU: \\n' + str(tf.test.is_gpu_available()))\n\n\nclass BasicBlock(layers.Layer):\n # 残差模块\n def __init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n # 第一个卷积单元\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides, padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n # 第二个卷积单元\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1, padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n # 通过第二个卷积单元\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n # 通过identity模块\n identity = self.downsample(inputs)\n # 2条路径输出直接相加\n output = layers.add([out, identity])\n output = tf.nn.relu(output) # 激活函数\n return output\n\n\nclass ResNet(keras.Model):\n def __init__(self, layer_dims, num_classes=4):\n # layer_dims:list[2,2,2,2,2,2]\n super(ResNet, self).__init__()\n\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(),\n 
layers.Activation('relu')\n ])\n self.layer1 = self.build_resblock(16, layer_dims[0]) # 512\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5, strides=4) # 128\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5, strides=4) # 32\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2) # 16\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2) # 8\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2) # 4\n\n self.avgpool = layers.GlobalAveragePooling1D() # 512大小的向量: 512*1\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n # 辅助函数,堆叠filter_num个BasicBlock\n res_blocks = Sequential()\n # 只有第一个BasicBlock的步长可能不为1,实现下采样\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n\n for _ in range(1, blocks): # 其他BasicBlock步长都为1\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n\n return res_blocks\n \nx_train = np.loadtxt(r'/content/drive/My Drive/Data/x_train').reshape(-1, 512, 1).astype(np.float32)\ny_train = np.loadtxt(r'/content/drive/My Drive/Data/y_train').astype(np.int32)\nx_test = np.loadtxt(r'/content/drive/My Drive/Data/x_test').reshape(-1, 512, 1).astype(np.float32)\ny_test = np.loadtxt(r'/content/drive/My Drive/Data/y_test').astype(np.int32)\ntrain_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(512)\ntest_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(512)\n# sample = next(iter(train_db))\n# print(sample)\n\nmodel = ResNet([2,2,2,2,2,2])\nmodel.build(input_shape=(512,512,1))\n# conv_net.summary()\n# fc_net.summary()\noptimizer = optimizers.Adam(lr=1e-3)\n\ntrain_loss = []\ntest_acc = []\nacc_max = 0\nfor epoch in range(500):\n for 
step, (x, y) in enumerate(train_db):\n with tf.GradientTape() as tape:\n # [b,512,1]=>[b,4]\n logits = model(x, training=True)\n\n y_onehot = tf.one_hot(y, depth=4)\n loss = tf.losses.categorical_crossentropy(y_onehot, logits, from_logits=True)\n loss = tf.reduce_mean(loss)\n grads = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(grads, model.trainable_variables))\n if step % 100 == 0:\n print(epoch, step, 'loss:', float(loss))\n train_loss.append(loss)\n total_num = 0\n total_correct = 0\n for x, y in test_db:\n logits = model(x)\n prob = tf.nn.softmax(logits, axis=1)\n pred = tf.argmax(prob, axis=1)\n pred = tf.cast(pred, dtype=tf.int32)\n\n correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)\n correct = tf.reduce_sum(correct)\n\n total_num += x.shape[0]\n total_correct += int(correct)\n\n acc = total_correct / total_num\n test_acc.append(acc)\n print(epoch, 'acc:', acc)\n if acc > acc_max:\n acc_max = acc\n model.save_weights(r'ResNet/weights.ckpt')\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
#!/usr/bin/python
import os;
import math;
# os.chdir('data/postgres/linux.env')
os.chdir('data/mysql/linux.env')
# os.chdir('data/mongo/linux.env')
# Column indices of the raw CSV layout: time, read ops/errors, write ops/errors.
col_time = 0;
col_read_ops = 1
col_read_err = 2
col_write_ops = 3
col_write_err = 4
class ColumnData:
    """A named series of float samples plus pre-computed summary statistics.

    Attributes:
        chart: the owning ChartData (None for derived/meta columns).
        title: display title of the series.
        data:  list of float samples.
        sum, avg, sd: total, mean and population standard deviation of data.
    """

    def __init__(self, chart, title, data):
        self.chart = chart
        self.title = title
        self.data = data
        self.sum = sum(self.data)
        self.avg = self.sum / len(self.data)
        # Population standard deviation (divide by N, not N-1).
        self.sd = math.sqrt(sum([math.pow(x - self.avg, 2) for x in data]) / len(self.data))

    def aggregate(self, group_size):
        """Return a new column of per-group means, titled '<title>_agg'.

        The aggregate deliberately keeps the ORIGINAL column's sum/avg/sd so
        summary statistics still describe the raw data. len(data) must be a
        multiple of group_size.
        """
        assert len(self.data) % group_size == 0
        # Floor division keeps the index math correct on Python 3 as well:
        # plain '/' yields a float there, which breaks range() and indexing.
        agg_data = [0.0 for _ in range(len(self.data) // group_size)]
        for i in range(len(self.data)):
            agg_data[i // group_size] += self.data[i]
        agg_column = ColumnData(self.chart, self.title + '_agg',
                                [x / group_size for x in agg_data])
        agg_column.sum = self.sum
        agg_column.avg = self.avg
        agg_column.sd = self.sd
        return agg_column
class ChartData:
    """One benchmark CSV file plus the metadata encoded in its name.

    The file name is 'key=value,key=value,....csv'; every key becomes an int
    attribute (the rest of this class expects r_lite, r_heavy, w_ins,
    w_up_tiny and w_up_wide). The file body is a quoted title row followed by
    comma-separated float rows; each column becomes a ColumnData.
    """

    def __init__(self, file):
        assert file.endswith('.csv')

        # Read meta-data from the file name.
        for field in file[:-len('.csv')].split(','):
            [key, value] = field.split('=')
            setattr(self, key, int(value))

        # Read raw data: one list per column, with the title as element 0.
        first_line = True
        input_matrix = None
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(file, 'r') as source:
            for line in source:
                line = line.strip()
                # '!=' replaces the Python-2-only '<>' operator (identical
                # semantics, also valid on Python 3).
                if line != '':
                    items = line.split(',')
                    if first_line:
                        input_matrix = [[title.replace("'", '')] for title in items]
                        first_line = False
                    else:
                        values = [float(value) for value in items]
                        for i in range(len(values)):
                            input_matrix[i].append(values[i])
        self.columns = [ColumnData(self, input_column[0], input_column[1:]) for input_column in input_matrix]

        self.time_line = self.columns[0]

        # Read-side series: total read threads plus retitled ops/error columns.
        self.read_th = self.r_lite + self.r_heavy
        read_title = 'r%d_R%d' % (self.r_lite, self.r_heavy)
        self.read_ops = self.columns[1]
        self.read_ops.title = 'R_' + read_title
        self.read_err = self.columns[2]
        self.read_err.title = 'RE_' + read_title

        # Write-side series: total write threads plus retitled columns.
        self.write_th = self.w_ins + self.w_up_tiny + self.w_up_wide
        write_title = 'i%d_u%d_U%d' % (self.w_ins, self.w_up_tiny, self.w_up_wide)
        self.write_ops = self.columns[3]
        self.write_ops.title = 'W_' + write_title
        self.write_err = self.columns[4]
        self.write_err.title = 'WE_' + write_title
# Module-level counter used by draw_chart() to auto-name unnamed charts.
name_index = 0;
def draw_chart(columns, name='', notes=''):
    """Render one Google LineChart; returns (div_id, javascript_renderer).

    columns: list of ColumnData — the first is the x-axis, the rest are series.
    name:    chart name; auto-generated as 'chart_<n>' when empty.
    notes:   unused; kept for interface compatibility.
    """
    if name == '':
        # Auto-name the chart from the module-level counter.
        global name_index;
        name_index += 1;
        name = 'chart_%s' % name_index
    id = 'chart_' + name;
    result = "";
    # JS function header and data-table opening.
    result += """
    function %s() {
        var data = google.visualization.arrayToDataTable([
    """ % id;
    # One row of quoted titles, then one row per sample across all columns.
    result += '[%s],\n' % ', '.join(['"' + c.title + '"' for c in columns])
    for i in range(len(columns[0].data)):
        result += '[%s],\n' % (', '.join([str(c.data[i]) for c in columns]))
    # Chart options and the draw call targeting the chart's <div>.
    result += """
        ]);
        var options = {
          title: '%s',
          //curveType: 'function',
          chartArea:{left:60,top:10,width:'65%%',height:'85%%'}
        };
        var chart = new google.visualization.LineChart(document.getElementById('%s'));
        chart.draw(data, options);
      }
    """ % (name, id);
    return id, result
# Collected (div_id, javascript_renderer) pairs, written into report-all.html.
charts = []
def draw_aggregated_chart(name, columns, read_from=0, read_to=0, write_from=0, write_to=0):
    """Draw one chart combining the selected columns from every CSV whose
    read/write thread counts (parsed from the file name) fall inside the
    given inclusive ranges.

    NOTE(review): this references read_chart_data(), extract_column() and
    append_column(), none of which are defined in this file, and it is never
    called — it looks like dead legacy code and would raise NameError if
    invoked. Confirm before relying on it.
    """
    read_chart = []
    for file_csv in os.listdir('.'):
        if file_csv.endswith('.csv'):
            # File names look like 'k=v,k=v,...'; after replacing '=' and '.'
            # with '_', fields 4 and 6 are assumed to be the thread counts —
            # TODO confirm against the actual naming scheme.
            items = file_csv.replace('=', '_').replace('.', '_').split('_');
            read_threads = int(items[4]);
            write_threads = int(items[6]);
            if read_from <= read_threads <= read_to and write_from <= write_threads <= write_to:
                chart = read_chart_data(file_csv);
                if len(read_chart) == 0:
                    read_chart = [[t] for t in extract_column(chart, col_time)];
                for column in columns:
                    column_data = extract_column(chart, column)
                    # Skip all-zero series to keep the chart readable.
                    if sum(column_data[1:]) == 0.0:
                        continue;
                    read_chart = append_column(read_chart, column_data);
    return draw_chart(read_chart, name);
def meta_column(columns, title, metric):
    """Summarize each column with `metric` into a new chart-less ColumnData."""
    summary = [metric(column) for column in columns]
    return ColumnData(None, title, summary)
def render_group(time_line, group_list, meta_prefix, threads_metric):
    """Append three charts for one group of runs to the module-level list:
    raw write-ops over time, a 10-sample aggregated view, and a per-run
    summary plotting avg/sd against the run's thread count.

    threads_metric: callable mapping a write_ops ColumnData to its run's
    thread count (reached via column.chart).
    """
    # NOTE: the original declared 'global c' here; its only effect was to leak
    # the Python-2 list-comprehension variable into module scope, so it was
    # removed.
    charts.append(draw_chart([time_line] + [col.write_ops for col in group_list]))
    charts.append(draw_chart([time_line.aggregate(10)] + [col.write_ops.aggregate(10) for col in group_list]))
    charts.append(draw_chart([
        meta_column([col.write_ops for col in group_list], meta_prefix + ' Threads', threads_metric),
        meta_column([col.write_ops for col in group_list], meta_prefix + ' ops avg', lambda col: col.avg),
        meta_column([col.write_ops for col in group_list], meta_prefix + ' ops sd', lambda col: col.sd),
    ]))
# Load every CSV in the working directory and render chart groups per
# write-workload type.
if True:
    chart_list = []
    for file_name in os.listdir('.'):
        if file_name.endswith('.csv'):
            chart_list.append(ChartData(file_name));
    # Partition runs: pure-write runs split by write kind; pure-read runs are
    # collected too but not rendered below.
    chart_ins_list = [c for c in chart_list if c.w_ins > 0 and c.read_th==0]
    chart_up_tiny_list = [c for c in chart_list if c.w_up_tiny > 0 and c.read_th==0]
    chart_up_wide_list = [c for c in chart_list if c.w_up_wide > 0 and c.read_th==0]
    chart_r_lite_list = [c for c in chart_list if c.r_lite > 0 and c.write_th==0]
    chart_r_heavy_list = [c for c in chart_list if c.r_heavy > 0 and c.write_th==0]
    # All runs are assumed to share one time axis; take it from the first.
    time_line = chart_list[0].time_line
    if len(chart_ins_list)>0:
        render_group(time_line, chart_ins_list, 'Write Ins', lambda c: c.chart.write_th)
    if len(chart_up_tiny_list)>0:
        render_group(time_line, chart_up_tiny_list, 'Write Up Tiny', lambda c: c.chart.write_th)
    if len(chart_up_wide_list)>0:
        render_group(time_line, chart_up_wide_list, 'Write Up Wide', lambda c: c.chart.write_th)
# Assemble the static HTML report: load the Google Charts API, invoke every
# chart renderer on page load, then emit one <div> placeholder per chart.
with open('report-all.html', 'w') as out:
    out.write("""<html>
  <head>
    <script type="text/javascript" src="https://www.google.com/jsapi"></script>
    <script type="text/javascript">
      google.load("visualization", "1", {packages:["corechart"]});
      google.setOnLoadCallback(function(){
      """);
    # Call each chart's render function once the API is ready.
    for id, renderer in charts:
        out.write("        %s();\n" % id);
    out.write(""" 
      });
      """);
    # Emit the render function definitions themselves.
    for id, renderer in charts:
        out.write(renderer);
    out.write("""
    </script>
  </head>
  <body>
    """);
    # One placeholder div per chart, matched to its renderer by element id.
    for id, renderer in charts:
        out.write('    <div id="%s" style="width: 1200px; height: 400px;"></div>\n' % id)
    out.write("""
  </body>
</html>""");
|
normal
|
{
"blob_id": "bb208d40ce098b05594aaf9c579f64b909738d52",
"index": 1067,
"step-1": "#!/usr/bin/python\n\nimport os;\nimport math;\n\n# os.chdir('data/postgres/linux.env')\nos.chdir('data/mysql/linux.env')\n# os.chdir('data/mongo/linux.env')\n\ncol_time = 0;\ncol_read_ops = 1\ncol_read_err = 2\ncol_write_ops = 3\ncol_write_err = 4\n\n\nclass ColumnData:\n def __init__(self, chart, title, data):\n self.chart = chart;\n self.title = title;\n self.data = data;\n self.sum = sum(self.data);\n self.avg = self.sum / len(self.data);\n self.sd = math.sqrt(sum([math.pow(x - self.avg, 2) for x in data]) / len(self.data));\n\n def aggregate(self, group_size):\n assert len(self.data) % group_size == 0\n agg_data = [0.0 for i in range(len(self.data) / group_size)]\n for i in range(len(self.data)):\n agg_data[i / group_size] += self.data[i]\n agg_column = ColumnData(self.chart, self.title + '_agg', [x / group_size for x in agg_data])\n agg_column.sum = self.sum\n agg_column.avg = self.avg\n agg_column.sd = self.sd\n return agg_column\n\n\nclass ChartData:\n def __init__(self, file):\n assert file.endswith('.csv')\n\n # read meta-data:\n for field in file[:-len('.csv')].split(','):\n [key, value] = field.split('=');\n setattr(self, key, int(value));\n\n # read raw data:\n first_line = True;\n input_matrix = None\n for line in open(file, 'r'):\n line = line.strip();\n if line <> '':\n items = line.split(',')\n if first_line:\n input_matrix = [[title.replace(\"'\", '')] for title in items]\n first_line = False;\n else:\n values = [float(value) for value in items]\n for i in range(len(values)):\n input_matrix[i].append(values[i])\n self.columns = [ColumnData(self, input_column[0], input_column[1:]) for input_column in input_matrix]\n\n self.time_line = self.columns[0]\n\n self.read_th = self.r_lite + self.r_heavy;\n read_title = 'r%d_R%d' % (self.r_lite, self.r_heavy)\n self.read_ops = self.columns[1]\n self.read_ops.title = 'R_' + read_title\n self.read_err = self.columns[2]\n self.read_err.title = 'RE_' + read_title\n\n self.write_th = self.w_ins + 
self.w_up_tiny + self.w_up_wide;\n write_title = 'i%d_u%d_U%d' % (self.w_ins, self.w_up_tiny, self.w_up_wide)\n self.write_ops = self.columns[3]\n self.write_ops.title = 'W_' + write_title\n self.write_err = self.columns[4]\n self.write_err.title = 'WE_' + write_title\n\n\nname_index = 0;\n\n\ndef draw_chart(columns, name='', notes=''):\n if name == '':\n global name_index;\n name_index += 1;\n name = 'chart_%s' % name_index\n id = 'chart_' + name;\n result = \"\";\n result += \"\"\"\n function %s() {\n var data = google.visualization.arrayToDataTable([\n \"\"\" % id;\n result += '[%s],\\n' % ', '.join(['\"' + c.title + '\"' for c in columns])\n for i in range(len(columns[0].data)):\n result += '[%s],\\n' % (', '.join([str(c.data[i]) for c in columns]))\n\n result += \"\"\"\n ]);\n\n var options = {\n title: '%s',\n //curveType: 'function',\n chartArea:{left:60,top:10,width:'65%%',height:'85%%'}\n };\n\n var chart = new google.visualization.LineChart(document.getElementById('%s'));\n chart.draw(data, options);\n }\n \"\"\" % (name, id);\n return id, result\n\n\ncharts = []\n\n\ndef draw_aggregated_chart(name, columns, read_from=0, read_to=0, write_from=0, write_to=0):\n read_chart = []\n for file_csv in os.listdir('.'):\n if file_csv.endswith('.csv'):\n items = file_csv.replace('=', '_').replace('.', '_').split('_');\n read_threads = int(items[4]);\n write_threads = int(items[6]);\n if read_from <= read_threads <= read_to and write_from <= write_threads <= write_to:\n chart = read_chart_data(file_csv);\n if len(read_chart) == 0:\n read_chart = [[t] for t in extract_column(chart, col_time)];\n for column in columns:\n column_data = extract_column(chart, column)\n if sum(column_data[1:]) == 0.0:\n continue;\n read_chart = append_column(read_chart, column_data);\n return draw_chart(read_chart, name);\n\n\ndef meta_column(columns, title, metric):\n return ColumnData(None, title, [metric(c) for c in columns])\n\ndef render_group(time_line, group_list, meta_prefix, 
threads_metric):\n global c\n charts.append(draw_chart([time_line] + [c.write_ops for c in group_list]));\n charts.append(draw_chart([time_line.aggregate(10)] + [c.write_ops.aggregate(10) for c in group_list]));\n charts.append(draw_chart([\n meta_column([c.write_ops for c in group_list], meta_prefix + ' Threads', threads_metric),\n meta_column([c.write_ops for c in group_list], meta_prefix + ' ops avg', lambda c: c.avg),\n meta_column([c.write_ops for c in group_list], meta_prefix + ' ops sd', lambda c: c.sd),\n ]));\n\n\nif True:\n chart_list = []\n for file_name in os.listdir('.'):\n if file_name.endswith('.csv'):\n chart_list.append(ChartData(file_name));\n\n chart_ins_list = [c for c in chart_list if c.w_ins > 0 and c.read_th==0]\n chart_up_tiny_list = [c for c in chart_list if c.w_up_tiny > 0 and c.read_th==0]\n chart_up_wide_list = [c for c in chart_list if c.w_up_wide > 0 and c.read_th==0]\n chart_r_lite_list = [c for c in chart_list if c.r_lite > 0 and c.write_th==0]\n chart_r_heavy_list = [c for c in chart_list if c.r_heavy > 0 and c.write_th==0]\n time_line = chart_list[0].time_line\n\n if len(chart_ins_list)>0:\n render_group(time_line, chart_ins_list, 'Write Ins', lambda c: c.chart.write_th)\n if len(chart_up_tiny_list)>0:\n render_group(time_line, chart_up_tiny_list, 'Write Up Tiny', lambda c: c.chart.write_th)\n if len(chart_up_wide_list)>0:\n render_group(time_line, chart_up_wide_list, 'Write Up Wide', lambda c: c.chart.write_th)\n\nwith open('report-all.html', 'w') as out:\n out.write(\"\"\"<html>\n <head>\n <script type=\"text/javascript\" src=\"https://www.google.com/jsapi\"></script>\n <script type=\"text/javascript\">\n google.load(\"visualization\", \"1\", {packages:[\"corechart\"]});\n google.setOnLoadCallback(function(){\n \"\"\");\n for id, renderer in charts:\n out.write(\" %s();\\n\" % id);\n out.write(\"\"\" \n });\n \"\"\");\n for id, renderer in charts:\n out.write(renderer);\n\n out.write(\"\"\"\n </script>\n </head>\n <body>\n 
\"\"\");\n\n for id, renderer in charts:\n out.write(' <div id=\"%s\" style=\"width: 1200px; height: 400px;\"></div>\\n' % id)\n\n out.write(\"\"\"\n </body>\n</html>\"\"\");\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Greger Update Agent (GUA) module for the Greger Client Module
"""
__author__ = "Eric Sandbling"
__license__ = 'MIT'
__status__ = 'Development'
# System modules
import os, sys
import shutil
import logging
import subprocess
from threading import Event
from threading import Thread
from threading import enumerate
# Local Modules
from common import getLocalConfig
from common import restart_program
from gdb import GregerDatabase
# from gcm import GregerClientModule
class GregerUpdateAgent(Thread):
    """
    Greger Update Agent (GUA).

    Background thread that periodically compares the locally recorded
    software revision against the latest revision published on the
    configured SVN server, downloads a new revision when one is found,
    updates the server-side "about" record, stops the other client
    threads and restarts the application.
    """
    def __init__(self, ready=None):
        '''
        Initialize the main class.

        :param ready: threading.Event set by the Greger Client Module when
                      start-up is complete; run() waits on it before polling.
        '''
        Thread.__init__(self)
        self.ready = ready
        # Setup logging
        self.logPath = "root.GUA"
        self.log = logging.getLogger(self.logPath)
        localLog = logging.getLogger(self.logPath + ".__init__")
        localLog.debug("Initiating Greger Update Agent (GUA)...")
        # Stop execution handler (set to terminate the run() loop)
        self.stopExecution = Event()
        # Get local path
        self._location = os.path.abspath(__file__)
        self._location = self._location[:-15] # Trim gcm/__main__.py from path to get at location of application
        localLog.debug("Local path: " + self._location)
        # Get Local Configuration Parameters
        localLog.debug("Getting configuration parameters from file...")
        config = getLocalConfig()
        # Path of the file holding the locally recorded revision number
        self.localRevisionRecordPath = config.get("greger_update_agent", "local_revision_path")
        localLog.debug("Parameter: (localRevisionRecordPath) " + self.localRevisionRecordPath)
        self.log.info("Greger Update Agent (GUA) successfully initiated!")

    @property
    def localRevisionRecord(self):
        '''
        Get the local revision record (.gcm).

        :return: the recorded revision as a string; if the record file
                 cannot be read it is (re)initialized to 0 first.
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".localRevisionRecord")
        localLog.debug("Getting local revision record...")
        revisionRecordPath = self.localRevisionRecordPath
        localLog.debug("Attemption to get record from file...")
        try:
            with open(revisionRecordPath, "r") as f:
                localRecord = f.read()
            localLog.debug("Local revision record: " + str(localRecord))
        except Exception as e:
            # Missing/unreadable record: reset to revision 0 via the setter
            # and re-read it so the caller always gets a value.
            self.log.warning("Failed to open file! - " + str(e))
            self.localRevisionRecord = 0
            localRecord = self.localRevisionRecord
        return localRecord

    @localRevisionRecord.setter
    def localRevisionRecord(self, newRevision):
        '''
        Set the local revision record (.gcm).

        :param newRevision: revision identifier to persist (written as str).
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".localRevisionRecord")
        localLog.debug("Setting local revision record (.gcm) to " + str(newRevision) + "...")
        revisionRecordPath = self.localRevisionRecordPath
        localLog.debug("Attemption to write \"" + str(newRevision) + "\" to file...")
        with open(revisionRecordPath, "w") as f:
            f.write(str(newRevision))
        self.log.info("Local revision record set: " + str(newRevision))

    def getSoftwareInfo(self, rev='HEAD'):
        '''
        Retrieve information about a revision available on the server.

        Runs "svn proplist" against the configured repository URI and parses
        revision number, git SHA, author, date and commit message.

        :param rev: SVN revision to query (default 'HEAD').
        :return: dict with keys revision, revision_SHA, revision_author,
                 revision_date, revision_comment (values may be empty
                 strings if parsing failed), or None when the server URI
                 setting is missing.
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".getSoftwareInfo")
        localLog.debug("Attempting to retrieve software revision info...")
        # Locally relevant parameters
        if 'guaSWSource' in GregerDatabase.settings:
            guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
        else:
            # BUG FIX: the original referenced the undefined name guaSWSource
            # here, raising NameError instead of logging the warning.
            self.log.warning("Setting guaSWSource not defined!")
            return
        moduleReturn = {
            'revision': "",
            'revision_SHA' : "",
            'revision_author' : "",
            'revision_date' : "",
            'revision_comment' : ""
        }
        # Get server revision info
        localLog.debug("Attempting to retrieve info from server... " + guaSWServerURI)
        pCmd = "svn proplist -v -R --revprop -r " + rev
        pCmd += " " + guaSWServerURI
        localLog.debug(pCmd)
        try:
            p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
            (output, err) = p.communicate()
            # Create list of output and remove extra white spaces
            outputList = [elem.strip() for elem in output.splitlines()[1:]]
            # Get revision from output (first line ends "... rev 1234:")
            revStr = output.splitlines()[0].split()[-1]
            moduleReturn['revision'] = revStr[:-1]
            localLog.debug("Revision: " + revStr[:-1])
            # Get SHA (property value follows the 'git-commit' property name)
            shaStr = outputList[outputList.index('git-commit')+1]
            moduleReturn['revision_SHA'] = shaStr
            localLog.debug("Revision SHA: " + shaStr)
            # Get revision author
            authorStr = outputList[outputList.index('svn:author')+1]
            moduleReturn['revision_author'] = authorStr
            localLog.debug("Revision author: " + authorStr)
            # Get revision date
            dateStr = outputList[outputList.index('svn:date')+1]
            moduleReturn['revision_date'] = dateStr
            localLog.debug("Revision date: " + dateStr)
            # Get revision comment
            commentStr = outputList[outputList.index('svn:log')+1].strip()
            moduleReturn['revision_comment'] = commentStr
            localLog.debug("Revision Comment: " + commentStr)
            if err is not None:
                localLog.debug("Error message: " + str(err))
        except Exception as e:
            self.log.error("Oops! Something went wrong - " + str(e))
        return moduleReturn

    def updateSoftware(self, swRev='HEAD'):
        '''
        Get and install software from the server.

        Exports the requested revision on top of the local "gcm" directory,
        records the downloaded revision, then removes local files that were
        not part of the export.

        :param swRev: SVN revision to export (default 'HEAD').
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".updateSoftware")
        localLog.debug("Getting software revision " + str(swRev) + " from server and updating local client...")
        # Target path for new software
        localLog.debug("Constructing target path for new software...")
        targetRoot = self._location
        targetDir = "gcm"
        targetPath = os.path.join(targetRoot, targetDir)
        localLog.debug("Target path: " + targetPath)
        localLog.debug("Retrieving relevant parameters from server...")
        if 'guaSWSource' in GregerDatabase.settings:
            guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
            localLog.debug("Parameter: (guaSWSource) " + guaSWServerURI)
        else:
            # BUG FIX: original used the undefined name guaSWSource (NameError).
            self.log.warning("Setting guaSWSource not defined!")
            return
        # Get software files from server
        localLog.debug("Getting software files from server...")
        # Compile download command
        pCmd = "svn export --force -r " + str(swRev)
        pCmd += " " + guaSWServerURI
        pCmd += " " + targetPath
        localLog.debug(pCmd)
        # Execute command
        try:
            p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
            (output, err) = p.communicate()
            if err is not None:
                self.log.warning("Error message: " + str(err))
            else:
                self.log.info("Download successful!")
                # Print output
                for line in output.splitlines():
                    self.log.info(line)
        except Exception as e:
            # BUG FIX: without this return, 'output' is unbound below and the
            # original crashed with NameError right after logging the error.
            self.log.error("Oops! Something went wrong - " + str(e))
            return
        # Read revision text (last output line: "Exported revision 1234.")
        localLog.debug("Reading downloaded revision from \"" + output.splitlines()[-1] + "\"...")
        revText = output.splitlines()[-1].split()[-1][:-1]
        localLog.debug("Downloaded Revision: " + revText)
        # Update local revision record
        self.localRevisionRecord = revText
        # Get downloaded files text
        localLog.debug("Listing downloaded files...")
        downloadedFiles = []
        for row in output.splitlines()[:-1]:
            file = os.path.join(targetRoot, [t.strip() for t in row.split()][1])
            downloadedFiles.append(file)
            localLog.debug("File: " + file)
        # List files in directory
        self.log.debug("Getting all files in local directory (after update)...")
        allFiles = []
        # r=root, d=directories, f = files
        for r, d, f in os.walk(targetPath):
            for file in f:
                allFiles.append(os.path.join(r, file))
                localLog.debug("File: " + allFiles[-1])
            for dir in d:
                allFiles.append(os.path.join(r, dir))
                localLog.debug("Dir: " + allFiles[-1])
        self.log.info("Identifying old files to remove (<new_files> - <all_files>)...")
        diffFiles = list(set(allFiles) - set(downloadedFiles))
        for file in diffFiles:
            self.log.info("Removing: " + file)
            try:
                if os.path.isfile(file):
                    os.unlink(file)
                elif os.path.isdir(file):
                    shutil.rmtree(file)
            except Exception as e:
                self.log.warning("Oops! Something went wrong! - " + str(e))
        # List files in directory
        self.log.debug("Re-getting all files in local directory...")
        allFiles = []
        # r=root, d=directories, f = files
        for r, d, f in os.walk(targetPath):
            for file in f:
                allFiles.append(os.path.join(r, file))
                self.log.debug("File: " + os.path.join(r, file))
            for dir in d:
                allFiles.append(os.path.join(r, dir))
                # BUG FIX: original logged os.path.join(r, file) here, reusing
                # the stale 'file' variable from the loop above.
                self.log.debug("Dir: " + os.path.join(r, dir))

    def run(self):
        '''
        Run the Greger Update Agent.

        Waits for the client module to start, then polls for new revisions
        until stopExecution is set; on a new revision it updates the
        software, stops all threads and restarts the application.
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".run")
        self.log.info("Starting Greger Update Agent (GUA)...")
        # Wait for Greger Client Module to start...
        localLog.debug("Wait for Greger Client Module to start...")
        self.ready.wait()
        # Collect all active threads by class name (threading.enumerate)
        allThreads = {}
        for thr in enumerate():
            localLog.debug(thr.name + " " + thr.__class__.__name__ +" active!")
            allThreads.update({thr.__class__.__name__ : thr})
            if thr.__class__.__name__ == "GregerClientModule":
                localLog.debug("Greger Client Module thread found! " +
                    allThreads['GregerClientModule'].name)
        # Start checking for updates
        loopCount = 0
        while not self.stopExecution.is_set():
            loopCount += 1
            localLog.debug("Checking for updates (" + str(loopCount) + ")...")
            # Get local revision record
            localLog.debug("Getting local revision record...")
            localRevision = self.localRevisionRecord
            # Get server revision...
            localLog.debug("Getting latest software info...")
            softwareInfo = self.getSoftwareInfo()
            self.log.info("Revision check done! (" + str(localRevision) + ")")
            # BUG FIX: guard against softwareInfo being None (missing setting);
            # the original crashed with TypeError on softwareInfo['revision'].
            if softwareInfo is None or int(localRevision) == int(softwareInfo['revision']):
                self.log.info("No new revision found.")
            else:
                self.log.info("New revision found!")
                # Do update!!
                localLog.debug("Attempting to update software...")
                self.updateSoftware()
                # Update server with updated software
                localLog.debug("Attempting to update server with software info...")
                allThreads['GregerDatabase'].update('about', softwareInfo)
                # Tell GCM to stop all treads (except GUA)...
                self.log.info("Attempting to stop all exection before restarting...")
                allThreads['GregerClientModule'].stopAll(GUA=True)
                # Restart Application
                self.log.info("Attemption to restart application...")
                restart_program()
            if 'guaCheckUpdateDelay' in GregerDatabase.settings:
                delayTime = GregerDatabase.settings['guaCheckUpdateDelay']['value']
            else:
                delayTime = 10
                self.log.warning("Settings not defined! (using default=10)")
            # Wait update delay (interruptible by stopExecution)
            self.log.info("Waiting " + str(delayTime) + "s...")
            self.stopExecution.wait(delayTime)
        self.log.info("Greger Update Agent (GUA) execution stopped!")
|
normal
|
{
"blob_id": "a9b2a4d4924dcdd6e146ea346e71bf42c0259846",
"index": 593,
"step-1": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n <mask token>\n <mask token>\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! - ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n <mask token>\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-2": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n <mask token>\n <mask token>\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! - ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n \"\"\"\n Set local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Setting local revision record (.gcm) to ' + str(\n newRevision) + '...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to write \"' + str(newRevision) +\n '\" to file...')\n with open(revisionRecordPath, 'w') as f:\n f.write(str(newRevision))\n self.log.info('Local revision record set: ' + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-3": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n <mask token>\n\n def __init__(self, ready=None):\n \"\"\"\n Initialize the main class\n \"\"\"\n Thread.__init__(self)\n self.ready = ready\n self.logPath = 'root.GUA'\n self.log = logging.getLogger(self.logPath)\n localLog = logging.getLogger(self.logPath + '.__init__')\n localLog.debug('Initiating Greger Update Agent (GUA)...')\n self.stopExecution = Event()\n self._location = os.path.abspath(__file__)\n self._location = self._location[:-15]\n localLog.debug('Local path: ' + self._location)\n localLog.debug('Getting configuration parameters from file...')\n config = getLocalConfig()\n self.localRevisionRecordPath = config.get('greger_update_agent',\n 'local_revision_path')\n localLog.debug('Parameter: (localRevisionRecordPath) ' + self.\n localRevisionRecordPath)\n self.log.info('Greger Update Agent (GUA) successfully initiated!')\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! 
- ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n \"\"\"\n Set local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Setting local revision record (.gcm) to ' + str(\n newRevision) + '...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to write \"' + str(newRevision) +\n '\" to file...')\n with open(revisionRecordPath, 'w') as f:\n f.write(str(newRevision))\n self.log.info('Local revision record set: ' + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-4": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n \"\"\"\n Main class which holds the main sequence of the application.\n \"\"\"\n\n def __init__(self, ready=None):\n \"\"\"\n Initialize the main class\n \"\"\"\n Thread.__init__(self)\n self.ready = ready\n self.logPath = 'root.GUA'\n self.log = logging.getLogger(self.logPath)\n localLog = logging.getLogger(self.logPath + '.__init__')\n localLog.debug('Initiating Greger Update Agent (GUA)...')\n self.stopExecution = Event()\n self._location = os.path.abspath(__file__)\n self._location = self._location[:-15]\n localLog.debug('Local path: ' + self._location)\n localLog.debug('Getting configuration parameters from file...')\n config = getLocalConfig()\n self.localRevisionRecordPath = config.get('greger_update_agent',\n 'local_revision_path')\n localLog.debug('Parameter: (localRevisionRecordPath) ' + self.\n localRevisionRecordPath)\n self.log.info('Greger Update Agent (GUA) successfully initiated!')\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! 
- ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n \"\"\"\n Set local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Setting local revision record (.gcm) to ' + str(\n newRevision) + '...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to write \"' + str(newRevision) +\n '\" to file...')\n with open(revisionRecordPath, 'w') as f:\n f.write(str(newRevision))\n self.log.info('Local revision record set: ' + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nGreger Update Agent (GUA) module for the Greger Client Module\n\"\"\"\n\n__author__ = \"Eric Sandbling\"\n__license__ = 'MIT'\n__status__ = 'Development'\n\n# System modules\nimport os, sys\nimport shutil\nimport logging\nimport subprocess\nfrom threading import Event\nfrom threading import Thread\nfrom threading import enumerate\n\n# Local Modules\nfrom common import getLocalConfig\nfrom common import restart_program\nfrom gdb import GregerDatabase\n# from gcm import GregerClientModule\n\nclass GregerUpdateAgent(Thread):\n \"\"\"\n Main class which holds the main sequence of the application.\n \"\"\"\n\n def __init__(self, ready=None):\n '''\n Initialize the main class\n '''\n Thread.__init__(self)\n self.ready = ready\n\n # Setup logging\n self.logPath = \"root.GUA\"\n self.log = logging.getLogger(self.logPath)\n localLog = logging.getLogger(self.logPath + \".__init__\")\n localLog.debug(\"Initiating Greger Update Agent (GUA)...\")\n\n # Stop execution handler\n self.stopExecution = Event()\n\n # Get local path\n self._location = os.path.abspath(__file__)\n self._location = self._location[:-15] # Trim gcm/__main__.py from path to get at location of application\n localLog.debug(\"Local path: \" + self._location)\n\n # Get Local Configuration Parameters\n localLog.debug(\"Getting configuration parameters from file...\")\n config = getLocalConfig()\n\n # Locally relevant parameters\n self.localRevisionRecordPath = config.get(\"greger_update_agent\", \"local_revision_path\")\n localLog.debug(\"Parameter: (localRevisionRecordPath) \" + self.localRevisionRecordPath)\n\n\n self.log.info(\"Greger Update Agent (GUA) successfully initiated!\")\n\n @property\n def localRevisionRecord(self):\n '''\n Get local revision record (.gcm)\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".localRevisionRecord\")\n localLog.debug(\"Getting local revision record...\")\n\n # Local parameters\n # 
revisionRecordPath = os.path.join(self._location, \".gcm\")\n revisionRecordPath = self.localRevisionRecordPath\n\n localLog.debug(\"Attemption to get record from file...\")\n try:\n with open(revisionRecordPath,\"r\") as f:\n localRecord = f.read()\n localLog.debug(\"Local revision record: \" + str(localRecord))\n except Exception as e:\n self.log.warning(\"Failed to open file! - \" + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n '''\n Set local revision record (.gcm)\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".localRevisionRecord\")\n localLog.debug(\"Setting local revision record (.gcm) to \" + str(newRevision) + \"...\")\n\n # Local parameters\n # revisionRecordPath = os.path.join(self._location, \".gcm\")\n revisionRecordPath = self.localRevisionRecordPath\n\n localLog.debug(\"Attemption to write \\\"\" + str(newRevision) + \"\\\" to file...\")\n with open(revisionRecordPath,\"w\") as f:\n f.write(str(newRevision))\n self.log.info(\"Local revision record set: \" + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n '''\n Retrieve information about a revision available on server.\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".getSoftwareInfo\")\n localLog.debug(\"Attempting to retrieve software revision info...\")\n\n # Locally relevant parameters\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning(\"Setting \" + str(guaSWSource) + \" not defined!\")\n return\n moduleReturn = {\n 'revision': \"\",\n 'revision_SHA' : \"\",\n 'revision_author' : \"\",\n 'revision_date' : \"\",\n 'revision_comment' : \"\"\n }\n\n # Get server revision info\n localLog.debug(\"Attempting to retrieve info from server... 
\" + guaSWServerURI)\n pCmd = \"svn proplist -v -R --revprop -r \" + rev\n pCmd += \" \" + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n (output, err) = p.communicate()\n\n # Create list of output and remove extra white spaces\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n\n # Get revision from output\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug(\"Revision: \" + revStr[:-1])\n\n # Get SHA\n shaStr = outputList[outputList.index('git-commit')+1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug(\"Revision SHA: \" + shaStr)\n\n # Get revision author\n authorStr = outputList[outputList.index('svn:author')+1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug(\"Revision author: \" + authorStr)\n\n # Get revision date\n dateStr = outputList[outputList.index('svn:date')+1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug(\"Revision date: \" + dateStr)\n\n # Get revision comment\n commentStr = outputList[outputList.index('svn:log')+1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug(\"Revision Comment: \" + commentStr)\n\n if err is not None:\n localLog.debug(\"Error message: \" + str(err))\n\n except Exception as e:\n self.log.error(\"Oops! 
Something went wrong - \" + str(e))\n\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n '''\n Get and updating software from server\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".updateSoftware\")\n localLog.debug(\"Getting software revision \" + str(swRev) + \" from server and updating local client...\")\n\n # Locally relevant parameters\n localLog.debug(\"Constructing target path for new software...\")\n targetRoot = self._location\n targetDir = \"gcm\"\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug(\"Target path: \" + targetPath)\n localLog.debug(\"Retrieving relevant parameters from server...\")\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug(\"Parameter: (guaSWSource) \" + guaSWServerURI)\n else:\n self.log.warning(\"Setting \" + str(guaSWSource) + \" not defined!\")\n return\n\n # Get software files from server\n localLog.debug(\"Getting software files from server...\")\n\n # Compile download command\n pCmd = \"svn export --force -r \" + str(swRev)\n pCmd += \" \" + guaSWServerURI\n pCmd += \" \" + targetPath\n localLog.debug(pCmd)\n\n # Execute command\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n (output, err) = p.communicate()\n\n if err is not None:\n self.log.warning(\"Error message: \" + str(err))\n else:\n self.log.info(\"Download successful!\")\n # Print output\n for line in output.splitlines():\n self.log.info(line)\n\n except Exception as e:\n self.log.error(\"Oops! 
Something went wrong - \" + str(e))\n\n # Read revision text\n localLog.debug(\"Reading downloaded revision from \\\"\" + output.splitlines()[-1] + \"\\\"...\")\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug(\"Downloaded Revision: \" + revText)\n\n # Update local revision record\n self.localRevisionRecord = revText\n\n # Get downloaded files text\n localLog.debug(\"Listing downloaded files...\")\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1])\n downloadedFiles.append(file)\n localLog.debug(\"File: \" + file)\n\n # List files in directory\n self.log.debug(\"Getting all files in local directory (after update)...\")\n allFiles = []\n # r=root, d=directories, f = files\n for r, d, f in os.walk(targetPath):\n for file in f:\n # allFiles.append(os.path.abspath(file))\n allFiles.append(os.path.join(r, file))\n localLog.debug(\"File: \" + allFiles[-1])\n # localLog.debug(\"File: \" + os.path.join(r, file))\n for dir in d:\n # allFiles.append(os.path.abspath(dir))\n allFiles.append(os.path.join(r, dir))\n localLog.debug(\"Dir: \" + allFiles[-1])\n # localLog.debug(\"Dir: \" + os.path.join(r, dir))\n\n self.log.info(\"Identifying old files to remove (<new_files> - <all_files>)...\")\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info(\"Removing: \" + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning(\"Oops! Something went wrong! 
- \" + str(e))\n\n # List files in directory\n self.log.debug(\"Re-getting all files in local directory...\")\n allFiles = []\n # r=root, d=directories, f = files\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug(\"File: \" + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug(\"Dir: \" + os.path.join(r, file))\n\n def run(self):\n '''\n Run Greger Update Agent.\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".run\")\n self.log.info(\"Starting Greger Update Agent (GUA)...\")\n\n # Wait for Greger Client Module to start...\n localLog.debug(\"Wait for Greger Client Module to start...\")\n self.ready.wait()\n\n # Get all active threads!\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + \" \" + thr.__class__.__name__ +\" active!\")\n allThreads.update({thr.__class__.__name__ : thr})\n if thr.__class__.__name__ == \"GregerClientModule\":\n localLog.debug(\"Greger Client Module thread found! \" +\n allThreads['GregerClientModule'].name)\n\n # Start checking for updates\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug(\"Checking for updates (\" + str(loopCount) + \")...\")\n\n # Get local revision record\n localLog.debug(\"Getting local revision record...\")\n localRevision = self.localRevisionRecord\n\n # Get server revision...\n localLog.debug(\"Getting latest software info...\")\n softwareInfo = self.getSoftwareInfo()\n self.log.info(\"Revision check done! 
(\" + str(localRevision) + \")\")\n\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info(\"No new revision found.\")\n else:\n self.log.info(\"New revision found!\")\n\n # Do update!!\n localLog.debug(\"Attempting to update software...\")\n self.updateSoftware()\n\n # Update server with updated software\n localLog.debug(\"Attempting to update server with software info...\")\n allThreads['GregerDatabase'].update('about', softwareInfo)\n\n # Tell GCM to stop all treads (except GUA)...\n self.log.info(\"Attempting to stop all exection before restarting...\")\n allThreads['GregerClientModule'].stopAll(GUA=True)\n\n # Restart Application\n self.log.info(\"Attemption to restart application...\")\n restart_program()\n\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay']['value']\n else:\n delayTime = 10\n self.log.warning(\"Settings not defined! (using default=10)\")\n\n # Wait update delay\n self.log.info(\"Waiting \" + str(delayTime) + \"s...\")\n self.stopExecution.wait(delayTime)\n\n self.log.info(\"Greger Update Agent (GUA) execution stopped!\")\n",
"step-ids": [
5,
6,
7,
8,
11
]
}
|
[
5,
6,
7,
8,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('****************************')
print('***** Caixa Eletronico *****')
print('****************************')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('****************************')
print('***** Caixa Eletronico *****')
print('****************************')
account_typed = input('Digite sua conta: ')
password_typed = getpass.getpass('Digite sua senha: ')
<|reserved_special_token_1|>
import getpass
print('****************************')
print('***** Caixa Eletronico *****')
print('****************************')
account_typed = input('Digite sua conta: ')
password_typed = getpass.getpass('Digite sua senha: ')
<|reserved_special_token_1|>
import getpass
print('****************************')
print('***** Caixa Eletronico *****')
print('****************************')
account_typed = input("Digite sua conta: ")
password_typed = getpass.getpass("Digite sua senha: ")
|
flexible
|
{
"blob_id": "44b6ee8488869da447882457897ce87b2fdea726",
"index": 7846,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('****************************')\nprint('***** Caixa Eletronico *****')\nprint('****************************')\n<mask token>\n",
"step-3": "<mask token>\nprint('****************************')\nprint('***** Caixa Eletronico *****')\nprint('****************************')\naccount_typed = input('Digite sua conta: ')\npassword_typed = getpass.getpass('Digite sua senha: ')\n",
"step-4": "import getpass\nprint('****************************')\nprint('***** Caixa Eletronico *****')\nprint('****************************')\naccount_typed = input('Digite sua conta: ')\npassword_typed = getpass.getpass('Digite sua senha: ')\n",
"step-5": "import getpass\r\n\r\nprint('****************************')\r\nprint('***** Caixa Eletronico *****')\r\nprint('****************************')\r\n\r\naccount_typed = input(\"Digite sua conta: \")\r\npassword_typed = getpass.getpass(\"Digite sua senha: \")\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.
AUTH_USER_MODEL), ('events', '0004_auto_20190526_1436')]
operations = [migrations.AlterField(model_name='eventattendance', name=
'event_id', field=models.ForeignKey(db_column='event_id', on_delete
=django.db.models.deletion.DO_NOTHING, to='events.Event')),
migrations.AlterField(model_name='eventattendance', name='user_id',
field=models.ForeignKey(db_column='user_id', on_delete=django.db.
models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
migrations.AlterUniqueTogether(name='eventattendance',
unique_together={('event_id', 'user_id')})]
<|reserved_special_token_1|>
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.
AUTH_USER_MODEL), ('events', '0004_auto_20190526_1436')]
operations = [migrations.AlterField(model_name='eventattendance', name=
'event_id', field=models.ForeignKey(db_column='event_id', on_delete
=django.db.models.deletion.DO_NOTHING, to='events.Event')),
migrations.AlterField(model_name='eventattendance', name='user_id',
field=models.ForeignKey(db_column='user_id', on_delete=django.db.
models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
migrations.AlterUniqueTogether(name='eventattendance',
unique_together={('event_id', 'user_id')})]
<|reserved_special_token_1|>
# Generated by Django 2.1.7 on 2019-05-31 18:45
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0004_auto_20190526_1436'),
]
operations = [
migrations.AlterField(
model_name='eventattendance',
name='event_id',
field=models.ForeignKey(db_column='event_id', on_delete=django.db.models.deletion.DO_NOTHING, to='events.Event'),
),
migrations.AlterField(
model_name='eventattendance',
name='user_id',
field=models.ForeignKey(db_column='user_id', on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='eventattendance',
unique_together={('event_id', 'user_id')},
),
]
|
flexible
|
{
"blob_id": "2ec8d3853ea4a99d4e764c6c24d7b5a3afb64f63",
"index": 2830,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('events', '0004_auto_20190526_1436')]\n operations = [migrations.AlterField(model_name='eventattendance', name=\n 'event_id', field=models.ForeignKey(db_column='event_id', on_delete\n =django.db.models.deletion.DO_NOTHING, to='events.Event')),\n migrations.AlterField(model_name='eventattendance', name='user_id',\n field=models.ForeignKey(db_column='user_id', on_delete=django.db.\n models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),\n migrations.AlterUniqueTogether(name='eventattendance',\n unique_together={('event_id', 'user_id')})]\n",
"step-4": "from django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.\n AUTH_USER_MODEL), ('events', '0004_auto_20190526_1436')]\n operations = [migrations.AlterField(model_name='eventattendance', name=\n 'event_id', field=models.ForeignKey(db_column='event_id', on_delete\n =django.db.models.deletion.DO_NOTHING, to='events.Event')),\n migrations.AlterField(model_name='eventattendance', name='user_id',\n field=models.ForeignKey(db_column='user_id', on_delete=django.db.\n models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),\n migrations.AlterUniqueTogether(name='eventattendance',\n unique_together={('event_id', 'user_id')})]\n",
"step-5": "# Generated by Django 2.1.7 on 2019-05-31 18:45\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ('events', '0004_auto_20190526_1436'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='eventattendance',\n name='event_id',\n field=models.ForeignKey(db_column='event_id', on_delete=django.db.models.deletion.DO_NOTHING, to='events.Event'),\n ),\n migrations.AlterField(\n model_name='eventattendance',\n name='user_id',\n field=models.ForeignKey(db_column='user_id', on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),\n ),\n migrations.AlterUniqueTogether(\n name='eventattendance',\n unique_together={('event_id', 'user_id')},\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class MazeEnv:
<|reserved_special_token_0|>
def __init__(self, GW, GH, SW, SH):
global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT
GRID_WIDTH = GW
GRID_HEIGHT = GH
SCREEN_WIDTH = SW
SCREEN_HEIGHT = SH
BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH
BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT
WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)
self.pos = np.array(self.getPos(SPAWN_STATE))
self.action_space = Actions
self.max_states = GRID_WIDTH * GRID_HEIGHT
self.max_actions = len(self.action_space)
self.Q = np.zeros([GRID_WIDTH * GRID_HEIGHT, len(self.action_space)])
self.tunnel_vision = False
self.WALLS = list(WALLS)
self.WIN_STATE = WIN_STATE
self.SPAWN_STATE = SPAWN_STATE
def step(self, action):
self.pos = self.moveDir(self.pos, self.action_space(action))
reward = -0.04
done = True
if self.getState() == self.WIN_STATE:
reward = 10
else:
done = False
return self.getState(), reward, done, {}
def reset(self):
self.pos = np.array(self.getPos(self.SPAWN_STATE))
def render(self, screen, close=False):
self.screen = screen
self.screen.fill((0, 0, 0))
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
all_points = []
all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *
BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH +
BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])
all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *
BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT +
BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / 2, y *
BOX_HEIGHT + BOX_HEIGHT / 2]])
all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *
BOX_WIDTH, y * BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH +
BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])
all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *
BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH, y *
BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH /
2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])
width = 34
height = 10
text_offs = [[BOX_WIDTH / 2 - width / 2, height / 2], [
BOX_WIDTH - width, BOX_HEIGHT / 2 - height / 2], [4,
BOX_HEIGHT / 2 - height / 2], [BOX_WIDTH / 2 - width /
2, BOX_HEIGHT - height - 4]]
for a in range(4):
s = pygame.Surface((BOX_WIDTH, BOX_HEIGHT), pygame.SRCALPHA
)
s.fill((0, 0, 0, 0))
if self.getState((x, y)) == self.WIN_STATE:
col = 0, 255, 0, 255
elif [x, y] in self.WALLS:
col = 128, 128, 128, 255
elif len(self.Q) <= self.getState((x, y)) or len(self.Q
[self.getState((x, y))]) <= a:
col = 0, 0, 0, 0
elif self.Q[self.getState((x, y))][a] > 0:
col = 0, 255, 0, 60 + self.Q[self.getState((x, y))][a
] / self.Q.max() * 195
elif self.Q[self.getState((x, y))][a] < 0:
col = 255, 0, 0, 60 + self.Q[self.getState((x, y))][a
] / self.Q.min() * 195
else:
col = 0, 0, 0, 0
if not self.tunnel_vision or self.getState((x, y)
) == self.getState():
pygame.draw.polygon(s, col, [[all_points[a][b][0] -
x * BOX_WIDTH, all_points[a][b][1] - y *
BOX_HEIGHT] for b in range(3)])
self.screen.blit(s, (x * BOX_WIDTH, y * BOX_HEIGHT))
if self.getState((x, y)) != self.WIN_STATE and [x, y
] not in self.WALLS:
pygame.draw.polygon(self.screen, (255, 255, 255
), all_points[a], 2)
pygame.draw.circle(self.screen, (0, 0, 255), (int((self.pos[0] +
0.5) * BOX_WIDTH), int((self.pos[1] + 0.5) * BOX_HEIGHT)), max(
10, int(BOX_WIDTH / 10)))
pygame.display.update()
    def moveDir(self, pos, action):
        """Apply *action* to *pos* and return the resulting position.

        NOTE(review): *pos* is mutated in place before validation; on an
        illegal move the saved copy is returned instead, but the caller's
        sequence has already been touched.
        """
        oldPos = list(pos)
        if action == Actions.FORWARD:
            pos[1] -= 1
        elif action == Actions.RIGHT:
            pos[0] += 1
        elif action == Actions.LEFT:
            pos[0] -= 1
        elif action == Actions.BACK:
            pos[1] += 1
        # Reject moves that leave the grid or land on a wall cell.
        if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1
            ] >= GRID_HEIGHT or self.hitWall(pos):
            pos = oldPos
        return pos
    def hitWall(self, pos):
        """Return True when *pos* coincides with any wall cell in self.WALLS."""
        for w in self.WALLS:
            if w[0] == pos[0] and w[1] == pos[1]:
                return True
        return False
    def getState(self, pos=False):
        """Flatten a (x, y) cell into a row-major state id; defaults to self.pos."""
        if not pos:
            pos = self.pos
        return int(pos[1] * GRID_WIDTH + pos[0])
    def getPos(self, state):
        """Inverse of getState(): scalar state id -> [x, y] cell coordinates."""
        return [state % GRID_WIDTH, state // GRID_WIDTH]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MazeEnv:
    """Gym-style grid maze with a pygame Q-value heatmap renderer."""

    def __init__(self, GW, GH, SW, SH):
        """Record grid/screen geometry and initialise the learning state."""
        global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT
        GRID_WIDTH, GRID_HEIGHT = GW, GH
        SCREEN_WIDTH, SCREEN_HEIGHT = SW, SH
        BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH
        BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT
        n_states = GRID_WIDTH * GRID_HEIGHT
        # Goal cell is re-rolled on every construction.
        goal = random.randint(0, n_states - 1)
        self.pos = np.array(self.getPos(SPAWN_STATE))
        self.action_space = Actions
        self.max_states = n_states
        self.max_actions = len(self.action_space)
        self.Q = np.zeros([n_states, len(self.action_space)])
        self.tunnel_vision = False
        self.WALLS = list(WALLS)
        self.WIN_STATE = goal
        self.SPAWN_STATE = SPAWN_STATE

    def step(self, action):
        """Take one action id; return (state, reward, done, info)."""
        self.pos = self.moveDir(self.pos, self.action_space(action))
        reached = self.getState() == self.WIN_STATE
        return self.getState(), (10 if reached else -0.04), reached, {}

    def reset(self):
        """Put the agent back on the spawn cell."""
        self.pos = np.array(self.getPos(self.SPAWN_STATE))

    def render(self, screen, close=False):
        """Paint every cell as four action triangles tinted by the Q table."""
        self.screen = screen
        self.screen.fill((0, 0, 0))
        for gx in range(GRID_WIDTH):
            for gy in range(GRID_HEIGHT):
                left, top = gx * BOX_WIDTH, gy * BOX_HEIGHT
                right, bottom = left + BOX_WIDTH, top + BOX_HEIGHT
                cx, cy = left + BOX_WIDTH / 2, top + BOX_HEIGHT / 2
                # One wedge per action: top, right, left, bottom.
                triangles = [
                    [[left, top], [right, top], [cx, cy]],
                    [[right, top], [right, bottom], [cx, cy]],
                    [[left, top], [left, bottom], [cx, cy]],
                    [[right, bottom], [left, bottom], [cx, cy]],
                ]
                state = self.getState((gx, gy))
                for a, tri in enumerate(triangles):
                    overlay = pygame.Surface((BOX_WIDTH, BOX_HEIGHT),
                                             pygame.SRCALPHA)
                    overlay.fill((0, 0, 0, 0))
                    if state == self.WIN_STATE:
                        col = (0, 255, 0, 255)
                    elif [gx, gy] in self.WALLS:
                        col = (128, 128, 128, 255)
                    elif len(self.Q) <= state or len(self.Q[state]) <= a:
                        col = (0, 0, 0, 0)
                    elif self.Q[state][a] > 0:
                        col = (0, 255, 0,
                               60 + self.Q[state][a] / self.Q.max() * 195)
                    elif self.Q[state][a] < 0:
                        col = (255, 0, 0,
                               60 + self.Q[state][a] / self.Q.min() * 195)
                    else:
                        col = (0, 0, 0, 0)
                    if not self.tunnel_vision or state == self.getState():
                        pygame.draw.polygon(
                            overlay, col,
                            [[px - left, py - top] for px, py in tri])
                        self.screen.blit(overlay, (left, top))
                        if state != self.WIN_STATE and [gx, gy] not in self.WALLS:
                            pygame.draw.polygon(self.screen,
                                                (255, 255, 255), tri, 2)
        # Agent marker.
        pygame.draw.circle(self.screen, (0, 0, 255),
                           (int((self.pos[0] + 0.5) * BOX_WIDTH),
                            int((self.pos[1] + 0.5) * BOX_HEIGHT)),
                           max(10, int(BOX_WIDTH / 10)))
        pygame.display.update()

    def moveDir(self, pos, action):
        """Apply *action* to *pos* in place; illegal moves are reverted."""
        previous = list(pos)
        dx, dy = {
            Actions.FORWARD: (0, -1),
            Actions.RIGHT: (1, 0),
            Actions.LEFT: (-1, 0),
            Actions.BACK: (0, 1),
        }.get(action, (0, 0))
        pos[0] += dx
        pos[1] += dy
        inside = 0 <= pos[0] < GRID_WIDTH and 0 <= pos[1] < GRID_HEIGHT
        if not inside or self.hitWall(pos):
            pos = previous
        return pos

    def hitWall(self, pos):
        """Return True when *pos* coincides with a wall cell."""
        return any(w[0] == pos[0] and w[1] == pos[1] for w in self.WALLS)

    def getState(self, pos=False):
        """Flatten a (x, y) cell to a row-major state id; defaults to self.pos."""
        cell = pos if pos else self.pos
        return int(cell[1] * GRID_WIDTH + cell[0])

    def getPos(self, state):
        """Inverse of getState(): state id -> [x, y]."""
        y, x = divmod(state, GRID_WIDTH)
        return [x, y]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Actions(Enum):
    """Discrete grid moves; the integer values are the action ids step() accepts."""
    FORWARD = 0
    RIGHT = 1
    LEFT = 2
    BACK = 3
class MazeEnv:
    """Gym-style grid maze with a pygame Q-value heatmap renderer."""

    def __init__(self, GW, GH, SW, SH):
        """Record grid/screen geometry and initialise the learning state."""
        global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT
        GRID_WIDTH, GRID_HEIGHT = GW, GH
        SCREEN_WIDTH, SCREEN_HEIGHT = SW, SH
        BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH
        BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT
        n_states = GRID_WIDTH * GRID_HEIGHT
        # Goal cell is re-rolled on every construction.
        goal = random.randint(0, n_states - 1)
        self.pos = np.array(self.getPos(SPAWN_STATE))
        self.action_space = Actions
        self.max_states = n_states
        self.max_actions = len(self.action_space)
        self.Q = np.zeros([n_states, len(self.action_space)])
        self.tunnel_vision = False
        self.WALLS = list(WALLS)
        self.WIN_STATE = goal
        self.SPAWN_STATE = SPAWN_STATE

    def step(self, action):
        """Take one action id; return (state, reward, done, info)."""
        self.pos = self.moveDir(self.pos, self.action_space(action))
        reached = self.getState() == self.WIN_STATE
        return self.getState(), (10 if reached else -0.04), reached, {}

    def reset(self):
        """Put the agent back on the spawn cell."""
        self.pos = np.array(self.getPos(self.SPAWN_STATE))

    def render(self, screen, close=False):
        """Paint every cell as four action triangles tinted by the Q table."""
        self.screen = screen
        self.screen.fill((0, 0, 0))
        for gx in range(GRID_WIDTH):
            for gy in range(GRID_HEIGHT):
                left, top = gx * BOX_WIDTH, gy * BOX_HEIGHT
                right, bottom = left + BOX_WIDTH, top + BOX_HEIGHT
                cx, cy = left + BOX_WIDTH / 2, top + BOX_HEIGHT / 2
                # One wedge per action: top, right, left, bottom.
                triangles = [
                    [[left, top], [right, top], [cx, cy]],
                    [[right, top], [right, bottom], [cx, cy]],
                    [[left, top], [left, bottom], [cx, cy]],
                    [[right, bottom], [left, bottom], [cx, cy]],
                ]
                state = self.getState((gx, gy))
                for a, tri in enumerate(triangles):
                    overlay = pygame.Surface((BOX_WIDTH, BOX_HEIGHT),
                                             pygame.SRCALPHA)
                    overlay.fill((0, 0, 0, 0))
                    if state == self.WIN_STATE:
                        col = (0, 255, 0, 255)
                    elif [gx, gy] in self.WALLS:
                        col = (128, 128, 128, 255)
                    elif len(self.Q) <= state or len(self.Q[state]) <= a:
                        col = (0, 0, 0, 0)
                    elif self.Q[state][a] > 0:
                        col = (0, 255, 0,
                               60 + self.Q[state][a] / self.Q.max() * 195)
                    elif self.Q[state][a] < 0:
                        col = (255, 0, 0,
                               60 + self.Q[state][a] / self.Q.min() * 195)
                    else:
                        col = (0, 0, 0, 0)
                    if not self.tunnel_vision or state == self.getState():
                        pygame.draw.polygon(
                            overlay, col,
                            [[px - left, py - top] for px, py in tri])
                        self.screen.blit(overlay, (left, top))
                        if state != self.WIN_STATE and [gx, gy] not in self.WALLS:
                            pygame.draw.polygon(self.screen,
                                                (255, 255, 255), tri, 2)
        # Agent marker.
        pygame.draw.circle(self.screen, (0, 0, 255),
                           (int((self.pos[0] + 0.5) * BOX_WIDTH),
                            int((self.pos[1] + 0.5) * BOX_HEIGHT)),
                           max(10, int(BOX_WIDTH / 10)))
        pygame.display.update()

    def moveDir(self, pos, action):
        """Apply *action* to *pos* in place; illegal moves are reverted."""
        previous = list(pos)
        dx, dy = {
            Actions.FORWARD: (0, -1),
            Actions.RIGHT: (1, 0),
            Actions.LEFT: (-1, 0),
            Actions.BACK: (0, 1),
        }.get(action, (0, 0))
        pos[0] += dx
        pos[1] += dy
        inside = 0 <= pos[0] < GRID_WIDTH and 0 <= pos[1] < GRID_HEIGHT
        if not inside or self.hitWall(pos):
            pos = previous
        return pos

    def hitWall(self, pos):
        """Return True when *pos* coincides with a wall cell."""
        return any(w[0] == pos[0] and w[1] == pos[1] for w in self.WALLS)

    def getState(self, pos=False):
        """Flatten a (x, y) cell to a row-major state id; defaults to self.pos."""
        cell = pos if pos else self.pos
        return int(cell[1] * GRID_WIDTH + cell[0])

    def getPos(self, state):
        """Inverse of getState(): state id -> [x, y]."""
        y, x = divmod(state, GRID_WIDTH)
        return [x, y]
<|reserved_special_token_1|>
import pygame
import numpy as np
import random
from enum import Enum
from .config import *
class Actions(Enum):
FORWARD = 0
RIGHT = 1
LEFT = 2
BACK = 3
class MazeEnv:
""" TODO """
def __init__(self, GW, GH, SW, SH):
global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT
GRID_WIDTH = GW
GRID_HEIGHT = GH
SCREEN_WIDTH = SW
SCREEN_HEIGHT = SH
BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH
BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT
WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)
self.pos = np.array(self.getPos(SPAWN_STATE))
self.action_space = Actions
self.max_states = GRID_WIDTH * GRID_HEIGHT
self.max_actions = len(self.action_space)
self.Q = np.zeros([GRID_WIDTH * GRID_HEIGHT, len(self.action_space)])
self.tunnel_vision = False
self.WALLS = list(WALLS)
self.WIN_STATE = WIN_STATE
self.SPAWN_STATE = SPAWN_STATE
def step(self, action):
self.pos = self.moveDir(self.pos, self.action_space(action))
reward = -0.04
done = True
if self.getState() == self.WIN_STATE:
reward = 10
else:
done = False
return self.getState(), reward, done, {}
def reset(self):
self.pos = np.array(self.getPos(self.SPAWN_STATE))
def render(self, screen, close=False):
self.screen = screen
self.screen.fill((0, 0, 0))
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
all_points = []
all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *
BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH +
BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])
all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *
BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT +
BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / 2, y *
BOX_HEIGHT + BOX_HEIGHT / 2]])
all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *
BOX_WIDTH, y * BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH +
BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])
all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *
BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH, y *
BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH /
2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])
width = 34
height = 10
text_offs = [[BOX_WIDTH / 2 - width / 2, height / 2], [
BOX_WIDTH - width, BOX_HEIGHT / 2 - height / 2], [4,
BOX_HEIGHT / 2 - height / 2], [BOX_WIDTH / 2 - width /
2, BOX_HEIGHT - height - 4]]
for a in range(4):
s = pygame.Surface((BOX_WIDTH, BOX_HEIGHT), pygame.SRCALPHA
)
s.fill((0, 0, 0, 0))
if self.getState((x, y)) == self.WIN_STATE:
col = 0, 255, 0, 255
elif [x, y] in self.WALLS:
col = 128, 128, 128, 255
elif len(self.Q) <= self.getState((x, y)) or len(self.Q
[self.getState((x, y))]) <= a:
col = 0, 0, 0, 0
elif self.Q[self.getState((x, y))][a] > 0:
col = 0, 255, 0, 60 + self.Q[self.getState((x, y))][a
] / self.Q.max() * 195
elif self.Q[self.getState((x, y))][a] < 0:
col = 255, 0, 0, 60 + self.Q[self.getState((x, y))][a
] / self.Q.min() * 195
else:
col = 0, 0, 0, 0
if not self.tunnel_vision or self.getState((x, y)
) == self.getState():
pygame.draw.polygon(s, col, [[all_points[a][b][0] -
x * BOX_WIDTH, all_points[a][b][1] - y *
BOX_HEIGHT] for b in range(3)])
self.screen.blit(s, (x * BOX_WIDTH, y * BOX_HEIGHT))
if self.getState((x, y)) != self.WIN_STATE and [x, y
] not in self.WALLS:
pygame.draw.polygon(self.screen, (255, 255, 255
), all_points[a], 2)
pygame.draw.circle(self.screen, (0, 0, 255), (int((self.pos[0] +
0.5) * BOX_WIDTH), int((self.pos[1] + 0.5) * BOX_HEIGHT)), max(
10, int(BOX_WIDTH / 10)))
pygame.display.update()
def moveDir(self, pos, action):
oldPos = list(pos)
if action == Actions.FORWARD:
pos[1] -= 1
elif action == Actions.RIGHT:
pos[0] += 1
elif action == Actions.LEFT:
pos[0] -= 1
elif action == Actions.BACK:
pos[1] += 1
if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1
] >= GRID_HEIGHT or self.hitWall(pos):
pos = oldPos
return pos
def hitWall(self, pos):
for w in self.WALLS:
if w[0] == pos[0] and w[1] == pos[1]:
return True
return False
def getState(self, pos=False):
if not pos:
pos = self.pos
return int(pos[1] * GRID_WIDTH + pos[0])
def getPos(self, state):
return [state % GRID_WIDTH, state // GRID_WIDTH]
<|reserved_special_token_1|>
import pygame
import numpy as np
import random
from enum import Enum
from .config import *
class Actions(Enum):
    """Discrete grid moves; the integer values are the action ids step() accepts."""
    FORWARD = 0
    RIGHT = 1
    LEFT = 2
    BACK = 3
class MazeEnv():
''' TODO '''
def __init__(self, GW, GH, SW, SH):
global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT
GRID_WIDTH = GW
GRID_HEIGHT = GH
SCREEN_WIDTH = SW
SCREEN_HEIGHT = SH
BOX_WIDTH = SCREEN_WIDTH/GRID_WIDTH
BOX_HEIGHT = SCREEN_HEIGHT/GRID_HEIGHT
WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)
# Setup ML stuff
self.pos = np.array(self.getPos(SPAWN_STATE))
self.action_space = Actions
self.max_states = GRID_WIDTH * GRID_HEIGHT
self.max_actions = len(self.action_space)
self.Q = np.zeros([GRID_WIDTH*GRID_HEIGHT, len(self.action_space)])
self.tunnel_vision = False
# Other
self.WALLS = list(WALLS)
self.WIN_STATE = WIN_STATE
self.SPAWN_STATE = SPAWN_STATE
def step(self, action):
self.pos = self.moveDir(self.pos, self.action_space(action))
reward = -0.04
done = True
if self.getState() == self.WIN_STATE:
reward = 10
else:
done = False
return (self.getState(), reward, done, {})
def reset(self):
self.pos = np.array(self.getPos(self.SPAWN_STATE))
def render(self, screen, close=False):
self.screen = screen
self.screen.fill((0, 0, 0))
# Draw the grid
# font = pygame.font.Font(None, 22)
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
all_points = []
all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])
all_points.append([[x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])
all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])
all_points.append([[x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])
width = 34
height = 10
text_offs = [[(BOX_WIDTH/2-width/2), height/2], [BOX_WIDTH-width, BOX_HEIGHT/2-height/2], [4, BOX_HEIGHT/2-height/2], [BOX_WIDTH/2-width/2, BOX_HEIGHT-height-4]]
for a in range(4):
s = pygame.Surface((BOX_WIDTH,BOX_HEIGHT), pygame.SRCALPHA)
s.fill((0, 0, 0, 0))
if self.getState((x, y)) == self.WIN_STATE:
col = (0, 255, 0, 255)
elif [x, y] in self.WALLS:
col = (128, 128, 128, 255)
elif len(self.Q) <= self.getState((x, y)) or len(self.Q[self.getState((x, y))]) <= a:
col = (0, 0, 0, 0)
elif self.Q[self.getState((x, y))][a] > 0:
col = (0, 255, 0, 60 + self.Q[self.getState((x, y))][a] / self.Q.max() * 195)
elif self.Q[self.getState((x, y))][a] < 0:
col = (255, 0, 0, 60 + self.Q[self.getState((x, y))][a] / self.Q.min() * 195)
else:
col = (0, 0, 0, 0)
if not self.tunnel_vision or self.getState((x, y)) == self.getState():
pygame.draw.polygon(s, col, [[all_points[a][b][0]-x*BOX_WIDTH, all_points[a][b][1]-y*BOX_HEIGHT] for b in range(3)])
self.screen.blit(s, (x*BOX_WIDTH, y*BOX_HEIGHT))
if self.getState((x, y)) != self.WIN_STATE and [x, y] not in self.WALLS:
pygame.draw.polygon(self.screen, (255, 255, 255), all_points[a], 2)
#if BOX_WIDTH > 80:
#trender = font.render("{0:.2f}".format(self.Q[self.getState((x, y)), a]), True, (255, 255, 255))
#self.screen.blit(trender, (x*BOX_WIDTH+text_offs[a][0], y*BOX_HEIGHT+text_offs[a][1]))
# Draw the player
pygame.draw.circle(self.screen, (0, 0, 255),
(int((self.pos[0]+0.5)*BOX_WIDTH),
int((self.pos[1]+0.5)*BOX_HEIGHT)),
max(10, int(BOX_WIDTH/10)))
pygame.display.update()
def moveDir(self, pos, action):
oldPos = list(pos)
if action == Actions.FORWARD:
pos[1] -= 1
elif action == Actions.RIGHT:
pos[0] += 1
elif action == Actions.LEFT:
pos[0] -= 1
elif action == Actions.BACK:
pos[1] += 1
if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1] >= GRID_HEIGHT \
or self.hitWall(pos):
pos = oldPos
return pos
def hitWall(self, pos):
for w in self.WALLS:
if w[0] == pos[0] and w[1] == pos[1]:
return True
return False
def getState(self, pos=False):
if not pos:
pos = self.pos
return int(pos[1]*GRID_WIDTH+pos[0])
def getPos(self, state):
    """Inverse of ``getState``: map a row-major state index to ``[x, y]``."""
    row, col = divmod(state, GRID_WIDTH)
    return [col, row]
|
flexible
|
{
"blob_id": "751d2a07b97d080988c54511ca13a97a969e06bd",
"index": 6405,
"step-1": "<mask token>\n\n\nclass MazeEnv:\n <mask token>\n\n def __init__(self, GW, GH, SW, SH):\n global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT\n GRID_WIDTH = GW\n GRID_HEIGHT = GH\n SCREEN_WIDTH = SW\n SCREEN_HEIGHT = SH\n BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH\n BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT\n WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)\n self.pos = np.array(self.getPos(SPAWN_STATE))\n self.action_space = Actions\n self.max_states = GRID_WIDTH * GRID_HEIGHT\n self.max_actions = len(self.action_space)\n self.Q = np.zeros([GRID_WIDTH * GRID_HEIGHT, len(self.action_space)])\n self.tunnel_vision = False\n self.WALLS = list(WALLS)\n self.WIN_STATE = WIN_STATE\n self.SPAWN_STATE = SPAWN_STATE\n\n def step(self, action):\n self.pos = self.moveDir(self.pos, self.action_space(action))\n reward = -0.04\n done = True\n if self.getState() == self.WIN_STATE:\n reward = 10\n else:\n done = False\n return self.getState(), reward, done, {}\n\n def reset(self):\n self.pos = np.array(self.getPos(self.SPAWN_STATE))\n\n def render(self, screen, close=False):\n self.screen = screen\n self.screen.fill((0, 0, 0))\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n all_points = []\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT +\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / 2, y *\n BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH, y * BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *\n BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH, y *\n BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / \n 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n width 
= 34\n height = 10\n text_offs = [[BOX_WIDTH / 2 - width / 2, height / 2], [\n BOX_WIDTH - width, BOX_HEIGHT / 2 - height / 2], [4, \n BOX_HEIGHT / 2 - height / 2], [BOX_WIDTH / 2 - width / \n 2, BOX_HEIGHT - height - 4]]\n for a in range(4):\n s = pygame.Surface((BOX_WIDTH, BOX_HEIGHT), pygame.SRCALPHA\n )\n s.fill((0, 0, 0, 0))\n if self.getState((x, y)) == self.WIN_STATE:\n col = 0, 255, 0, 255\n elif [x, y] in self.WALLS:\n col = 128, 128, 128, 255\n elif len(self.Q) <= self.getState((x, y)) or len(self.Q\n [self.getState((x, y))]) <= a:\n col = 0, 0, 0, 0\n elif self.Q[self.getState((x, y))][a] > 0:\n col = 0, 255, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.max() * 195\n elif self.Q[self.getState((x, y))][a] < 0:\n col = 255, 0, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.min() * 195\n else:\n col = 0, 0, 0, 0\n if not self.tunnel_vision or self.getState((x, y)\n ) == self.getState():\n pygame.draw.polygon(s, col, [[all_points[a][b][0] -\n x * BOX_WIDTH, all_points[a][b][1] - y *\n BOX_HEIGHT] for b in range(3)])\n self.screen.blit(s, (x * BOX_WIDTH, y * BOX_HEIGHT))\n if self.getState((x, y)) != self.WIN_STATE and [x, y\n ] not in self.WALLS:\n pygame.draw.polygon(self.screen, (255, 255, 255\n ), all_points[a], 2)\n pygame.draw.circle(self.screen, (0, 0, 255), (int((self.pos[0] + \n 0.5) * BOX_WIDTH), int((self.pos[1] + 0.5) * BOX_HEIGHT)), max(\n 10, int(BOX_WIDTH / 10)))\n pygame.display.update()\n\n def moveDir(self, pos, action):\n oldPos = list(pos)\n if action == Actions.FORWARD:\n pos[1] -= 1\n elif action == Actions.RIGHT:\n pos[0] += 1\n elif action == Actions.LEFT:\n pos[0] -= 1\n elif action == Actions.BACK:\n pos[1] += 1\n if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1\n ] >= GRID_HEIGHT or self.hitWall(pos):\n pos = oldPos\n return pos\n\n def hitWall(self, pos):\n for w in self.WALLS:\n if w[0] == pos[0] and w[1] == pos[1]:\n return True\n return False\n\n def getState(self, pos=False):\n if not pos:\n pos = 
self.pos\n return int(pos[1] * GRID_WIDTH + pos[0])\n\n def getPos(self, state):\n return [state % GRID_WIDTH, state // GRID_WIDTH]\n",
"step-2": "<mask token>\n\n\nclass MazeEnv:\n \"\"\" TODO \"\"\"\n\n def __init__(self, GW, GH, SW, SH):\n global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT\n GRID_WIDTH = GW\n GRID_HEIGHT = GH\n SCREEN_WIDTH = SW\n SCREEN_HEIGHT = SH\n BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH\n BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT\n WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)\n self.pos = np.array(self.getPos(SPAWN_STATE))\n self.action_space = Actions\n self.max_states = GRID_WIDTH * GRID_HEIGHT\n self.max_actions = len(self.action_space)\n self.Q = np.zeros([GRID_WIDTH * GRID_HEIGHT, len(self.action_space)])\n self.tunnel_vision = False\n self.WALLS = list(WALLS)\n self.WIN_STATE = WIN_STATE\n self.SPAWN_STATE = SPAWN_STATE\n\n def step(self, action):\n self.pos = self.moveDir(self.pos, self.action_space(action))\n reward = -0.04\n done = True\n if self.getState() == self.WIN_STATE:\n reward = 10\n else:\n done = False\n return self.getState(), reward, done, {}\n\n def reset(self):\n self.pos = np.array(self.getPos(self.SPAWN_STATE))\n\n def render(self, screen, close=False):\n self.screen = screen\n self.screen.fill((0, 0, 0))\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n all_points = []\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT +\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / 2, y *\n BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH, y * BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *\n BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH, y *\n BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / \n 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n 
width = 34\n height = 10\n text_offs = [[BOX_WIDTH / 2 - width / 2, height / 2], [\n BOX_WIDTH - width, BOX_HEIGHT / 2 - height / 2], [4, \n BOX_HEIGHT / 2 - height / 2], [BOX_WIDTH / 2 - width / \n 2, BOX_HEIGHT - height - 4]]\n for a in range(4):\n s = pygame.Surface((BOX_WIDTH, BOX_HEIGHT), pygame.SRCALPHA\n )\n s.fill((0, 0, 0, 0))\n if self.getState((x, y)) == self.WIN_STATE:\n col = 0, 255, 0, 255\n elif [x, y] in self.WALLS:\n col = 128, 128, 128, 255\n elif len(self.Q) <= self.getState((x, y)) or len(self.Q\n [self.getState((x, y))]) <= a:\n col = 0, 0, 0, 0\n elif self.Q[self.getState((x, y))][a] > 0:\n col = 0, 255, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.max() * 195\n elif self.Q[self.getState((x, y))][a] < 0:\n col = 255, 0, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.min() * 195\n else:\n col = 0, 0, 0, 0\n if not self.tunnel_vision or self.getState((x, y)\n ) == self.getState():\n pygame.draw.polygon(s, col, [[all_points[a][b][0] -\n x * BOX_WIDTH, all_points[a][b][1] - y *\n BOX_HEIGHT] for b in range(3)])\n self.screen.blit(s, (x * BOX_WIDTH, y * BOX_HEIGHT))\n if self.getState((x, y)) != self.WIN_STATE and [x, y\n ] not in self.WALLS:\n pygame.draw.polygon(self.screen, (255, 255, 255\n ), all_points[a], 2)\n pygame.draw.circle(self.screen, (0, 0, 255), (int((self.pos[0] + \n 0.5) * BOX_WIDTH), int((self.pos[1] + 0.5) * BOX_HEIGHT)), max(\n 10, int(BOX_WIDTH / 10)))\n pygame.display.update()\n\n def moveDir(self, pos, action):\n oldPos = list(pos)\n if action == Actions.FORWARD:\n pos[1] -= 1\n elif action == Actions.RIGHT:\n pos[0] += 1\n elif action == Actions.LEFT:\n pos[0] -= 1\n elif action == Actions.BACK:\n pos[1] += 1\n if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1\n ] >= GRID_HEIGHT or self.hitWall(pos):\n pos = oldPos\n return pos\n\n def hitWall(self, pos):\n for w in self.WALLS:\n if w[0] == pos[0] and w[1] == pos[1]:\n return True\n return False\n\n def getState(self, pos=False):\n if not pos:\n pos 
= self.pos\n return int(pos[1] * GRID_WIDTH + pos[0])\n\n def getPos(self, state):\n return [state % GRID_WIDTH, state // GRID_WIDTH]\n",
"step-3": "<mask token>\n\n\nclass Actions(Enum):\n FORWARD = 0\n RIGHT = 1\n LEFT = 2\n BACK = 3\n\n\nclass MazeEnv:\n \"\"\" TODO \"\"\"\n\n def __init__(self, GW, GH, SW, SH):\n global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT\n GRID_WIDTH = GW\n GRID_HEIGHT = GH\n SCREEN_WIDTH = SW\n SCREEN_HEIGHT = SH\n BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH\n BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT\n WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)\n self.pos = np.array(self.getPos(SPAWN_STATE))\n self.action_space = Actions\n self.max_states = GRID_WIDTH * GRID_HEIGHT\n self.max_actions = len(self.action_space)\n self.Q = np.zeros([GRID_WIDTH * GRID_HEIGHT, len(self.action_space)])\n self.tunnel_vision = False\n self.WALLS = list(WALLS)\n self.WIN_STATE = WIN_STATE\n self.SPAWN_STATE = SPAWN_STATE\n\n def step(self, action):\n self.pos = self.moveDir(self.pos, self.action_space(action))\n reward = -0.04\n done = True\n if self.getState() == self.WIN_STATE:\n reward = 10\n else:\n done = False\n return self.getState(), reward, done, {}\n\n def reset(self):\n self.pos = np.array(self.getPos(self.SPAWN_STATE))\n\n def render(self, screen, close=False):\n self.screen = screen\n self.screen.fill((0, 0, 0))\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n all_points = []\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT +\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / 2, y *\n BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH, y * BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *\n BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH, y *\n BOX_HEIGHT + 
BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / \n 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n width = 34\n height = 10\n text_offs = [[BOX_WIDTH / 2 - width / 2, height / 2], [\n BOX_WIDTH - width, BOX_HEIGHT / 2 - height / 2], [4, \n BOX_HEIGHT / 2 - height / 2], [BOX_WIDTH / 2 - width / \n 2, BOX_HEIGHT - height - 4]]\n for a in range(4):\n s = pygame.Surface((BOX_WIDTH, BOX_HEIGHT), pygame.SRCALPHA\n )\n s.fill((0, 0, 0, 0))\n if self.getState((x, y)) == self.WIN_STATE:\n col = 0, 255, 0, 255\n elif [x, y] in self.WALLS:\n col = 128, 128, 128, 255\n elif len(self.Q) <= self.getState((x, y)) or len(self.Q\n [self.getState((x, y))]) <= a:\n col = 0, 0, 0, 0\n elif self.Q[self.getState((x, y))][a] > 0:\n col = 0, 255, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.max() * 195\n elif self.Q[self.getState((x, y))][a] < 0:\n col = 255, 0, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.min() * 195\n else:\n col = 0, 0, 0, 0\n if not self.tunnel_vision or self.getState((x, y)\n ) == self.getState():\n pygame.draw.polygon(s, col, [[all_points[a][b][0] -\n x * BOX_WIDTH, all_points[a][b][1] - y *\n BOX_HEIGHT] for b in range(3)])\n self.screen.blit(s, (x * BOX_WIDTH, y * BOX_HEIGHT))\n if self.getState((x, y)) != self.WIN_STATE and [x, y\n ] not in self.WALLS:\n pygame.draw.polygon(self.screen, (255, 255, 255\n ), all_points[a], 2)\n pygame.draw.circle(self.screen, (0, 0, 255), (int((self.pos[0] + \n 0.5) * BOX_WIDTH), int((self.pos[1] + 0.5) * BOX_HEIGHT)), max(\n 10, int(BOX_WIDTH / 10)))\n pygame.display.update()\n\n def moveDir(self, pos, action):\n oldPos = list(pos)\n if action == Actions.FORWARD:\n pos[1] -= 1\n elif action == Actions.RIGHT:\n pos[0] += 1\n elif action == Actions.LEFT:\n pos[0] -= 1\n elif action == Actions.BACK:\n pos[1] += 1\n if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1\n ] >= GRID_HEIGHT or self.hitWall(pos):\n pos = oldPos\n return pos\n\n def hitWall(self, pos):\n for w in self.WALLS:\n if w[0] == pos[0] and w[1] == 
pos[1]:\n return True\n return False\n\n def getState(self, pos=False):\n if not pos:\n pos = self.pos\n return int(pos[1] * GRID_WIDTH + pos[0])\n\n def getPos(self, state):\n return [state % GRID_WIDTH, state // GRID_WIDTH]\n",
"step-4": "import pygame\nimport numpy as np\nimport random\nfrom enum import Enum\nfrom .config import *\n\n\nclass Actions(Enum):\n FORWARD = 0\n RIGHT = 1\n LEFT = 2\n BACK = 3\n\n\nclass MazeEnv:\n \"\"\" TODO \"\"\"\n\n def __init__(self, GW, GH, SW, SH):\n global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT\n GRID_WIDTH = GW\n GRID_HEIGHT = GH\n SCREEN_WIDTH = SW\n SCREEN_HEIGHT = SH\n BOX_WIDTH = SCREEN_WIDTH / GRID_WIDTH\n BOX_HEIGHT = SCREEN_HEIGHT / GRID_HEIGHT\n WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)\n self.pos = np.array(self.getPos(SPAWN_STATE))\n self.action_space = Actions\n self.max_states = GRID_WIDTH * GRID_HEIGHT\n self.max_actions = len(self.action_space)\n self.Q = np.zeros([GRID_WIDTH * GRID_HEIGHT, len(self.action_space)])\n self.tunnel_vision = False\n self.WALLS = list(WALLS)\n self.WIN_STATE = WIN_STATE\n self.SPAWN_STATE = SPAWN_STATE\n\n def step(self, action):\n self.pos = self.moveDir(self.pos, self.action_space(action))\n reward = -0.04\n done = True\n if self.getState() == self.WIN_STATE:\n reward = 10\n else:\n done = False\n return self.getState(), reward, done, {}\n\n def reset(self):\n self.pos = np.array(self.getPos(self.SPAWN_STATE))\n\n def render(self, screen, close=False):\n self.screen = screen\n self.screen.fill((0, 0, 0))\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n all_points = []\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + BOX_WIDTH, y *\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH, y * BOX_HEIGHT +\n BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / 2, y *\n BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x *\n BOX_WIDTH, y * BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH +\n BOX_WIDTH / 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n all_points.append([[x * BOX_WIDTH + 
BOX_WIDTH, y *\n BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH, y *\n BOX_HEIGHT + BOX_HEIGHT], [x * BOX_WIDTH + BOX_WIDTH / \n 2, y * BOX_HEIGHT + BOX_HEIGHT / 2]])\n width = 34\n height = 10\n text_offs = [[BOX_WIDTH / 2 - width / 2, height / 2], [\n BOX_WIDTH - width, BOX_HEIGHT / 2 - height / 2], [4, \n BOX_HEIGHT / 2 - height / 2], [BOX_WIDTH / 2 - width / \n 2, BOX_HEIGHT - height - 4]]\n for a in range(4):\n s = pygame.Surface((BOX_WIDTH, BOX_HEIGHT), pygame.SRCALPHA\n )\n s.fill((0, 0, 0, 0))\n if self.getState((x, y)) == self.WIN_STATE:\n col = 0, 255, 0, 255\n elif [x, y] in self.WALLS:\n col = 128, 128, 128, 255\n elif len(self.Q) <= self.getState((x, y)) or len(self.Q\n [self.getState((x, y))]) <= a:\n col = 0, 0, 0, 0\n elif self.Q[self.getState((x, y))][a] > 0:\n col = 0, 255, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.max() * 195\n elif self.Q[self.getState((x, y))][a] < 0:\n col = 255, 0, 0, 60 + self.Q[self.getState((x, y))][a\n ] / self.Q.min() * 195\n else:\n col = 0, 0, 0, 0\n if not self.tunnel_vision or self.getState((x, y)\n ) == self.getState():\n pygame.draw.polygon(s, col, [[all_points[a][b][0] -\n x * BOX_WIDTH, all_points[a][b][1] - y *\n BOX_HEIGHT] for b in range(3)])\n self.screen.blit(s, (x * BOX_WIDTH, y * BOX_HEIGHT))\n if self.getState((x, y)) != self.WIN_STATE and [x, y\n ] not in self.WALLS:\n pygame.draw.polygon(self.screen, (255, 255, 255\n ), all_points[a], 2)\n pygame.draw.circle(self.screen, (0, 0, 255), (int((self.pos[0] + \n 0.5) * BOX_WIDTH), int((self.pos[1] + 0.5) * BOX_HEIGHT)), max(\n 10, int(BOX_WIDTH / 10)))\n pygame.display.update()\n\n def moveDir(self, pos, action):\n oldPos = list(pos)\n if action == Actions.FORWARD:\n pos[1] -= 1\n elif action == Actions.RIGHT:\n pos[0] += 1\n elif action == Actions.LEFT:\n pos[0] -= 1\n elif action == Actions.BACK:\n pos[1] += 1\n if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1\n ] >= GRID_HEIGHT or self.hitWall(pos):\n pos = oldPos\n return pos\n\n def 
hitWall(self, pos):\n for w in self.WALLS:\n if w[0] == pos[0] and w[1] == pos[1]:\n return True\n return False\n\n def getState(self, pos=False):\n if not pos:\n pos = self.pos\n return int(pos[1] * GRID_WIDTH + pos[0])\n\n def getPos(self, state):\n return [state % GRID_WIDTH, state // GRID_WIDTH]\n",
"step-5": "import pygame\nimport numpy as np\nimport random\nfrom enum import Enum\nfrom .config import *\n\nclass Actions(Enum):\n FORWARD = 0\n RIGHT = 1\n LEFT = 2\n BACK = 3\n\nclass MazeEnv():\n ''' TODO '''\n def __init__(self, GW, GH, SW, SH):\n global GRID_WIDTH, GRID_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT, BOX_WIDTH, BOX_HEIGHT\n\n GRID_WIDTH = GW\n GRID_HEIGHT = GH\n SCREEN_WIDTH = SW\n SCREEN_HEIGHT = SH\n\n BOX_WIDTH = SCREEN_WIDTH/GRID_WIDTH\n BOX_HEIGHT = SCREEN_HEIGHT/GRID_HEIGHT\n\n WIN_STATE = random.randint(0, GRID_WIDTH * GRID_HEIGHT - 1)\n # Setup ML stuff\n self.pos = np.array(self.getPos(SPAWN_STATE))\n self.action_space = Actions\n self.max_states = GRID_WIDTH * GRID_HEIGHT\n self.max_actions = len(self.action_space)\n\n self.Q = np.zeros([GRID_WIDTH*GRID_HEIGHT, len(self.action_space)])\n\n self.tunnel_vision = False\n\n # Other\n self.WALLS = list(WALLS)\n self.WIN_STATE = WIN_STATE\n self.SPAWN_STATE = SPAWN_STATE\n\n def step(self, action):\n self.pos = self.moveDir(self.pos, self.action_space(action))\n\n reward = -0.04\n done = True\n if self.getState() == self.WIN_STATE:\n reward = 10\n else:\n done = False\n\n return (self.getState(), reward, done, {})\n\n def reset(self):\n self.pos = np.array(self.getPos(self.SPAWN_STATE))\n\n def render(self, screen, close=False):\n self.screen = screen\n self.screen.fill((0, 0, 0))\n\n # Draw the grid\n # font = pygame.font.Font(None, 22)\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n all_points = []\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])\n all_points.append([[x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])\n all_points.append([[x * BOX_WIDTH, y * BOX_HEIGHT], [x * BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])\n 
all_points.append([[x * BOX_WIDTH+BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH, y * BOX_HEIGHT+BOX_HEIGHT], [x * BOX_WIDTH+BOX_WIDTH/2, y * BOX_HEIGHT+BOX_HEIGHT/2]])\n\n width = 34\n height = 10\n text_offs = [[(BOX_WIDTH/2-width/2), height/2], [BOX_WIDTH-width, BOX_HEIGHT/2-height/2], [4, BOX_HEIGHT/2-height/2], [BOX_WIDTH/2-width/2, BOX_HEIGHT-height-4]]\n\n for a in range(4):\n s = pygame.Surface((BOX_WIDTH,BOX_HEIGHT), pygame.SRCALPHA)\n s.fill((0, 0, 0, 0))\n\n if self.getState((x, y)) == self.WIN_STATE:\n col = (0, 255, 0, 255)\n elif [x, y] in self.WALLS:\n col = (128, 128, 128, 255)\n elif len(self.Q) <= self.getState((x, y)) or len(self.Q[self.getState((x, y))]) <= a:\n col = (0, 0, 0, 0)\n elif self.Q[self.getState((x, y))][a] > 0:\n col = (0, 255, 0, 60 + self.Q[self.getState((x, y))][a] / self.Q.max() * 195)\n elif self.Q[self.getState((x, y))][a] < 0:\n col = (255, 0, 0, 60 + self.Q[self.getState((x, y))][a] / self.Q.min() * 195)\n else:\n col = (0, 0, 0, 0)\n\n if not self.tunnel_vision or self.getState((x, y)) == self.getState():\n pygame.draw.polygon(s, col, [[all_points[a][b][0]-x*BOX_WIDTH, all_points[a][b][1]-y*BOX_HEIGHT] for b in range(3)])\n self.screen.blit(s, (x*BOX_WIDTH, y*BOX_HEIGHT))\n\n if self.getState((x, y)) != self.WIN_STATE and [x, y] not in self.WALLS:\n pygame.draw.polygon(self.screen, (255, 255, 255), all_points[a], 2)\n\n #if BOX_WIDTH > 80:\n #trender = font.render(\"{0:.2f}\".format(self.Q[self.getState((x, y)), a]), True, (255, 255, 255))\n #self.screen.blit(trender, (x*BOX_WIDTH+text_offs[a][0], y*BOX_HEIGHT+text_offs[a][1]))\n\n # Draw the player\n pygame.draw.circle(self.screen, (0, 0, 255),\n (int((self.pos[0]+0.5)*BOX_WIDTH),\n int((self.pos[1]+0.5)*BOX_HEIGHT)),\n max(10, int(BOX_WIDTH/10)))\n\n pygame.display.update()\n\n def moveDir(self, pos, action):\n oldPos = list(pos)\n if action == Actions.FORWARD:\n pos[1] -= 1\n elif action == Actions.RIGHT:\n pos[0] += 1\n elif action == Actions.LEFT:\n pos[0] -= 
1\n elif action == Actions.BACK:\n pos[1] += 1\n\n if pos[0] < 0 or pos[0] >= GRID_WIDTH or pos[1] < 0 or pos[1] >= GRID_HEIGHT \\\n or self.hitWall(pos):\n pos = oldPos\n\n return pos\n\n def hitWall(self, pos):\n for w in self.WALLS:\n if w[0] == pos[0] and w[1] == pos[1]:\n return True\n return False\n\n def getState(self, pos=False):\n if not pos:\n pos = self.pos\n\n return int(pos[1]*GRID_WIDTH+pos[0])\n\n def getPos(self, state):\n return [state % GRID_WIDTH, state // GRID_WIDTH]",
"step-ids": [
9,
10,
12,
13,
14
]
}
|
[
9,
10,
12,
13,
14
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestJobConfigHistory(WebAppTest):
    """Checks for the Jenkins Job Config History plugin configuration."""

    def setUp(self):
        """Load the expected plugin settings and open the config sub-page.

        Reads ``job_config_history.yml`` from ``$CONFIG_PATH``.  Fails the
        test with a clear message when the file cannot be read: the previous
        version swallowed the IOError and then crashed with a NameError on
        the unbound ``yaml_contents``.  It also leaked the file handle.
        """
        super(TestJobConfigHistory, self).setUp()
        config_path = os.getenv('CONFIG_PATH')
        config_file = '{}/job_config_history.yml'.format(config_path)
        try:
            # ``with`` guarantees the file handle is closed.
            with open(config_file, 'r') as yaml_file:
                yaml_contents = yaml_file.read()
        except IOError:
            self.fail('Unable to read configuration file: {}'.format(config_file))
        self.job_config_history = yaml.safe_load(yaml_contents)
        self.config_page = JobConfigHistorySubPage(self.browser)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestJobConfigHistory(WebAppTest):
    """Checks for the Jenkins Job Config History plugin configuration."""

    def setUp(self):
        """Load the expected plugin settings and open the config sub-page.

        Reads ``job_config_history.yml`` from ``$CONFIG_PATH``.  Fails the
        test with a clear message when the file cannot be read: the previous
        version swallowed the IOError and then crashed with a NameError on
        the unbound ``yaml_contents``.  It also leaked the file handle.
        """
        super(TestJobConfigHistory, self).setUp()
        config_path = os.getenv('CONFIG_PATH')
        config_file = '{}/job_config_history.yml'.format(config_path)
        try:
            # ``with`` guarantees the file handle is closed.
            with open(config_file, 'r') as yaml_file:
                yaml_contents = yaml_file.read()
        except IOError:
            self.fail('Unable to read configuration file: {}'.format(config_file))
        self.job_config_history = yaml.safe_load(yaml_contents)
        self.config_page = JobConfigHistorySubPage(self.browser)

    def test_job_config_history(self):
        """
        Verify the Jenkins Config History plugin has been configured
        properly.
        """
        self.config_page.visit()
        self.config_page.expand_advanced()
        # Compare each setting shown on the page with the expected value
        # from the YAML fixture.
        assert self.job_config_history['HISTORY_ROOT_DIR'] == self.config_page.get_history_root_dir()
        assert self.job_config_history['MAX_HISTORY_ENTRIES'] == self.config_page.get_max_history_entries()
        # NOTE(review): the expected boolean is normalized to a lowercase
        # string -- presumably the page reports 'true'/'false'; confirm.
        assert str(self.job_config_history['SKIP_DUPLICATE_HISTORY']).lower() == self.config_page.get_skip_duplicate_history()
        assert self.job_config_history['SHOW_BUILD_BADGES'] == self.config_page.get_show_build_badges()
<|reserved_special_token_1|>
from __future__ import absolute_import
import unittest
import yaml
import os
from bok_choy.web_app_test import WebAppTest
from .pages.job_config_history_subpage import JobConfigHistorySubPage
class TestJobConfigHistory(WebAppTest):
    """Checks for the Jenkins Job Config History plugin configuration."""

    def setUp(self):
        """Load the expected plugin settings and open the config sub-page.

        Reads ``job_config_history.yml`` from ``$CONFIG_PATH``.  Fails the
        test with a clear message when the file cannot be read: the previous
        version swallowed the IOError and then crashed with a NameError on
        the unbound ``yaml_contents``.  It also leaked the file handle.
        """
        super(TestJobConfigHistory, self).setUp()
        config_path = os.getenv('CONFIG_PATH')
        config_file = '{}/job_config_history.yml'.format(config_path)
        try:
            # ``with`` guarantees the file handle is closed.
            with open(config_file, 'r') as yaml_file:
                yaml_contents = yaml_file.read()
        except IOError:
            self.fail('Unable to read configuration file: {}'.format(config_file))
        self.job_config_history = yaml.safe_load(yaml_contents)
        self.config_page = JobConfigHistorySubPage(self.browser)

    def test_job_config_history(self):
        """
        Verify the Jenkins Config History plugin has been configured
        properly.
        """
        self.config_page.visit()
        self.config_page.expand_advanced()
        # Compare each setting shown on the page with the expected value
        # from the YAML fixture.
        assert self.job_config_history['HISTORY_ROOT_DIR'] == self.config_page.get_history_root_dir()
        assert self.job_config_history['MAX_HISTORY_ENTRIES'] == self.config_page.get_max_history_entries()
        # NOTE(review): the expected boolean is normalized to a lowercase
        # string -- presumably the page reports 'true'/'false'; confirm.
        assert str(self.job_config_history['SKIP_DUPLICATE_HISTORY']).lower() == self.config_page.get_skip_duplicate_history()
        assert self.job_config_history['SHOW_BUILD_BADGES'] == self.config_page.get_show_build_badges()
<|reserved_special_token_1|>
from __future__ import absolute_import
import unittest
import yaml
import os
from bok_choy.web_app_test import WebAppTest
from .pages.job_config_history_subpage import JobConfigHistorySubPage
class TestJobConfigHistory(WebAppTest):
    """Checks for the Jenkins Job Config History plugin configuration."""

    def setUp(self):
        """Load the expected plugin settings and open the config sub-page.

        Reads ``job_config_history.yml`` from ``$CONFIG_PATH``.  Fails the
        test with a clear message when the file cannot be read: the previous
        version swallowed the IOError and then crashed with a NameError on
        the unbound ``yaml_contents``.  It also leaked the file handle.
        """
        super(TestJobConfigHistory, self).setUp()
        config_path = os.getenv('CONFIG_PATH')
        config_file = '{}/job_config_history.yml'.format(config_path)
        try:
            # ``with`` guarantees the file handle is closed.
            with open(config_file, 'r') as yaml_file:
                yaml_contents = yaml_file.read()
        except IOError:
            self.fail('Unable to read configuration file: {}'.format(config_file))
        self.job_config_history = yaml.safe_load(yaml_contents)
        self.config_page = JobConfigHistorySubPage(self.browser)

    def test_job_config_history(self):
        """
        Verify the Jenkins Config History plugin has been configured
        properly.
        """
        self.config_page.visit()
        self.config_page.expand_advanced()
        # Compare each setting shown on the page with the expected value
        # from the YAML fixture.
        assert self.job_config_history['HISTORY_ROOT_DIR'] == self.config_page.get_history_root_dir()
        assert self.job_config_history['MAX_HISTORY_ENTRIES'] == self.config_page.get_max_history_entries()
        # NOTE(review): the expected boolean is normalized to a lowercase
        # string -- presumably the page reports 'true'/'false'; confirm.
        assert str(self.job_config_history['SKIP_DUPLICATE_HISTORY']).lower() == self.config_page.get_skip_duplicate_history()
        assert self.job_config_history['SHOW_BUILD_BADGES'] == self.config_page.get_show_build_badges()
|
flexible
|
{
"blob_id": "51bdbec732bebd73a84b52c6d1d39eead047d29e",
"index": 5349,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestJobConfigHistory(WebAppTest):\n\n def setUp(self):\n super(TestJobConfigHistory, self).setUp()\n config_path = os.getenv('CONFIG_PATH')\n try:\n yaml_contents = open('{}/job_config_history.yml'.format(\n config_path), 'r').read()\n except IOError:\n pass\n self.job_config_history = yaml.safe_load(yaml_contents)\n self.config_page = JobConfigHistorySubPage(self.browser)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TestJobConfigHistory(WebAppTest):\n\n def setUp(self):\n super(TestJobConfigHistory, self).setUp()\n config_path = os.getenv('CONFIG_PATH')\n try:\n yaml_contents = open('{}/job_config_history.yml'.format(\n config_path), 'r').read()\n except IOError:\n pass\n self.job_config_history = yaml.safe_load(yaml_contents)\n self.config_page = JobConfigHistorySubPage(self.browser)\n\n def test_job_config_history(self):\n \"\"\"\n Verify the Jenkins Config History plugin has been configured\n properly.\n \"\"\"\n self.config_page.visit()\n self.config_page.expand_advanced()\n assert self.job_config_history['HISTORY_ROOT_DIR'\n ] == self.config_page.get_history_root_dir()\n assert self.job_config_history['MAX_HISTORY_ENTRIES'\n ] == self.config_page.get_max_history_entries()\n assert str(self.job_config_history['SKIP_DUPLICATE_HISTORY']).lower(\n ) == self.config_page.get_skip_duplicate_history()\n assert self.job_config_history['SHOW_BUILD_BADGES'\n ] == self.config_page.get_show_build_badges()\n",
"step-4": "from __future__ import absolute_import\nimport unittest\nimport yaml\nimport os\nfrom bok_choy.web_app_test import WebAppTest\nfrom .pages.job_config_history_subpage import JobConfigHistorySubPage\n\n\nclass TestJobConfigHistory(WebAppTest):\n\n def setUp(self):\n super(TestJobConfigHistory, self).setUp()\n config_path = os.getenv('CONFIG_PATH')\n try:\n yaml_contents = open('{}/job_config_history.yml'.format(\n config_path), 'r').read()\n except IOError:\n pass\n self.job_config_history = yaml.safe_load(yaml_contents)\n self.config_page = JobConfigHistorySubPage(self.browser)\n\n def test_job_config_history(self):\n \"\"\"\n Verify the Jenkins Config History plugin has been configured\n properly.\n \"\"\"\n self.config_page.visit()\n self.config_page.expand_advanced()\n assert self.job_config_history['HISTORY_ROOT_DIR'\n ] == self.config_page.get_history_root_dir()\n assert self.job_config_history['MAX_HISTORY_ENTRIES'\n ] == self.config_page.get_max_history_entries()\n assert str(self.job_config_history['SKIP_DUPLICATE_HISTORY']).lower(\n ) == self.config_page.get_skip_duplicate_history()\n assert self.job_config_history['SHOW_BUILD_BADGES'\n ] == self.config_page.get_show_build_badges()\n",
"step-5": "from __future__ import absolute_import\nimport unittest\nimport yaml\nimport os\nfrom bok_choy.web_app_test import WebAppTest\nfrom .pages.job_config_history_subpage import JobConfigHistorySubPage\n\nclass TestJobConfigHistory(WebAppTest):\n\n def setUp(self):\n super(TestJobConfigHistory, self).setUp()\n config_path = os.getenv('CONFIG_PATH')\n try:\n yaml_contents = open(\n \"{}/job_config_history.yml\".format(config_path), 'r'\n ).read()\n except IOError:\n pass\n self.job_config_history = yaml.safe_load(yaml_contents)\n self.config_page = JobConfigHistorySubPage(self.browser)\n\n def test_job_config_history(self):\n \"\"\"\n Verify the Jenkins Config History plugin has been configured\n properly.\n \"\"\"\n self.config_page.visit()\n self.config_page.expand_advanced()\n assert self.job_config_history['HISTORY_ROOT_DIR'] == self.config_page.get_history_root_dir()\n assert self.job_config_history['MAX_HISTORY_ENTRIES'] == self.config_page.get_max_history_entries()\n assert str(self.job_config_history['SKIP_DUPLICATE_HISTORY']).lower() == self.config_page.get_skip_duplicate_history()\n assert self.job_config_history['SHOW_BUILD_BADGES'] == self.config_page.get_show_build_badges()\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
def cubarea(l2, b2, h2):
    """Print and return the total surface area of a cuboid.

    The surface area of a cuboid with edges l, b, h is 2*(l*b + b*h + h*l);
    the previous formula 2*(l + b + h) summed the edge lengths, which is not
    an area.  The value is also returned so callers can use it directly.
    """
    area = 2 * (l2 * b2 + b2 * h2 + h2 * l2)
    print("Area of cuboid =", area)
    return area
def cubperimeter(l2, b2, h2):
    """Print and return the total edge length ("perimeter") of a cuboid.

    A cuboid has four edges of each of the three lengths, so the total is
    4*(l + b + h).  The value is also returned so callers can use it
    directly instead of only seeing it printed.
    """
    perimeter = 4 * (l2 + b2 + h2)
    print("Perimeter of cuboid =", perimeter)
    return perimeter
|
normal
|
{
"blob_id": "45a85ff765833fd62fc1670404d8994818788707",
"index": 6873,
"step-1": "<mask token>\n",
"step-2": "def cubarea(l2, b2, h2):\n print('Area of cuboid =', 2 * (l2 + b2 + h2))\n\n\n<mask token>\n",
"step-3": "def cubarea(l2, b2, h2):\n print('Area of cuboid =', 2 * (l2 + b2 + h2))\n\n\ndef cubperimeter(l2, b2, h2):\n print('Perimeter of cuboid =', 4 * (l2 + b2 + h2))\n",
"step-4": "def cubarea(l2,b2,h2):\n print(\"Area of cuboid =\",2*(l2+b2+h2))\ndef cubperimeter(l2,b2,h2):\n print(\"Perimeter of cuboid =\",4*(l2+b2+h2)) \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from werkzeug.security import check_password_hash, generate_password_hash
from datetime import datetime
from app import db
from app import login
from flask_login import UserMixin
@login.user_loader
def load_user(id):
    """Flask-Login user loader: fetch the User for a session's stored id.

    Flask-Login stores the user id as a string, hence the int() conversion
    before the primary-key lookup.
    """
    return User.query.get(int(id))
class User(UserMixin, db.Model):
    """Application user account with hashed-password authentication."""

    # Primary key.
    user_id = db.Column(db.Integer, primary_key=True, nullable=False)
    username = db.Column(db.String(50), unique=True, nullable=False)
    email = db.Column(db.String(100))
    # Werkzeug password hash; the plaintext password is never stored.
    pass_hash = db.Column(db.String(128))
    # NOTE(review): backref 'user.id' contains a dot, so it cannot be used as
    # an attribute name on Post, and no column named 'id' exists here (the PK
    # is user_id) -- confirm the intended backref name.
    posts = db.relationship('Post', backref='user.id', lazy='dynamic')

    def __repr__(self):
        return "<User {}>".format(self.username)

    def set_pass(self, password):
        """Store a salted hash of *password* on this user."""
        self.pass_hash = generate_password_hash(password)

    def check_pass(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.pass_hash, password)
class Post(db.Model):
    """A post authored by a user."""

    post_id = db.Column(db.Integer, primary_key=True, nullable=False)
    title = db.Column(db.String(50))
    body = db.Column(db.String(200))
    # UTC creation time; indexed for chronological queries.
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    # Bug fix: the author link must be a Column carrying a ForeignKey --
    # db.relationship() does not accept a column type/ForeignKey and would
    # fail at mapper configuration.  The FK targets the 'user' table
    # (SQLAlchemy's default lowercase table name for the User model).
    user_id = db.Column(db.Integer, db.ForeignKey('user.user_id'))

    def __repr__(self):
        return "<Post: {} authoured by {}>".format(self.title, self.user_id)
class Following(db.Model):
    """Association table: follower_id follows following_id."""
    # The two ids form a composite primary key, so a given follow
    # relationship can only exist once.
    follower_id = db.Column(db.Integer, primary_key=True, nullable=False)
    # NOTE(review): neither column carries a ForeignKey to user.user_id —
    # presumably intended to reference User; confirm.
    following_id = db.Column(db.Integer, primary_key=True, nullable=False)
|
normal
|
{
"blob_id": "5cfdb1f6b99f59a83a9bd42b7daf3e016eee94a8",
"index": 2898,
"step-1": "<mask token>\n\n\nclass Post(db.Model):\n post_id = db.Column(db.Integer, primary_key=True, nullable=False)\n title = db.Column(db.String(50))\n body = db.Column(db.String(200))\n timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)\n user_id = db.relationship(db.Integer, db.ForeignKey('User.user_id'))\n\n def __repr__(self):\n return '<Post: {} authoured by {}>'.format(self.title, self.user_id)\n\n\nclass Following(db.Model):\n follower_id = db.Column(db.Integer, primary_key=True, nullable=False)\n following_id = db.Column(db.Integer, primary_key=True, nullable=False)\n",
"step-2": "<mask token>\n\n\nclass User(UserMixin, db.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __repr__(self):\n return '<User {}>'.format(self.username)\n <mask token>\n\n def check_pass(self, password):\n return check_password_hash(self.pass_hash, password)\n\n\nclass Post(db.Model):\n post_id = db.Column(db.Integer, primary_key=True, nullable=False)\n title = db.Column(db.String(50))\n body = db.Column(db.String(200))\n timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)\n user_id = db.relationship(db.Integer, db.ForeignKey('User.user_id'))\n\n def __repr__(self):\n return '<Post: {} authoured by {}>'.format(self.title, self.user_id)\n\n\nclass Following(db.Model):\n follower_id = db.Column(db.Integer, primary_key=True, nullable=False)\n following_id = db.Column(db.Integer, primary_key=True, nullable=False)\n",
"step-3": "<mask token>\n\n\nclass User(UserMixin, db.Model):\n user_id = db.Column(db.Integer, primary_key=True, nullable=False)\n username = db.Column(db.String(50), unique=True, nullable=False)\n email = db.Column(db.String(100))\n pass_hash = db.Column(db.String(128))\n posts = db.relationship('Post', backref='user.id', lazy='dynamic')\n\n def __repr__(self):\n return '<User {}>'.format(self.username)\n\n def set_pass(self, password):\n self.pass_hash = generate_password_hash(password)\n\n def check_pass(self, password):\n return check_password_hash(self.pass_hash, password)\n\n\nclass Post(db.Model):\n post_id = db.Column(db.Integer, primary_key=True, nullable=False)\n title = db.Column(db.String(50))\n body = db.Column(db.String(200))\n timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)\n user_id = db.relationship(db.Integer, db.ForeignKey('User.user_id'))\n\n def __repr__(self):\n return '<Post: {} authoured by {}>'.format(self.title, self.user_id)\n\n\nclass Following(db.Model):\n follower_id = db.Column(db.Integer, primary_key=True, nullable=False)\n following_id = db.Column(db.Integer, primary_key=True, nullable=False)\n",
"step-4": "<mask token>\n\n\[email protected]_loader\ndef load_user(id):\n return User.query.get(int(id))\n\n\nclass User(UserMixin, db.Model):\n user_id = db.Column(db.Integer, primary_key=True, nullable=False)\n username = db.Column(db.String(50), unique=True, nullable=False)\n email = db.Column(db.String(100))\n pass_hash = db.Column(db.String(128))\n posts = db.relationship('Post', backref='user.id', lazy='dynamic')\n\n def __repr__(self):\n return '<User {}>'.format(self.username)\n\n def set_pass(self, password):\n self.pass_hash = generate_password_hash(password)\n\n def check_pass(self, password):\n return check_password_hash(self.pass_hash, password)\n\n\nclass Post(db.Model):\n post_id = db.Column(db.Integer, primary_key=True, nullable=False)\n title = db.Column(db.String(50))\n body = db.Column(db.String(200))\n timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)\n user_id = db.relationship(db.Integer, db.ForeignKey('User.user_id'))\n\n def __repr__(self):\n return '<Post: {} authoured by {}>'.format(self.title, self.user_id)\n\n\nclass Following(db.Model):\n follower_id = db.Column(db.Integer, primary_key=True, nullable=False)\n following_id = db.Column(db.Integer, primary_key=True, nullable=False)\n",
"step-5": "from werkzeug.security import check_password_hash, generate_password_hash\nfrom datetime import datetime\nfrom app import db\nfrom app import login\nfrom flask_login import UserMixin\n\n\[email protected]_loader\ndef load_user(id):\n\treturn User.query.get(int(id))\n\n\nclass User(UserMixin, db.Model):\n\tuser_id = db.Column(db.Integer, primary_key=True, nullable=False)\n\tusername = db.Column(db.String(50), unique=True, nullable=False)\n\temail = db.Column(db.String(100))\n\tpass_hash = db.Column(db.String(128))\n\tposts = db.relationship('Post', backref='user.id', lazy='dynamic')\n\n\tdef __repr__(self):\n\t\treturn \"<User {}>\".format(self.username)\n\n\tdef set_pass(self, password):\n\t\tself.pass_hash = generate_password_hash(password)\n\n\tdef check_pass(self, password):\n\t\treturn check_password_hash(self.pass_hash, password)\n\n\nclass Post(db.Model):\n\tpost_id = db.Column(db.Integer, primary_key=True, nullable=False)\n\ttitle = db.Column(db.String(50))\n\tbody = db.Column(db.String(200))\n\ttimestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)\n\tuser_id = db.relationship(db.Integer, db.ForeignKey('User.user_id'))\n\n\tdef __repr__(self):\n\t\treturn \"<Post: {} authoured by {}>\".format(self.title, self.user_id)\n\n\nclass Following(db.Model):\n\tfollower_id = db.Column(db.Integer, primary_key=True, nullable=False)\n\tfollowing_id = db.Column(db.Integer, primary_key=True, nullable=False)\n",
"step-ids": [
5,
8,
10,
11,
13
]
}
|
[
5,
8,
10,
11,
13
] |
from src.produtos import *
class Estoque(object):
    """Interactive stock-control console.

    Keeps in-memory lists of categories, subcategories and products and
    drives a text menu; the menu loop starts as soon as the object is
    constructed (``__init__`` calls ``menu_estoque``).
    """

    def __init__(self):
        self.categorias = []
        self.subcategorias = []
        self.produtos = []
        # Constructing the object immediately enters the interactive menu.
        self.menu_estoque()

    def save_categoria(self, categoria):
        # Persistence hook, not implemented yet.
        pass

    def save_subcategorias(self, subcategoria):
        # Persistence hook, not implemented yet.
        pass

    def save_produtos(self, produto):
        # Persistence hook, not implemented yet.
        pass

    def create_categoria(self):
        """Prompt for code, name and description and register a new Categoria."""
        print("- Criar CATEGORIA -")
        codigo = input("CÓDIGO: ").strip()
        nome = input("NOME: ").strip()
        descrição = input("DESCRIÇÃO: ").strip()
        categoria = Categoria(codigo, nome, descrição)
        if categoria not in self.categorias:
            self.categorias.append(categoria)

    def create_subcategoria(self):
        """Prompt for a new Subcategoria; forces a parent Categoria to exist."""
        if len(self.categorias) == 0:
            print("Você deve criar pelo menos uma CATEGORIA!\n")
            self.create_categoria()
        print("- Criar SUBCATEGORIA -")
        codigo = input("CÓDIGO: ").strip()
        nome = input("NOME: ").strip()
        descrição = input("DESCRIÇÃO: ").strip()
        escolhe = input("CATEGORIA (Nome ou Código): ")
        categoria = 0
        for cat in self.categorias:
            if cat.nome == escolhe or cat.codigo == escolhe:
                categoria = cat
                break
        else:
            # for/else: runs only when no category matched the answer.
            print("Categoria não Encontrada!\nVocê deve criar uma CATEGORIA!")
            self.create_categoria()
        subcategoria = Subcategoria(categoria, codigo, nome, descrição)
        if subcategoria not in self.subcategorias:
            self.subcategorias.append(subcategoria)

    def create_produto(self):
        """Prompt for a sellable product; a product belongs to a Subcategoria.

        TODO: the photo is currently stored as a plain string.
        """
        if not len(self.subcategorias):
            print("Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\n")
            self.create_subcategoria()
        else:
            print("- Cadastrar PRODUTO -")
            escolhe = input("SUBCATEGORIA (Nome ou Código): ").lower()
            codigo = input("CÓDIGO: ").strip()
            nome = input("NOME: ").strip()
            descrição = input("DESCRIÇÃO: ").strip()
            estoquemax = input("Quantidade Maxima em Estoque: ")
            # NOTE(review): 'produtos' is presumably a helper exported by
            # src.produtos providing the validators — confirm.
            while not produtos.valida_estoque(estoquemax):
                print("Valor Inválido!")
                estoquemax = input("Valor deve ser Numérico: ")
            estoquemin = input("Quantidade Minima em Estoque: ")
            while not produtos.valida_estoque(estoquemin):
                print("Valor Inválido!")
                estoquemin = input("Valor deve ser Numérico: ")
            valorvenda = input("Preço Unitário: ")
            while not produtos.valida_valorvenda(valorvenda):
                print("Valor Inválido!")
                # BUG FIX: originally re-read into 'estoquemax', so an
                # invalid sale price looped forever.
                valorvenda = input("Valor deve ser Numérico: ")
            valorcompra = input("Valor de Compra: ")
            while not produtos.valida_valorvenda(valorcompra):
                print("Valor Inválido!")
                # BUG FIX: same copy-paste bug as above for the purchase price.
                valorcompra = input("Valor deve ser Numérico: ")
            foto = input("Arquivo de foto: ")
            subcategoria = 0
            for scat in self.subcategorias:
                if scat.nome.lower() == escolhe or scat.codigo == escolhe:
                    subcategoria = scat
                    break
            else:
                print("Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não")
                choice = input()
                if choice.lower() == 's' or choice == '1':
                    self.create_subcategoria()
                else:
                    self.create_produto()
            # BUG FIX: 'descricao' was undefined (the local is 'descrição'),
            # raising NameError on every product registration.
            produto = Produtos(subcategoria, codigo, nome, descrição, estoquemax, estoquemin, valorvenda, valorcompra, foto)
            if produto not in self.produtos:
                self.produtos.append(produto)

    # features requested by the specification

    def low_stock_alarm(self):
        # Low-stock warning, not implemented yet.
        pass

    def consulta_estoque(self):
        """Print every registered category, subcategory and product, then re-enter the menu."""
        print("Exibindo estoque")
        if not len(self.categorias):
            print("Não há Categorias Registrados!")
        else:
            for categoria in self.categorias:
                print(categoria, end=" ")
            print()
        if not len(self.subcategorias):
            print("Não há Subcategorias Registradas!")
        else:
            for subcategoria in self.subcategorias:
                print(subcategoria, end=" ")
            print()
        if not len(self.produtos):
            print("Não há Produtos Registrados!")
        else:
            for produto in self.produtos:
                print(produto, end=" ")
        self.menu_estoque()

    def altera_item(self):
        """Alter a stock item (stub) and return to the menu."""
        print("alterando item do estoque")
        self.menu_estoque()

    def remove_item(self):
        """Remove a stock item (stub); must not remove items that still have stock."""
        print("Removendo item do estoque")
        self.menu_estoque()

    def adiciona_item(self):
        """Sub-menu loop for adding categories, subcategories and products."""
        print("Adicionando item ao estoque")
        while 1:
            print("************* Menu Adicionar: ******************")
            print("Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair")
            opcao = input()
            while not self.valida_opcao(opcao):
                print("Opção Inválida!")
                opcao = input()
            if opcao == '1':
                self.create_categoria()
            elif opcao == '2':
                self.create_subcategoria()
            elif opcao == '3':
                # BUG FIX: option 3 ("Adicionar Produtos") previously did
                # nothing; wire it to the existing create_produto().
                self.create_produto()
            elif opcao == '4':
                break
        self.menu_estoque()

    def menu_estoque(self):
        """Main menu: dispatch to consult/add/remove/alter actions."""
        print("Sistema de Vendas ao Consumidor")
        print("****** MENU DE ESTOQUE *****")
        print("Digite Ação!\n1 - Consultar Estoque\n2 - Adicionar\n3 - Remover\n4 - Alterar")
        opcao = input()
        while not self.valida_opcao(opcao):
            print("Opção Inválida!")
            opcao = input()
        if opcao == '1':
            self.consulta_estoque()
        elif opcao == '2':
            self.adiciona_item()
        elif opcao == '3':
            self.remove_item()
        elif opcao == '4':
            self.altera_item()

    def valida_opcao(self, opcao):
        """Return True when the menu answer consists only of digits."""
        if opcao.isdigit():
            return True
        else:
            return False
estoque = Estoque()
|
normal
|
{
"blob_id": "9f3ca0d5a10a27d926a0f306665889418f8d6a0c",
"index": 5884,
"step-1": "<mask token>\n\n\nclass Estoque(object):\n <mask token>\n\n def save_categoria(self, categoria):\n pass\n <mask token>\n\n def save_produtos(self, produto):\n pass\n <mask token>\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n <mask token>\n\n def low_stock_alarm(self):\n pass\n <mask token>\n\n def altera_item(self):\n print('alterando item do estoque')\n self.menu_estoque()\n <mask token>\n <mask token>\n <mask token>\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Estoque(object):\n <mask token>\n\n def save_categoria(self, categoria):\n pass\n <mask token>\n\n def save_produtos(self, produto):\n pass\n\n def create_categoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição\n \"\"\"\n print('- Criar CATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n categoria = Categoria(codigo, nome, descrição)\n if categoria not in self.categorias:\n self.categorias.append(categoria)\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n\n def create_produto(self):\n \"\"\"\"\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\n Produtos são itens que podem ser vendidos.\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\n\n TODELETE: Por enquanto foto recebe uma string qualquer\n\n \"\"\"\n if not len(self.subcategorias):\n print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n')\n self.create_subcategoria()\n else:\n print('- Cadastrar PRODUTO -')\n escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n estoquemax = input('Quantidade Maxima em Estoque: ')\n while not produtos.valida_estoque(estoquemax):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n estoquemin = input('Quantidade Minima em Estoque: ')\n while not produtos.valida_estoque(estoquemin):\n print('Valor Inválido!')\n estoquemin = input('Valor deve ser Numérico: ')\n valorvenda = input('Preço Unitário: ')\n while not produtos.valida_valorvenda(valorvenda):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n valorcompra = input('Valor de Compra: ')\n while not produtos.valida_valorvenda(valorcompra):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n foto = input('Arquivo de foto: ')\n subcategoria = 0\n for scat in self.subcategorias:\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\n subcategoria = scat\n break\n else:\n print(\n \"\"\"Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não\"\"\"\n )\n choice = input()\n if choice.lower() == 's' or choice == '1':\n self.create_subcategoria()\n else:\n self.create_produto()\n produto = Produtos(subcategoria, codigo, nome, descricao,\n estoquemax, estoquemin, valorvenda, valorcompra, foto)\n if produto not in self.produtos:\n self.produtos.append(produto)\n\n def low_stock_alarm(self):\n pass\n <mask token>\n\n def altera_item(self):\n 
print('alterando item do estoque')\n self.menu_estoque()\n <mask token>\n\n def adiciona_item(self):\n print('Adicionando item ao estoque')\n while 1:\n print('************* Menu Adicionar: ******************')\n print(\n \"\"\"Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair\"\"\"\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.create_categoria()\n elif opcao == '2':\n self.create_subcategoria()\n elif opcao == '3':\n pass\n elif opcao == '4':\n break\n self.menu_estoque()\n\n def menu_estoque(self):\n print('Sistema de Vendas ao Consumidor')\n print('****** MENU DE ESTOQUE *****')\n print(\n 'Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - Remover\\n4 - Alterar'\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.consulta_estoque()\n elif opcao == '2':\n self.adiciona_item()\n elif opcao == '3':\n self.remove_item()\n elif opcao == '4':\n self.altera_item()\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Estoque(object):\n <mask token>\n\n def save_categoria(self, categoria):\n pass\n <mask token>\n\n def save_produtos(self, produto):\n pass\n\n def create_categoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição\n \"\"\"\n print('- Criar CATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n categoria = Categoria(codigo, nome, descrição)\n if categoria not in self.categorias:\n self.categorias.append(categoria)\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n\n def create_produto(self):\n \"\"\"\"\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\n Produtos são itens que podem ser vendidos.\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\n\n TODELETE: Por enquanto foto recebe uma string qualquer\n\n \"\"\"\n if not len(self.subcategorias):\n print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n')\n self.create_subcategoria()\n else:\n print('- Cadastrar PRODUTO -')\n escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n estoquemax = input('Quantidade Maxima em Estoque: ')\n while not produtos.valida_estoque(estoquemax):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n estoquemin = input('Quantidade Minima em Estoque: ')\n while not produtos.valida_estoque(estoquemin):\n print('Valor Inválido!')\n estoquemin = input('Valor deve ser Numérico: ')\n valorvenda = input('Preço Unitário: ')\n while not produtos.valida_valorvenda(valorvenda):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n valorcompra = input('Valor de Compra: ')\n while not produtos.valida_valorvenda(valorcompra):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n foto = input('Arquivo de foto: ')\n subcategoria = 0\n for scat in self.subcategorias:\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\n subcategoria = scat\n break\n else:\n print(\n \"\"\"Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não\"\"\"\n )\n choice = input()\n if choice.lower() == 's' or choice == '1':\n self.create_subcategoria()\n else:\n self.create_produto()\n produto = Produtos(subcategoria, codigo, nome, descricao,\n estoquemax, estoquemin, valorvenda, valorcompra, foto)\n if produto not in self.produtos:\n self.produtos.append(produto)\n\n def low_stock_alarm(self):\n pass\n\n def consulta_estoque(self):\n print('Exibindo 
estoque')\n if not len(self.categorias):\n print('Não há Categorias Registrados!')\n else:\n for categoria in self.categorias:\n print(categoria, end=' ')\n print()\n if not len(self.subcategorias):\n print('Não há Subcategorias Registradas!')\n else:\n for subcategoria in self.subcategorias:\n print(subcategoria, end=' ')\n print()\n if not len(self.produtos):\n print('Não há Produtos Registrados!')\n else:\n for produto in self.produtos:\n print(produto, end=' ')\n self.menu_estoque()\n\n def altera_item(self):\n print('alterando item do estoque')\n self.menu_estoque()\n <mask token>\n\n def adiciona_item(self):\n print('Adicionando item ao estoque')\n while 1:\n print('************* Menu Adicionar: ******************')\n print(\n \"\"\"Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair\"\"\"\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.create_categoria()\n elif opcao == '2':\n self.create_subcategoria()\n elif opcao == '3':\n pass\n elif opcao == '4':\n break\n self.menu_estoque()\n\n def menu_estoque(self):\n print('Sistema de Vendas ao Consumidor')\n print('****** MENU DE ESTOQUE *****')\n print(\n 'Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - Remover\\n4 - Alterar'\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.consulta_estoque()\n elif opcao == '2':\n self.adiciona_item()\n elif opcao == '3':\n self.remove_item()\n elif opcao == '4':\n self.altera_item()\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\n<mask token>\n",
"step-4": "from src.produtos import *\n\n\nclass Estoque(object):\n\n def __init__(self):\n self.categorias = []\n self.subcategorias = []\n self.produtos = []\n self.menu_estoque()\n\n def save_categoria(self, categoria):\n pass\n\n def save_subcategorias(self, subcategoria):\n pass\n\n def save_produtos(self, produto):\n pass\n\n def create_categoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição\n \"\"\"\n print('- Criar CATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n categoria = Categoria(codigo, nome, descrição)\n if categoria not in self.categorias:\n self.categorias.append(categoria)\n\n def create_subcategoria(self):\n \"\"\"\"\n Cria uma categoria através dos dados recolhidos pelo formulário.\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\n \"\"\"\n if len(self.categorias) == 0:\n print('Você deve criar pelo menos uma CATEGORIA!\\n')\n self.create_categoria()\n print('- Criar SUBCATEGORIA -')\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n escolhe = input('CATEGORIA (Nome ou Código): ')\n categoria = 0\n for cat in self.categorias:\n if cat.nome == escolhe or cat.codigo == escolhe:\n categoria = cat\n break\n else:\n print(\n 'Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!'\n )\n self.create_categoria()\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\n if subcategoria not in self.subcategorias:\n self.subcategorias.append(subcategoria)\n\n def create_produto(self):\n \"\"\"\"\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\n Produtos são itens que podem ser vendidos.\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\n\n TODELETE: Por enquanto foto recebe uma string qualquer\n\n \"\"\"\n if not len(self.subcategorias):\n print('Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n')\n self.create_subcategoria()\n else:\n print('- Cadastrar PRODUTO -')\n escolhe = input('SUBCATEGORIA (Nome ou Código): ').lower()\n codigo = input('CÓDIGO: ').strip()\n nome = input('NOME: ').strip()\n descrição = input('DESCRIÇÃO: ').strip()\n estoquemax = input('Quantidade Maxima em Estoque: ')\n while not produtos.valida_estoque(estoquemax):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n estoquemin = input('Quantidade Minima em Estoque: ')\n while not produtos.valida_estoque(estoquemin):\n print('Valor Inválido!')\n estoquemin = input('Valor deve ser Numérico: ')\n valorvenda = input('Preço Unitário: ')\n while not produtos.valida_valorvenda(valorvenda):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n valorcompra = input('Valor de Compra: ')\n while not produtos.valida_valorvenda(valorcompra):\n print('Valor Inválido!')\n estoquemax = input('Valor deve ser Numérico: ')\n foto = input('Arquivo de foto: ')\n subcategoria = 0\n for scat in self.subcategorias:\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\n subcategoria = scat\n break\n else:\n print(\n \"\"\"Subcategoria não Encontrada!\nDeseja criar uma SUBCATEGORIA?\n1- Sim\n2 - Não\"\"\"\n )\n choice = input()\n if choice.lower() == 's' or choice == '1':\n self.create_subcategoria()\n else:\n self.create_produto()\n produto = Produtos(subcategoria, codigo, nome, descricao,\n estoquemax, estoquemin, valorvenda, valorcompra, foto)\n if produto not in self.produtos:\n self.produtos.append(produto)\n\n def low_stock_alarm(self):\n pass\n\n def consulta_estoque(self):\n print('Exibindo 
estoque')\n if not len(self.categorias):\n print('Não há Categorias Registrados!')\n else:\n for categoria in self.categorias:\n print(categoria, end=' ')\n print()\n if not len(self.subcategorias):\n print('Não há Subcategorias Registradas!')\n else:\n for subcategoria in self.subcategorias:\n print(subcategoria, end=' ')\n print()\n if not len(self.produtos):\n print('Não há Produtos Registrados!')\n else:\n for produto in self.produtos:\n print(produto, end=' ')\n self.menu_estoque()\n\n def altera_item(self):\n print('alterando item do estoque')\n self.menu_estoque()\n\n def remove_item(self):\n print('Removendo item do estoque')\n self.menu_estoque()\n\n def adiciona_item(self):\n print('Adicionando item ao estoque')\n while 1:\n print('************* Menu Adicionar: ******************')\n print(\n \"\"\"Digite Ação!\n1 - Adicionar Categoria\n2 - Adicionar Subcategoria\n3 - Adicionar Produtos\n4 - Sair\"\"\"\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.create_categoria()\n elif opcao == '2':\n self.create_subcategoria()\n elif opcao == '3':\n pass\n elif opcao == '4':\n break\n self.menu_estoque()\n\n def menu_estoque(self):\n print('Sistema de Vendas ao Consumidor')\n print('****** MENU DE ESTOQUE *****')\n print(\n 'Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - Remover\\n4 - Alterar'\n )\n opcao = input()\n while not self.valida_opcao(opcao):\n print('Opção Inválida!')\n opcao = input()\n if opcao == '1':\n self.consulta_estoque()\n elif opcao == '2':\n self.adiciona_item()\n elif opcao == '3':\n self.remove_item()\n elif opcao == '4':\n self.altera_item()\n\n def valida_opcao(self, opcao):\n if opcao.isdigit():\n return True\n else:\n return False\n\n\nestoque = Estoque()\n",
"step-5": "from src.produtos import *\r\n\r\n\r\nclass Estoque(object):\r\n def __init__(self):\r\n self.categorias = []\r\n self.subcategorias = []\r\n self.produtos = []\r\n self.menu_estoque()\r\n\r\n def save_categoria(self, categoria):\r\n pass\r\n\r\n def save_subcategorias(self, subcategoria):\r\n pass\r\n\r\n def save_produtos(self, produto):\r\n pass\r\n\r\n def create_categoria(self):\r\n \"\"\"\"\r\n Cria uma categoria através dos dados recolhidos pelo formulário.\r\n Os dados são: Codigo, nome e descrição\r\n \"\"\"\r\n print(\"- Criar CATEGORIA -\")\r\n codigo = input(\"CÓDIGO: \").strip()\r\n nome = input(\"NOME: \").strip()\r\n descrição = input(\"DESCRIÇÃO: \").strip()\r\n categoria = Categoria(codigo, nome, descrição)\r\n if categoria not in self.categorias:\r\n self.categorias.append(categoria)\r\n\r\n def create_subcategoria(self):\r\n \"\"\"\"\r\n Cria uma categoria através dos dados recolhidos pelo formulário.\r\n Os dados são: Codigo, nome e descrição e a passagem de um objeto categoria\r\n \"\"\"\r\n if len(self.categorias) == 0:\r\n print(\"Você deve criar pelo menos uma CATEGORIA!\\n\")\r\n self.create_categoria()\r\n print(\"- Criar SUBCATEGORIA -\")\r\n codigo = input(\"CÓDIGO: \").strip()\r\n nome = input(\"NOME: \").strip()\r\n descrição = input(\"DESCRIÇÃO: \").strip()\r\n escolhe = input(\"CATEGORIA (Nome ou Código): \")\r\n categoria = 0\r\n\r\n for cat in self.categorias:\r\n if cat.nome == escolhe or cat.codigo == escolhe:\r\n categoria = cat\r\n break\r\n else:\r\n print(\"Categoria não Encontrada!\\nVocê deve criar uma CATEGORIA!\")\r\n self.create_categoria()\r\n\r\n subcategoria = Subcategoria(categoria, codigo, nome, descrição)\r\n\r\n if subcategoria not in self.subcategorias:\r\n self.subcategorias.append(subcategoria)\r\n\r\n def create_produto(self):\r\n \"\"\"\"\r\n Cria produto a ser controlado pelo estoque. 
Um produto deve pertencer a uma subcategoria.\r\n Produtos são itens que podem ser vendidos.\r\n Possuem subcategoria, codigo, nome, descricao, estoquemax, estoquemin, valorvenda, valorcompra, foto\r\n\r\n TODELETE: Por enquanto foto recebe uma string qualquer\r\n\r\n \"\"\"\r\n # TODO: Implementar a foto no sistemas\r\n if not len(self.subcategorias):\r\n print(\"Produto deve ter CATEGORIA ou uma SUBCATEGORIA!\\n\")\r\n self.create_subcategoria()\r\n else:\r\n print(\"- Cadastrar PRODUTO -\")\r\n escolhe = input(\"SUBCATEGORIA (Nome ou Código): \").lower()\r\n codigo = input(\"CÓDIGO: \").strip()\r\n nome = input(\"NOME: \").strip()\r\n descrição = input(\"DESCRIÇÃO: \").strip()\r\n\r\n estoquemax = input(\"Quantidade Maxima em Estoque: \")\r\n while not produtos.valida_estoque(estoquemax):\r\n print(\"Valor Inválido!\")\r\n estoquemax = input(\"Valor deve ser Numérico: \")\r\n\r\n estoquemin = input(\"Quantidade Minima em Estoque: \")\r\n while not produtos.valida_estoque(estoquemin):\r\n print(\"Valor Inválido!\")\r\n estoquemin = input(\"Valor deve ser Numérico: \")\r\n\r\n valorvenda = input(\"Preço Unitário: \")\r\n while not produtos.valida_valorvenda(valorvenda):\r\n print(\"Valor Inválido!\")\r\n estoquemax = input(\"Valor deve ser Numérico: \")\r\n\r\n valorcompra = input(\"Valor de Compra: \")\r\n while not produtos.valida_valorvenda(valorcompra):\r\n print(\"Valor Inválido!\")\r\n estoquemax = input(\"Valor deve ser Numérico: \")\r\n\r\n foto = input(\"Arquivo de foto: \")\r\n\r\n subcategoria = 0\r\n\r\n for scat in self.subcategorias:\r\n if scat.nome.lower() == escolhe or scat.codigo == escolhe:\r\n subcategoria = scat\r\n break\r\n else:\r\n print(\"Subcategoria não Encontrada!\\nDeseja criar uma SUBCATEGORIA?\\n1- Sim\\n2 - Não\")\r\n choice = input()\r\n if choice.lower() == 's' or choice == '1':\r\n self.create_subcategoria()\r\n else:\r\n self.create_produto()\r\n\r\n produto = Produtos( subcategoria, codigo, nome, descricao, estoquemax, 
estoquemin, valorvenda, valorcompra, foto)\r\n\r\n if produto not in self.produtos:\r\n self.produtos.append(produto)\r\n\r\n # funcionalidade pedida na especificação\r\n\r\n def low_stock_alarm(self): # aviso de estoque baixo\r\n pass\r\n\r\n def consulta_estoque(self): # exibe itens disponiveis no estoque\r\n print(\"Exibindo estoque\")\r\n if not len(self.categorias):\r\n print(\"Não há Categorias Registrados!\")\r\n else:\r\n for categoria in self.categorias:\r\n print(categoria, end=\" \")\r\n print()\r\n if not len(self.subcategorias):\r\n print(\"Não há Subcategorias Registradas!\")\r\n else:\r\n for subcategoria in self.subcategorias:\r\n print(subcategoria, end=\" \")\r\n print()\r\n if not len(self.produtos):\r\n print(\"Não há Produtos Registrados!\")\r\n else:\r\n for produto in self.produtos:\r\n print(produto, end=\" \")\r\n\r\n self.menu_estoque()\r\n\r\n def altera_item(self): # altera um item disponivel no estoque\r\n print(\"alterando item do estoque\")\r\n self.menu_estoque()\r\n\r\n def remove_item(self): # remove um item disponivel no estoque - n remover se o item ainda tem produtos no estoque\r\n print(\"Removendo item do estoque\")\r\n self.menu_estoque()\r\n\r\n def adiciona_item(self): # adiciona novo item ao estoque\r\n print(\"Adicionando item ao estoque\")\r\n while 1:\r\n print(\"************* Menu Adicionar: ******************\")\r\n print(\"Digite Ação!\\n1 - Adicionar Categoria\\n2 - Adicionar Subcategoria\\n3 - Adicionar Produtos\\n4 - Sair\")\r\n opcao = input()\r\n while not self.valida_opcao(opcao):\r\n print(\"Opção Inválida!\")\r\n opcao = input()\r\n if opcao == '1':\r\n self.create_categoria()\r\n elif opcao == '2':\r\n self.create_subcategoria()\r\n elif opcao == '3':\r\n pass\r\n elif opcao == '4':\r\n break\r\n self.menu_estoque()\r\n\r\n def menu_estoque(self):\r\n print(\"Sistema de Vendas ao Consumidor\")\r\n print(\"****** MENU DE ESTOQUE *****\")\r\n print(\"Digite Ação!\\n1 - Consultar Estoque\\n2 - Adicionar\\n3 - 
Remover\\n4 - Alterar\")\r\n opcao = input()\r\n\r\n while not self.valida_opcao(opcao):\r\n print(\"Opção Inválida!\")\r\n opcao = input()\r\n\r\n if opcao == '1':\r\n self.consulta_estoque()\r\n elif opcao == '2':\r\n self.adiciona_item()\r\n elif opcao == '3':\r\n self.remove_item()\r\n elif opcao == '4':\r\n self.altera_item()\r\n\r\n def valida_opcao(self, opcao):\r\n if opcao.isdigit():\r\n return True\r\n else:\r\n return False\r\n\r\nestoque = Estoque()\r\n",
"step-ids": [
7,
11,
12,
17,
18
]
}
|
[
7,
11,
12,
17,
18
] |
import math
import random
import time
import numpy as np
class NeuralNetwork:
    """Small fully connected network (25 inputs -> one hidden layer -> 10
    outputs) trained with backpropagation to classify 5x5 digit bitmaps."""
    # 5x5 binary bitmaps of the digits 0..9, flattened row-major into
    # 25-element lists; digits[d] is the training input for digit d.
    digits = [
        [
            1,1,1,1,1,
            1,0,0,0,1,
            1,0,0,0,1,
            1,0,0,0,1,
            1,1,1,1,1
        ],
        [
            0,0,1,0,0,
            0,0,1,0,0,
            0,0,1,0,0,
            0,0,1,0,0,
            0,0,1,0,0
        ],
        [
            1,1,1,1,1,
            0,0,0,0,1,
            1,1,1,1,1,
            1,0,0,0,0,
            1,1,1,1,1
        ],
        [
            1,1,1,1,1,
            0,0,0,0,1,
            1,1,1,1,1,
            0,0,0,0,1,
            1,1,1,1,1
        ],
        [
            1,0,0,0,1,
            1,0,0,0,1,
            1,1,1,1,1,
            0,0,0,0,1,
            0,0,0,0,1
        ],
        [
            1,1,1,1,1,
            1,0,0,0,0,
            1,1,1,1,1,
            0,0,0,0,1,
            1,1,1,1,1
        ],
        [
            1,1,1,1,1,
            1,0,0,0,0,
            1,1,1,1,1,
            1,0,0,0,1,
            1,1,1,1,1
        ],
        [
            1,1,1,1,1,
            0,0,0,1,0,
            0,0,1,0,0,
            0,1,0,0,0,
            1,0,0,0,0
        ],
        [
            1,1,1,1,1,
            1,0,0,0,1,
            1,1,1,1,1,
            1,0,0,0,1,
            1,1,1,1,1
        ],
        [
            1,1,1,1,1,
            1,0,0,0,1,
            1,1,1,1,1,
            0,0,0,0,1,
            0,0,0,0,1
        ]
    ]
    # One-hot target vectors: base_output[d] is the desired network output
    # when digits[d] is the input.
    base_output = [
        [1,0,0,0,0,0,0,0,0,0],
        [0,1,0,0,0,0,0,0,0,0],
        [0,0,1,0,0,0,0,0,0,0],
        [0,0,0,1,0,0,0,0,0,0],
        [0,0,0,0,1,0,0,0,0,0],
        [0,0,0,0,0,1,0,0,0,0],
        [0,0,0,0,0,0,1,0,0,0],
        [0,0,0,0,0,0,0,1,0,0],
        [0,0,0,0,0,0,0,0,1,0],
        [0,0,0,0,0,0,0,0,0,1]
    ]
    # Class-level default; verbose() assigns an instance attribute that
    # shadows this and enables per-step debug printing.
    show_operations = False
    def __init__(self, seed = 5, alpha = 0.1, min_error_percentage = 0.0005, input_size = 25, output_size = 10, hidden_num = 5):
        """Store hyperparameters: RNG seed, learning rate (alpha), the mean
        squared error threshold that stops training, and the layer sizes."""
        self.seed = seed
        self.alpha = alpha
        self.min_error_percentage = min_error_percentage
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_num = hidden_num
    def withSeed(self, seed):
        # Fluent setter; returns self for chaining.
        self.seed = seed
        return self
    def withAlpha(self, alpha):
        # Fluent setter for the learning rate; returns self for chaining.
        self.alpha = alpha
        return self
    def withMinErrorPercentage(self, min_error_percentage):
        # Fluent setter for the stopping threshold; returns self for chaining.
        self.min_error_percentage = min_error_percentage
        return self
    def verbose(self, show_operations):
        # Fluent toggle for debug printing; returns self for chaining.
        self.show_operations = show_operations
        return self
    def withHiddenLabels(self, hidden_num):
        # Fluent setter for the hidden-layer size; returns self for chaining.
        self.hidden_num = hidden_num
        return self
    def randomize(self):
        """Build a fresh 2-layer network seeded by self.seed.

        Layout: [hidden_layer, output_layer]; each neuron is a weight list
        whose last entry is the bias weight (ffnn appends a constant 1 to
        each layer's inputs).  Initial weights are integers in {-1, 0}.
        """
        random.seed(self.seed)
        neural_network = [
            [
                [random.randint(-1, 0) for _ in range(self.input_size + 1)] for _ in range(self.hidden_num)
            ],
            [
                [random.randint(-1, 0) for _ in range(self.hidden_num + 1)] for _ in range(self.output_size)
            ]
        ]
        return neural_network
    def sigmoid(self, x):
        """Logistic activation: 1 / (1 + e^-x)."""
        return 1 / (1 + math.exp(-x))
    def product(self, v, w):
        """Dot product of two equal-length sequences."""
        return sum([a * b for a, b in zip(v, w)])
    def neuron_output(self, weights, inputs):
        """Sigmoid of the weighted sum (weights include the bias slot)."""
        return self.sigmoid(self.product(weights, inputs))
    def ffnn(self, neural_network, inputs):
        """Feed-forward pass.

        Returns the list of per-layer outputs (last element is the network
        output).  A constant 1 is appended to each layer's input so that the
        final weight of every neuron acts as a bias.
        """
        outputs = []
        for label in neural_network:
            inputs = inputs + [1]
            output = [self.neuron_output(neuron, inputs) for neuron in label]
            outputs.append(output)
            inputs = output
        return outputs
    def back_propagation(self, digit, inputs, target):
        """One gradient step on a single training example.

        Despite its name, `digit` is the whole network ([hidden, output]
        weight layers); `inputs` is the 25-pixel bitmap and `target` the
        one-hot label.  Weight lists are mutated IN PLACE; the returned
        (new_hidden, new_output) reference those same mutated lists.
        Returns (new_hidden, new_output, half_squared_error).
        """
        hidden_output, output = self.ffnn(digit, inputs)
        new_output = []
        new_hidden = []
        # Half squared error; the comprehension variables shadow the outer
        # `output`/`target` names on purpose.
        error = sum((output - target) * (output - target) for output, target in zip(output, target)) * 0.5
        # Output-layer delta: sigmoid'(o) * (o - t).
        delta_output = [output * (1 - output) * (output - target) for output, target in zip(output, target)]
        for i, output_neuron in enumerate(digit[-1]):
            for j, hidden_output_current in enumerate(hidden_output + [1]):
                output_neuron[j] -= delta_output[i] * hidden_output_current * self.alpha
            new_output.append(output_neuron)
            if (self.show_operations):
                print("Neuron weights: ", i, output_neuron)
        # NOTE(review): hidden_delta reads digit[-1] AFTER the output-layer
        # weights were updated above; textbook backprop uses the pre-update
        # weights here — confirm whether this ordering is intentional.
        hidden_delta = [hidden_output_current * (1 - hidden_output_current) * self.product(delta_output, [n[i] for n in digit[-1]]) for i, hidden_output_current in enumerate(hidden_output)]
        for i, hidden_neuron in enumerate(digit[0]):
            for j, input_ in enumerate(inputs + [1]):
                hidden_neuron[j] -= hidden_delta[i] * input_ * self.alpha
            new_hidden.append(hidden_neuron)
            if (self.show_operations):
                print("Hidden neuron weights: ", i, hidden_neuron)
        return new_hidden, new_output, error
    def randomTraining(self):
        """Train on the 10 built-in digit bitmaps until the mean half-squared
        error drops below min_error_percentage; stores the trained network in
        self.output_data.  The local name `output` holds the whole network.
        """
        print("Starting training...")
        start = time.time()
        output = self.randomize()
        sq_error = 1
        iterations = 1
        print("Initial random network: ", output)
        while sq_error > self.min_error_percentage:
            sq_error = 0
            for i in range(len(self.digits)):
                hidden, output, error = self.back_propagation(output, self.digits[i], self.base_output[i])
                output = [hidden, output]
                sq_error += error
            sq_error = sq_error / len(self.digits)
            if (self.show_operations):
                print("Iterations: ", iterations, ", error percentage: ", sq_error)
            iterations += 1
        self.output_data = output
        end = time.time()
        elapsed = end - start
        print("Trained finished in: ", elapsed, " seconds")
        print("Total iterations: ", iterations)
        print("Error percentage: ", sq_error)
        print("Output result: ", self.output_data)
    def guessWith(self, output):
        """Return the index of the output value closest to 1 (the predicted
        digit)."""
        index = 0
        closest_dif = abs(output[0] - 1)
        for i, value in enumerate(output):
            current_dif = abs(value - 1)
            if (current_dif < closest_dif):
                closest_dif = current_dif
                index = i
        return index
    def test(self, input_):
        """Run the trained network (requires randomTraining() first) on a
        25-pixel bitmap and print the predicted digit."""
        result = self.ffnn(self.output_data, input_)[-1]
        print("Output: ", result)
        print("Your number probably is: ", self.guessWith(result))
|
normal
|
{
"blob_id": "0af45914c8c111a42b0b9684f5f0ee19ef5eeb70",
"index": 7548,
"step-1": "<mask token>\n\n\nclass NeuralNetwork:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def withSeed(self, seed):\n self.seed = seed\n return self\n <mask token>\n\n def withMinErrorPercentage(self, min_error_percentage):\n self.min_error_percentage = min_error_percentage\n return self\n\n def verbose(self, show_operations):\n self.show_operations = show_operations\n return self\n <mask token>\n\n def randomize(self):\n random.seed(self.seed)\n neural_network = [[[random.randint(-1, 0) for _ in range(self.\n input_size + 1)] for _ in range(self.hidden_num)], [[random.\n randint(-1, 0) for _ in range(self.hidden_num + 1)] for _ in\n range(self.output_size)]]\n return neural_network\n\n def sigmoid(self, x):\n return 1 / (1 + math.exp(-x))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test(self, input_):\n result = self.ffnn(self.output_data, input_)[-1]\n print('Output: ', result)\n print('Your number probably is: ', self.guessWith(result))\n",
"step-2": "<mask token>\n\n\nclass NeuralNetwork:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def withSeed(self, seed):\n self.seed = seed\n return self\n <mask token>\n\n def withMinErrorPercentage(self, min_error_percentage):\n self.min_error_percentage = min_error_percentage\n return self\n\n def verbose(self, show_operations):\n self.show_operations = show_operations\n return self\n <mask token>\n\n def randomize(self):\n random.seed(self.seed)\n neural_network = [[[random.randint(-1, 0) for _ in range(self.\n input_size + 1)] for _ in range(self.hidden_num)], [[random.\n randint(-1, 0) for _ in range(self.hidden_num + 1)] for _ in\n range(self.output_size)]]\n return neural_network\n\n def sigmoid(self, x):\n return 1 / (1 + math.exp(-x))\n\n def product(self, v, w):\n return sum([(a * b) for a, b in zip(v, w)])\n\n def neuron_output(self, weights, inputs):\n return self.sigmoid(self.product(weights, inputs))\n <mask token>\n\n def back_propagation(self, digit, inputs, target):\n hidden_output, output = self.ffnn(digit, inputs)\n new_output = []\n new_hidden = []\n error = sum((output - target) * (output - target) for output,\n target in zip(output, target)) * 0.5\n delta_output = [(output * (1 - output) * (output - target)) for \n output, target in zip(output, target)]\n for i, output_neuron in enumerate(digit[-1]):\n for j, hidden_output_current in enumerate(hidden_output + [1]):\n output_neuron[j] -= delta_output[i\n ] * hidden_output_current * self.alpha\n new_output.append(output_neuron)\n if self.show_operations:\n print('Neuron weights: ', i, output_neuron)\n hidden_delta = [(hidden_output_current * (1 - hidden_output_current\n ) * self.product(delta_output, [n[i] for n in digit[-1]])) for \n i, hidden_output_current in enumerate(hidden_output)]\n for i, hidden_neuron in enumerate(digit[0]):\n for j, input_ in enumerate(inputs + [1]):\n hidden_neuron[j] -= hidden_delta[i] * input_ * self.alpha\n new_hidden.append(hidden_neuron)\n if 
self.show_operations:\n print('Hidden neuron weights: ', i, hidden_neuron)\n return new_hidden, new_output, error\n\n def randomTraining(self):\n print('Starting training...')\n start = time.time()\n output = self.randomize()\n sq_error = 1\n iterations = 1\n print('Initial random network: ', output)\n while sq_error > self.min_error_percentage:\n sq_error = 0\n for i in range(len(self.digits)):\n hidden, output, error = self.back_propagation(output, self.\n digits[i], self.base_output[i])\n output = [hidden, output]\n sq_error += error\n sq_error = sq_error / len(self.digits)\n if self.show_operations:\n print('Iterations: ', iterations, ', error percentage: ',\n sq_error)\n iterations += 1\n self.output_data = output\n end = time.time()\n elapsed = end - start\n print('Trained finished in: ', elapsed, ' seconds')\n print('Total iterations: ', iterations)\n print('Error percentage: ', sq_error)\n print('Output result: ', self.output_data)\n\n def guessWith(self, output):\n index = 0\n closest_dif = abs(output[0] - 1)\n for i, value in enumerate(output):\n current_dif = abs(value - 1)\n if current_dif < closest_dif:\n closest_dif = current_dif\n index = i\n return index\n\n def test(self, input_):\n result = self.ffnn(self.output_data, input_)[-1]\n print('Output: ', result)\n print('Your number probably is: ', self.guessWith(result))\n",
"step-3": "<mask token>\n\n\nclass NeuralNetwork:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def withSeed(self, seed):\n self.seed = seed\n return self\n <mask token>\n\n def withMinErrorPercentage(self, min_error_percentage):\n self.min_error_percentage = min_error_percentage\n return self\n\n def verbose(self, show_operations):\n self.show_operations = show_operations\n return self\n <mask token>\n\n def randomize(self):\n random.seed(self.seed)\n neural_network = [[[random.randint(-1, 0) for _ in range(self.\n input_size + 1)] for _ in range(self.hidden_num)], [[random.\n randint(-1, 0) for _ in range(self.hidden_num + 1)] for _ in\n range(self.output_size)]]\n return neural_network\n\n def sigmoid(self, x):\n return 1 / (1 + math.exp(-x))\n\n def product(self, v, w):\n return sum([(a * b) for a, b in zip(v, w)])\n\n def neuron_output(self, weights, inputs):\n return self.sigmoid(self.product(weights, inputs))\n\n def ffnn(self, neural_network, inputs):\n outputs = []\n for label in neural_network:\n inputs = inputs + [1]\n output = [self.neuron_output(neuron, inputs) for neuron in label]\n outputs.append(output)\n inputs = output\n return outputs\n\n def back_propagation(self, digit, inputs, target):\n hidden_output, output = self.ffnn(digit, inputs)\n new_output = []\n new_hidden = []\n error = sum((output - target) * (output - target) for output,\n target in zip(output, target)) * 0.5\n delta_output = [(output * (1 - output) * (output - target)) for \n output, target in zip(output, target)]\n for i, output_neuron in enumerate(digit[-1]):\n for j, hidden_output_current in enumerate(hidden_output + [1]):\n output_neuron[j] -= delta_output[i\n ] * hidden_output_current * self.alpha\n new_output.append(output_neuron)\n if self.show_operations:\n print('Neuron weights: ', i, output_neuron)\n hidden_delta = [(hidden_output_current * (1 - hidden_output_current\n ) * self.product(delta_output, [n[i] for n in digit[-1]])) for \n i, 
hidden_output_current in enumerate(hidden_output)]\n for i, hidden_neuron in enumerate(digit[0]):\n for j, input_ in enumerate(inputs + [1]):\n hidden_neuron[j] -= hidden_delta[i] * input_ * self.alpha\n new_hidden.append(hidden_neuron)\n if self.show_operations:\n print('Hidden neuron weights: ', i, hidden_neuron)\n return new_hidden, new_output, error\n\n def randomTraining(self):\n print('Starting training...')\n start = time.time()\n output = self.randomize()\n sq_error = 1\n iterations = 1\n print('Initial random network: ', output)\n while sq_error > self.min_error_percentage:\n sq_error = 0\n for i in range(len(self.digits)):\n hidden, output, error = self.back_propagation(output, self.\n digits[i], self.base_output[i])\n output = [hidden, output]\n sq_error += error\n sq_error = sq_error / len(self.digits)\n if self.show_operations:\n print('Iterations: ', iterations, ', error percentage: ',\n sq_error)\n iterations += 1\n self.output_data = output\n end = time.time()\n elapsed = end - start\n print('Trained finished in: ', elapsed, ' seconds')\n print('Total iterations: ', iterations)\n print('Error percentage: ', sq_error)\n print('Output result: ', self.output_data)\n\n def guessWith(self, output):\n index = 0\n closest_dif = abs(output[0] - 1)\n for i, value in enumerate(output):\n current_dif = abs(value - 1)\n if current_dif < closest_dif:\n closest_dif = current_dif\n index = i\n return index\n\n def test(self, input_):\n result = self.ffnn(self.output_data, input_)[-1]\n print('Output: ', result)\n print('Your number probably is: ', self.guessWith(result))\n",
"step-4": "<mask token>\n\n\nclass NeuralNetwork:\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, seed=5, alpha=0.1, min_error_percentage=0.0005,\n input_size=25, output_size=10, hidden_num=5):\n self.seed = seed\n self.alpha = alpha\n self.min_error_percentage = min_error_percentage\n self.input_size = input_size\n self.output_size = output_size\n self.hidden_num = hidden_num\n\n def withSeed(self, seed):\n self.seed = seed\n return self\n <mask token>\n\n def withMinErrorPercentage(self, min_error_percentage):\n self.min_error_percentage = min_error_percentage\n return self\n\n def verbose(self, show_operations):\n self.show_operations = show_operations\n return self\n <mask token>\n\n def randomize(self):\n random.seed(self.seed)\n neural_network = [[[random.randint(-1, 0) for _ in range(self.\n input_size + 1)] for _ in range(self.hidden_num)], [[random.\n randint(-1, 0) for _ in range(self.hidden_num + 1)] for _ in\n range(self.output_size)]]\n return neural_network\n\n def sigmoid(self, x):\n return 1 / (1 + math.exp(-x))\n\n def product(self, v, w):\n return sum([(a * b) for a, b in zip(v, w)])\n\n def neuron_output(self, weights, inputs):\n return self.sigmoid(self.product(weights, inputs))\n\n def ffnn(self, neural_network, inputs):\n outputs = []\n for label in neural_network:\n inputs = inputs + [1]\n output = [self.neuron_output(neuron, inputs) for neuron in label]\n outputs.append(output)\n inputs = output\n return outputs\n\n def back_propagation(self, digit, inputs, target):\n hidden_output, output = self.ffnn(digit, inputs)\n new_output = []\n new_hidden = []\n error = sum((output - target) * (output - target) for output,\n target in zip(output, target)) * 0.5\n delta_output = [(output * (1 - output) * (output - target)) for \n output, target in zip(output, target)]\n for i, output_neuron in enumerate(digit[-1]):\n for j, hidden_output_current in enumerate(hidden_output + [1]):\n output_neuron[j] -= delta_output[i\n ] * 
hidden_output_current * self.alpha\n new_output.append(output_neuron)\n if self.show_operations:\n print('Neuron weights: ', i, output_neuron)\n hidden_delta = [(hidden_output_current * (1 - hidden_output_current\n ) * self.product(delta_output, [n[i] for n in digit[-1]])) for \n i, hidden_output_current in enumerate(hidden_output)]\n for i, hidden_neuron in enumerate(digit[0]):\n for j, input_ in enumerate(inputs + [1]):\n hidden_neuron[j] -= hidden_delta[i] * input_ * self.alpha\n new_hidden.append(hidden_neuron)\n if self.show_operations:\n print('Hidden neuron weights: ', i, hidden_neuron)\n return new_hidden, new_output, error\n\n def randomTraining(self):\n print('Starting training...')\n start = time.time()\n output = self.randomize()\n sq_error = 1\n iterations = 1\n print('Initial random network: ', output)\n while sq_error > self.min_error_percentage:\n sq_error = 0\n for i in range(len(self.digits)):\n hidden, output, error = self.back_propagation(output, self.\n digits[i], self.base_output[i])\n output = [hidden, output]\n sq_error += error\n sq_error = sq_error / len(self.digits)\n if self.show_operations:\n print('Iterations: ', iterations, ', error percentage: ',\n sq_error)\n iterations += 1\n self.output_data = output\n end = time.time()\n elapsed = end - start\n print('Trained finished in: ', elapsed, ' seconds')\n print('Total iterations: ', iterations)\n print('Error percentage: ', sq_error)\n print('Output result: ', self.output_data)\n\n def guessWith(self, output):\n index = 0\n closest_dif = abs(output[0] - 1)\n for i, value in enumerate(output):\n current_dif = abs(value - 1)\n if current_dif < closest_dif:\n closest_dif = current_dif\n index = i\n return index\n\n def test(self, input_):\n result = self.ffnn(self.output_data, input_)[-1]\n print('Output: ', result)\n print('Your number probably is: ', self.guessWith(result))\n",
"step-5": "import math\nimport random\nimport time\nimport numpy as np\n\nclass NeuralNetwork:\n\n digits = [\n [\n 1,1,1,1,1,\n 1,0,0,0,1,\n 1,0,0,0,1,\n 1,0,0,0,1,\n 1,1,1,1,1\n ],\n [\n 0,0,1,0,0,\n 0,0,1,0,0,\n 0,0,1,0,0,\n 0,0,1,0,0,\n 0,0,1,0,0\n ],\n [\n 1,1,1,1,1,\n 0,0,0,0,1,\n 1,1,1,1,1,\n 1,0,0,0,0,\n 1,1,1,1,1\n ],\n [\n 1,1,1,1,1,\n 0,0,0,0,1,\n 1,1,1,1,1,\n 0,0,0,0,1,\n 1,1,1,1,1\n ],\n [\n 1,0,0,0,1,\n 1,0,0,0,1,\n 1,1,1,1,1,\n 0,0,0,0,1,\n 0,0,0,0,1\n ],\n [\n 1,1,1,1,1,\n 1,0,0,0,0,\n 1,1,1,1,1,\n 0,0,0,0,1,\n 1,1,1,1,1\n ],\n [\n 1,1,1,1,1,\n 1,0,0,0,0,\n 1,1,1,1,1,\n 1,0,0,0,1,\n 1,1,1,1,1\n ],\n [\n 1,1,1,1,1,\n 0,0,0,1,0,\n 0,0,1,0,0,\n 0,1,0,0,0,\n 1,0,0,0,0\n ],\n [\n 1,1,1,1,1,\n 1,0,0,0,1,\n 1,1,1,1,1,\n 1,0,0,0,1,\n 1,1,1,1,1\n ],\n [\n 1,1,1,1,1,\n 1,0,0,0,1,\n 1,1,1,1,1,\n 0,0,0,0,1,\n 0,0,0,0,1\n ]\n ]\n\n base_output = [\n [1,0,0,0,0,0,0,0,0,0],\n [0,1,0,0,0,0,0,0,0,0],\n [0,0,1,0,0,0,0,0,0,0],\n [0,0,0,1,0,0,0,0,0,0],\n [0,0,0,0,1,0,0,0,0,0],\n [0,0,0,0,0,1,0,0,0,0],\n [0,0,0,0,0,0,1,0,0,0],\n [0,0,0,0,0,0,0,1,0,0],\n [0,0,0,0,0,0,0,0,1,0],\n [0,0,0,0,0,0,0,0,0,1]\n ]\n\n show_operations = False\n\n def __init__(self, seed = 5, alpha = 0.1, min_error_percentage = 0.0005, input_size = 25, output_size = 10, hidden_num = 5):\n self.seed = seed\n self.alpha = alpha\n self.min_error_percentage = min_error_percentage\n self.input_size = input_size\n self.output_size = output_size\n self.hidden_num = hidden_num\n \n def withSeed(self, seed):\n self.seed = seed\n return self\n\n def withAlpha(self, alpha):\n self.alpha = alpha\n return self\n\n def withMinErrorPercentage(self, min_error_percentage):\n self.min_error_percentage = min_error_percentage\n return self\n\n def verbose(self, show_operations):\n self.show_operations = show_operations\n return self\n\n def withHiddenLabels(self, hidden_num):\n self.hidden_num = hidden_num\n return self\n\n def randomize(self):\n random.seed(self.seed)\n neural_network = [\n [\n [random.randint(-1, 0) 
for _ in range(self.input_size + 1)] for _ in range(self.hidden_num)\n ],\n [\n [random.randint(-1, 0) for _ in range(self.hidden_num + 1)] for _ in range(self.output_size)\n ]\n ]\n return neural_network\n \n def sigmoid(self, x):\n return 1 / (1 + math.exp(-x))\n\n def product(self, v, w):\n return sum([a * b for a, b in zip(v, w)])\n\n def neuron_output(self, weights, inputs):\n return self.sigmoid(self.product(weights, inputs))\n\n def ffnn(self, neural_network, inputs):\n outputs = []\n for label in neural_network:\n inputs = inputs + [1]\n output = [self.neuron_output(neuron, inputs) for neuron in label]\n outputs.append(output)\n inputs = output\n return outputs\n\n def back_propagation(self, digit, inputs, target):\n hidden_output, output = self.ffnn(digit, inputs)\n new_output = []\n new_hidden = []\n \n error = sum((output - target) * (output - target) for output, target in zip(output, target)) * 0.5\n delta_output = [output * (1 - output) * (output - target) for output, target in zip(output, target)]\n \n for i, output_neuron in enumerate(digit[-1]):\n for j, hidden_output_current in enumerate(hidden_output + [1]):\n output_neuron[j] -= delta_output[i] * hidden_output_current * self.alpha\n new_output.append(output_neuron)\n if (self.show_operations):\n print(\"Neuron weights: \", i, output_neuron)\n \n hidden_delta = [hidden_output_current * (1 - hidden_output_current) * self.product(delta_output, [n[i] for n in digit[-1]]) for i, hidden_output_current in enumerate(hidden_output)]\n \n for i, hidden_neuron in enumerate(digit[0]):\n for j, input_ in enumerate(inputs + [1]):\n hidden_neuron[j] -= hidden_delta[i] * input_ * self.alpha\n new_hidden.append(hidden_neuron)\n if (self.show_operations):\n print(\"Hidden neuron weights: \", i, hidden_neuron)\n\n return new_hidden, new_output, error \n \n def randomTraining(self):\n print(\"Starting training...\")\n start = time.time()\n output = self.randomize()\n sq_error = 1\n iterations = 1\n\n print(\"Initial 
random network: \", output)\n\n while sq_error > self.min_error_percentage:\n sq_error = 0\n for i in range(len(self.digits)):\n hidden, output, error = self.back_propagation(output, self.digits[i], self.base_output[i])\n output = [hidden, output]\n sq_error += error\n sq_error = sq_error / len(self.digits)\n if (self.show_operations):\n print(\"Iterations: \", iterations, \", error percentage: \", sq_error)\n iterations += 1\n \n self.output_data = output\n end = time.time()\n elapsed = end - start\n print(\"Trained finished in: \", elapsed, \" seconds\")\n print(\"Total iterations: \", iterations)\n print(\"Error percentage: \", sq_error)\n print(\"Output result: \", self.output_data)\n\n def guessWith(self, output):\n index = 0\n closest_dif = abs(output[0] - 1)\n for i, value in enumerate(output):\n current_dif = abs(value - 1)\n if (current_dif < closest_dif):\n closest_dif = current_dif\n index = i\n return index\n\n def test(self, input_):\n result = self.ffnn(self.output_data, input_)[-1]\n print(\"Output: \", result)\n print(\"Your number probably is: \", self.guessWith(result))\n",
"step-ids": [
7,
12,
13,
14,
19
]
}
|
[
7,
12,
13,
14,
19
] |
from util import *
def K_step(x):
    """Evaluation rule for the continuant K over a 1-D symbolic sequence.

    A shape-less (scalar/empty) argument evaluates to 1; a length-2 prefix
    evaluates to its second element; otherwise Euler's recurrence
    K(x[:n]) = K(x[:n-1]) * x[n-1] + K(x[:n-2]) is returned piecewise.
    """
    if not x.shape:
        return S.One
    assert len(x.shape) == 1
    size = x.shape[0]
    if size == 2:
        return x[1]
    recurrence = K(x[:size - 1]) * x[size - 1] + K(x[:size - 2])
    return Piecewise(
        (1, Equal(size, 1)),
        (x[1], Equal(size, 2)),
        (recurrence, True))
# K is the integer-valued symbolic continuant function; K_step supplies its
# evaluation rule and shape=() makes each application scalar.
K = Function.K(integer=True, eval=K_step, shape=())
@apply
def apply(self):
    """State the continuant recurrence K(x[:n+2]) = K(x[:n]) + K(x[:n+1]) * x[n+1].

    `self` must be a K(...) application (asserted); the returned Equal is the
    proposition to be proven.  The @apply decorator presumably comes from
    `util` — TODO confirm its contract.
    """
    assert self.is_K
    x = self.arg
    n = x.shape[0]
    n -= 2
    assert n > 0  # the recurrence needs a prefix of length at least 3
    return Equal(self, K(x[:n]) + K(x[:n + 1]) * x[n + 1])
@prove
def prove(Eq):
    """Prove the recurrence by unfolding K's defining equation (defun) on the
    left-hand side of the stated proposition."""
    x = Symbol(integer=True, shape=(oo,))
    n = Symbol(integer=True, positive=True)
    Eq << apply(K(x[:n + 2]))
    Eq << Eq[-1].this.lhs.defun()
# Script entry point: run() is presumably the util test driver — TODO confirm.
if __name__ == '__main__':
    run()
# created on 2021-08-18
|
normal
|
{
"blob_id": "b00c07ee3cdba55800c9701b7b8b0e3c9079e9f8",
"index": 6272,
"step-1": "<mask token>\n\n\ndef K_step(x):\n if not x.shape:\n return S.One\n assert len(x.shape) == 1\n n = x.shape[0]\n if n == 2:\n return x[1]\n return Piecewise((1, Equal(n, 1)), (x[1], Equal(n, 2)), (K(x[:n - 1]) *\n x[n - 1] + K(x[:n - 2]), True))\n\n\n<mask token>\n\n\n@apply\ndef apply(self):\n assert self.is_K\n x = self.arg\n n = x.shape[0]\n n -= 2\n assert n > 0\n return Equal(self, K(x[:n]) + K(x[:n + 1]) * x[n + 1])\n\n\n@prove\ndef prove(Eq):\n x = Symbol(integer=True, shape=(oo,))\n n = Symbol(integer=True, positive=True)\n Eq << apply(K(x[:n + 2]))\n Eq << Eq[-1].this.lhs.defun()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef K_step(x):\n if not x.shape:\n return S.One\n assert len(x.shape) == 1\n n = x.shape[0]\n if n == 2:\n return x[1]\n return Piecewise((1, Equal(n, 1)), (x[1], Equal(n, 2)), (K(x[:n - 1]) *\n x[n - 1] + K(x[:n - 2]), True))\n\n\n<mask token>\n\n\n@apply\ndef apply(self):\n assert self.is_K\n x = self.arg\n n = x.shape[0]\n n -= 2\n assert n > 0\n return Equal(self, K(x[:n]) + K(x[:n + 1]) * x[n + 1])\n\n\n@prove\ndef prove(Eq):\n x = Symbol(integer=True, shape=(oo,))\n n = Symbol(integer=True, positive=True)\n Eq << apply(K(x[:n + 2]))\n Eq << Eq[-1].this.lhs.defun()\n\n\nif __name__ == '__main__':\n run()\n",
"step-3": "<mask token>\n\n\ndef K_step(x):\n if not x.shape:\n return S.One\n assert len(x.shape) == 1\n n = x.shape[0]\n if n == 2:\n return x[1]\n return Piecewise((1, Equal(n, 1)), (x[1], Equal(n, 2)), (K(x[:n - 1]) *\n x[n - 1] + K(x[:n - 2]), True))\n\n\nK = Function.K(integer=True, eval=K_step, shape=())\n\n\n@apply\ndef apply(self):\n assert self.is_K\n x = self.arg\n n = x.shape[0]\n n -= 2\n assert n > 0\n return Equal(self, K(x[:n]) + K(x[:n + 1]) * x[n + 1])\n\n\n@prove\ndef prove(Eq):\n x = Symbol(integer=True, shape=(oo,))\n n = Symbol(integer=True, positive=True)\n Eq << apply(K(x[:n + 2]))\n Eq << Eq[-1].this.lhs.defun()\n\n\nif __name__ == '__main__':\n run()\n",
"step-4": "from util import *\n\n\ndef K_step(x):\n if not x.shape:\n return S.One\n assert len(x.shape) == 1\n n = x.shape[0]\n if n == 2:\n return x[1]\n return Piecewise((1, Equal(n, 1)), (x[1], Equal(n, 2)), (K(x[:n - 1]) *\n x[n - 1] + K(x[:n - 2]), True))\n\n\nK = Function.K(integer=True, eval=K_step, shape=())\n\n\n@apply\ndef apply(self):\n assert self.is_K\n x = self.arg\n n = x.shape[0]\n n -= 2\n assert n > 0\n return Equal(self, K(x[:n]) + K(x[:n + 1]) * x[n + 1])\n\n\n@prove\ndef prove(Eq):\n x = Symbol(integer=True, shape=(oo,))\n n = Symbol(integer=True, positive=True)\n Eq << apply(K(x[:n + 2]))\n Eq << Eq[-1].this.lhs.defun()\n\n\nif __name__ == '__main__':\n run()\n",
"step-5": "from util import *\n\n\n\ndef K_step(x):\n if not x.shape:\n return S.One\n assert len(x.shape) == 1\n n = x.shape[0]\n if n == 2:\n return x[1]\n return Piecewise((1, Equal(n, 1)),\n (x[1], Equal(n, 2)),\n (K(x[:n - 1]) * x[n - 1] + K(x[:n - 2]), True))\n\n\nK = Function.K(integer=True, eval=K_step, shape=())\n\n\n@apply\ndef apply(self):\n assert self.is_K\n x = self.arg\n n = x.shape[0]\n n -= 2\n assert n > 0\n\n return Equal(self, K(x[:n]) + K(x[:n + 1]) * x[n + 1])\n\n\n@prove\ndef prove(Eq):\n x = Symbol(integer=True, shape=(oo,))\n n = Symbol(integer=True, positive=True)\n\n Eq << apply(K(x[:n + 2]))\n\n Eq << Eq[-1].this.lhs.defun()\n\n\nif __name__ == '__main__':\n run()\n# created on 2021-08-18\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/python3
"""
@author : Chris Phibbs
@created : Sunday Aug 30, 2020 14:05:56 AEST
@file : q3
"""
class Solution:
    """Partial attempt at "minimum days to disconnect island" on a binary grid.

    Scans the grid for 4-directionally connected land components with an
    iterative DFS.  Returns 0 when the grid already contains two or more
    components; otherwise a heuristic based on the least number of land
    neighbours of any cell.  NOTE(review): the "bridge" case (a single cell
    whose removal disconnects the island) is not handled, as the original
    author noted, so this is not a complete solution to the problem.
    """

    def minDays(self, grid: List[List[int]]) -> int:
        """Return 0 if the land is already disconnected (>= 2 components),
        otherwise the minimum land-neighbour count seen over the island.

        Bug fix vs. the original: the column index now restarts for every
        row (previously `j` kept its end-of-row value after row 0, so only
        the first row was ever scanned).  Debug prints removed.
        """
        islandExists = False
        visited = dict()
        leastAdjacent = 4  # a cell has at most 4 orthogonal neighbours
        for i in range(len(grid)):
            for j in range(len(grid[i])):
                if grid[i][j] == 1 and (i, j) not in visited:
                    # A second unvisited component means the land is
                    # already disconnected.
                    if islandExists:
                        return 0
                    islandExists = True
                    # Iterative DFS over this component; cells are marked
                    # visited when popped (stack duplicates are harmless).
                    s = [(i, j)]
                    while s:
                        x, y = s.pop()
                        visited[(x, y)] = True
                        n = 0
                        if self.checkLand(grid, x - 1, y): n += 1
                        if self.checkLand(grid, x + 1, y): n += 1
                        if self.checkLand(grid, x, y - 1): n += 1
                        if self.checkLand(grid, x, y + 1): n += 1
                        leastAdjacent = min(leastAdjacent, n)
                        if self.checkValid(grid, visited, x - 1, y): s.append((x - 1, y))
                        if self.checkValid(grid, visited, x + 1, y): s.append((x + 1, y))
                        if self.checkValid(grid, visited, x, y - 1): s.append((x, y - 1))
                        if self.checkValid(grid, visited, x, y + 1): s.append((x, y + 1))
        # Heuristic kept from the original author: a 2-column grid answers 2.
        if len(grid[0]) == 2: return 2
        return leastAdjacent

    def checkValid(self, grid, visited, x, y):
        """True iff (x, y) is in bounds, unvisited, and land — i.e. a valid
        DFS expansion target."""
        if x < 0 or x >= len(grid): return False
        if y < 0 or y >= len(grid[0]): return False
        if (x, y) in visited: return False
        return grid[x][y] == 1

    def checkLand(self, grid, x, y):
        """True iff (x, y) is in bounds and land (debug print removed)."""
        if x < 0 or x >= len(grid): return False
        if y < 0 or y >= len(grid[0]): return False
        return grid[x][y] == 1
|
normal
|
{
"blob_id": "cddd5deba0ddc59a604d2926bdc687716e08f226",
"index": 1557,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n\n def checkLand(self, grid, x, y):\n print(f'current checkLand(x,y) are {x}, {y}')\n if x < 0 or x >= len(grid):\n return False\n if y < 0 or y >= len(grid[0]):\n return False\n return grid[x][y] == 1\n",
"step-3": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def checkValid(self, grid, visited, x, y):\n if x < 0 or x >= len(grid):\n return False\n if y < 0 or y >= len(grid[0]):\n return False\n if (x, y) in visited:\n return False\n return grid[x][y] == 1\n\n def checkLand(self, grid, x, y):\n print(f'current checkLand(x,y) are {x}, {y}')\n if x < 0 or x >= len(grid):\n return False\n if y < 0 or y >= len(grid[0]):\n return False\n return grid[x][y] == 1\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def minDays(self, grid: List[List[int]]) ->int:\n i, j = 0, 0\n islandExists = False\n visited = dict()\n leastAdjacent = 4\n while i < len(grid):\n while j < len(grid[i]):\n if grid[i][j] == 1 and (i, j) not in visited:\n if islandExists == True:\n return 0\n islandExists = True\n s = list()\n s.append((i, j))\n while s:\n n = 0\n x, y = s.pop()\n print(f'current coords are {x}, {y}')\n visited[x, y] = True\n if self.checkLand(grid, x - 1, y):\n n += 1\n if self.checkLand(grid, x + 1, y):\n n += 1\n if self.checkLand(grid, x, y - 1):\n n += 1\n if self.checkLand(grid, x, y + 1):\n n += 1\n leastAdjacent = min(leastAdjacent, n)\n if self.checkValid(grid, visited, x - 1, y):\n s.append((x - 1, y))\n if self.checkValid(grid, visited, x + 1, y):\n s.append((x + 1, y))\n if self.checkValid(grid, visited, x, y - 1):\n s.append((x, y - 1))\n if self.checkValid(grid, visited, x, y + 1):\n s.append((x, y + 1))\n j += 1\n i += 1\n if len(grid[0]) == 2:\n return 2\n return leastAdjacent\n\n def checkValid(self, grid, visited, x, y):\n if x < 0 or x >= len(grid):\n return False\n if y < 0 or y >= len(grid[0]):\n return False\n if (x, y) in visited:\n return False\n return grid[x][y] == 1\n\n def checkLand(self, grid, x, y):\n print(f'current checkLand(x,y) are {x}, {y}')\n if x < 0 or x >= len(grid):\n return False\n if y < 0 or y >= len(grid[0]):\n return False\n return grid[x][y] == 1\n",
"step-5": "#!/usr/bin/python3\n\n\"\"\"\n @author : Chris Phibbs\n @created : Sunday Aug 30, 2020 14:05:56 AEST\n @file : q3\n\n\"\"\"\n\nclass Solution:\n def minDays(self, grid: List[List[int]]) -> int:\n # bfs - find 1, run bfs. Then loop through - if any other ones found then disconnected\n \n i, j = 0, 0\n islandExists = False\n visited = dict()\n leastAdjacent = 4\n \n while i < len(grid):\n while j < len(grid[i]):\n if grid[i][j] == 1 and (i,j) not in visited:\n # new land - return 0 if already disconnected from already found land\n if islandExists == True: return 0\n \n islandExists = True\n # run bfs\n \n s = list()\n s.append((i,j))\n\n \n while s:\n n = 0\n x, y = s.pop()\n print(f\"current coords are {x}, {y}\")\n visited[(x,y)] = True\n \n if self.checkLand(grid, x-1, y): n+=1\n if self.checkLand(grid, x+1, y): n+=1\n if self.checkLand(grid, x, y-1): n+=1\n if self.checkLand(grid, x, y+1): n+=1\n \n leastAdjacent = min(leastAdjacent, n)\n \n if self.checkValid(grid, visited, x-1, y): s.append((x-1, y))\n if self.checkValid(grid, visited, x+1, y): s.append((x+1, y)) \n if self.checkValid(grid, visited, x, y-1): s.append((x, y-1))\n if self.checkValid(grid, visited, x, y+1): s.append((x, y+1))\n \n # Did not handle the \"bridge\" case - i.e. element of n == 2 that when removed disconnects everything\n # TL;DR If not in the corner and n==2 then answer is 1\n \n \n \n \n j += 1\n i += 1\n \n if len(grid[0]) == 2: return 2\n \n return leastAdjacent\n \n # if land and not visited, run bfs\n # else do nothing\n \n \n # returns True if valid land\n def checkValid(self, grid, visited, x, y):\n if x < 0 or x >= len(grid): return False\n if y < 0 or y >= len(grid[0]): return False\n if (x,y) in visited: return False\n return grid[x][y] == 1 \n \n def checkLand(self, grid, x, y):\n print(f\"current checkLand(x,y) are {x}, {y}\")\n if x < 0 or x >= len(grid): return False\n if y < 0 or y >= len(grid[0]): return False\n return grid[x][y] == 1\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def people_on_image(path_to_image):
color_map = [(255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
0, 0), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255, 255,
255), (255, 255, 255)]
trans = torchvision.transforms.Compose([torchvision.transforms.Resize(
540), torchvision.transforms.CenterCrop(520), torchvision.
transforms.ToTensor(), torchvision.transforms.Normalize((0.485,
0.456, 0.406), (0.229, 0.224, 0.225))])
model = torchvision.models.segmentation.fcn_resnet50(pretrained=True)
model.eval()
image = Image.open(path_to_image)
image = trans(image)
image = image.unsqueeze(0)
out = model(image)
labels = torch.argmax(out['out'].squeeze(), dim=0).detach().cpu().numpy()
red_map = np.zeros_like(labels).astype(np.uint8)
green_map = np.zeros_like(labels).astype(np.uint8)
blue_map = np.zeros_like(labels).astype(np.uint8)
for label_num in range(0, len(color_map)):
index = labels == label_num
red_map[index] = np.array(color_map)[label_num, 0]
blue_map[index] = np.array(color_map)[label_num, 1]
green_map[index] = np.array(color_map)[label_num, 2]
ready_image = np.stack([red_map, green_map, blue_map], axis=2)
image = np.array(image)
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
ready_image = cv2.cvtColor(ready_image, cv2.COLOR_RGB2BGR)
cv2.addWeighted(ready_image, 0.6, image, 0.4, 0)
return ready_image
<|reserved_special_token_1|>
import torch
import numpy as np
import cv2
import torchvision
from PIL import Image
def people_on_image(path_to_image):
color_map = [(255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,
0, 0), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255, 255,
255), (255, 255, 255)]
trans = torchvision.transforms.Compose([torchvision.transforms.Resize(
540), torchvision.transforms.CenterCrop(520), torchvision.
transforms.ToTensor(), torchvision.transforms.Normalize((0.485,
0.456, 0.406), (0.229, 0.224, 0.225))])
model = torchvision.models.segmentation.fcn_resnet50(pretrained=True)
model.eval()
image = Image.open(path_to_image)
image = trans(image)
image = image.unsqueeze(0)
out = model(image)
labels = torch.argmax(out['out'].squeeze(), dim=0).detach().cpu().numpy()
red_map = np.zeros_like(labels).astype(np.uint8)
green_map = np.zeros_like(labels).astype(np.uint8)
blue_map = np.zeros_like(labels).astype(np.uint8)
for label_num in range(0, len(color_map)):
index = labels == label_num
red_map[index] = np.array(color_map)[label_num, 0]
blue_map[index] = np.array(color_map)[label_num, 1]
green_map[index] = np.array(color_map)[label_num, 2]
ready_image = np.stack([red_map, green_map, blue_map], axis=2)
image = np.array(image)
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
ready_image = cv2.cvtColor(ready_image, cv2.COLOR_RGB2BGR)
cv2.addWeighted(ready_image, 0.6, image, 0.4, 0)
return ready_image
<|reserved_special_token_1|>
import torch
import numpy as np
import cv2
import torchvision
from PIL import Image
def people_on_image(path_to_image):
color_map = [
(255, 255, 255), # background
(255, 255, 255), # aeroplane
(255, 255, 255), # bicycle
(255, 255, 255), # bird
(255, 255, 255), # boat
(255, 255, 255), # bottle
(255, 255, 255), # bus
(255, 255, 255), # car
(255, 255, 255), # cat
(255, 255, 255), # chair
(255, 255, 255), # cow
(255, 255, 255), # dining table
(255, 255, 255), # dog
(255, 255, 255), # horse
(255, 255, 255), # motorbike
(255, 0, 0), # person
(255, 255, 255), # potted plant
(255, 255, 255), # sheep
(255, 255, 255), # sofa
(255, 255, 255), # train
(255, 255, 255) # tv/monitor
]
trans = torchvision.transforms.Compose([
torchvision.transforms.Resize(540),
torchvision.transforms.CenterCrop(520),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize(
(0.485, 0.456, 0.406), (0.229, 0.224, 0.225))])
model = torchvision.models.segmentation.fcn_resnet50(pretrained=True)
model.eval()
image = Image.open(path_to_image)
image = trans(image)
image = image.unsqueeze(0)
out = model(image)
labels = torch.argmax(out['out'].squeeze(), dim=0).detach().cpu().numpy()
red_map = np.zeros_like(labels).astype(np.uint8)
green_map = np.zeros_like(labels).astype(np.uint8)
blue_map = np.zeros_like(labels).astype(np.uint8)
for label_num in range(0, len(color_map)):
index = labels == label_num
red_map[index] = np.array(color_map)[label_num, 0]
blue_map[index] = np.array(color_map)[label_num, 1]
green_map[index] = np.array(color_map)[label_num, 2]
ready_image = np.stack([red_map, green_map, blue_map], axis=2)
image = np.array(image)
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
ready_image = cv2.cvtColor(ready_image, cv2.COLOR_RGB2BGR)
cv2.addWeighted(ready_image, 0.6, image, 0.4, 0)
return ready_image
|
flexible
|
{
"blob_id": "2193c97b7f1fcf204007c2528ecc47cbf3c67e81",
"index": 9992,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef people_on_image(path_to_image):\n color_map = [(255, 255, 255), (255, 255, 255), (255, 255, 255), (255, \n 255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,\n 255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,\n 255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,\n 0, 0), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255, 255,\n 255), (255, 255, 255)]\n trans = torchvision.transforms.Compose([torchvision.transforms.Resize(\n 540), torchvision.transforms.CenterCrop(520), torchvision.\n transforms.ToTensor(), torchvision.transforms.Normalize((0.485, \n 0.456, 0.406), (0.229, 0.224, 0.225))])\n model = torchvision.models.segmentation.fcn_resnet50(pretrained=True)\n model.eval()\n image = Image.open(path_to_image)\n image = trans(image)\n image = image.unsqueeze(0)\n out = model(image)\n labels = torch.argmax(out['out'].squeeze(), dim=0).detach().cpu().numpy()\n red_map = np.zeros_like(labels).astype(np.uint8)\n green_map = np.zeros_like(labels).astype(np.uint8)\n blue_map = np.zeros_like(labels).astype(np.uint8)\n for label_num in range(0, len(color_map)):\n index = labels == label_num\n red_map[index] = np.array(color_map)[label_num, 0]\n blue_map[index] = np.array(color_map)[label_num, 1]\n green_map[index] = np.array(color_map)[label_num, 2]\n ready_image = np.stack([red_map, green_map, blue_map], axis=2)\n image = np.array(image)\n image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)\n ready_image = cv2.cvtColor(ready_image, cv2.COLOR_RGB2BGR)\n cv2.addWeighted(ready_image, 0.6, image, 0.4, 0)\n return ready_image\n",
"step-3": "import torch\nimport numpy as np\nimport cv2\nimport torchvision\nfrom PIL import Image\n\n\ndef people_on_image(path_to_image):\n color_map = [(255, 255, 255), (255, 255, 255), (255, 255, 255), (255, \n 255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,\n 255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,\n 255, 255), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255,\n 0, 0), (255, 255, 255), (255, 255, 255), (255, 255, 255), (255, 255,\n 255), (255, 255, 255)]\n trans = torchvision.transforms.Compose([torchvision.transforms.Resize(\n 540), torchvision.transforms.CenterCrop(520), torchvision.\n transforms.ToTensor(), torchvision.transforms.Normalize((0.485, \n 0.456, 0.406), (0.229, 0.224, 0.225))])\n model = torchvision.models.segmentation.fcn_resnet50(pretrained=True)\n model.eval()\n image = Image.open(path_to_image)\n image = trans(image)\n image = image.unsqueeze(0)\n out = model(image)\n labels = torch.argmax(out['out'].squeeze(), dim=0).detach().cpu().numpy()\n red_map = np.zeros_like(labels).astype(np.uint8)\n green_map = np.zeros_like(labels).astype(np.uint8)\n blue_map = np.zeros_like(labels).astype(np.uint8)\n for label_num in range(0, len(color_map)):\n index = labels == label_num\n red_map[index] = np.array(color_map)[label_num, 0]\n blue_map[index] = np.array(color_map)[label_num, 1]\n green_map[index] = np.array(color_map)[label_num, 2]\n ready_image = np.stack([red_map, green_map, blue_map], axis=2)\n image = np.array(image)\n image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)\n ready_image = cv2.cvtColor(ready_image, cv2.COLOR_RGB2BGR)\n cv2.addWeighted(ready_image, 0.6, image, 0.4, 0)\n return ready_image\n",
"step-4": "import torch\r\nimport numpy as np\r\nimport cv2\r\nimport torchvision\r\nfrom PIL import Image\r\n\r\n\r\n\r\ndef people_on_image(path_to_image):\r\n\r\n color_map = [\r\n (255, 255, 255), # background\r\n (255, 255, 255), # aeroplane\r\n (255, 255, 255), # bicycle\r\n (255, 255, 255), # bird\r\n (255, 255, 255), # boat\r\n (255, 255, 255), # bottle\r\n (255, 255, 255), # bus\r\n (255, 255, 255), # car\r\n (255, 255, 255), # cat\r\n (255, 255, 255), # chair\r\n (255, 255, 255), # cow\r\n (255, 255, 255), # dining table\r\n (255, 255, 255), # dog\r\n (255, 255, 255), # horse\r\n (255, 255, 255), # motorbike\r\n (255, 0, 0), # person\r\n (255, 255, 255), # potted plant\r\n (255, 255, 255), # sheep\r\n (255, 255, 255), # sofa\r\n (255, 255, 255), # train\r\n (255, 255, 255) # tv/monitor\r\n ]\r\n trans = torchvision.transforms.Compose([\r\n torchvision.transforms.Resize(540),\r\n torchvision.transforms.CenterCrop(520),\r\n torchvision.transforms.ToTensor(),\r\n torchvision.transforms.Normalize(\r\n (0.485, 0.456, 0.406), (0.229, 0.224, 0.225))])\r\n\r\n model = torchvision.models.segmentation.fcn_resnet50(pretrained=True)\r\n model.eval()\r\n\r\n image = Image.open(path_to_image)\r\n image = trans(image)\r\n image = image.unsqueeze(0)\r\n out = model(image)\r\n\r\n labels = torch.argmax(out['out'].squeeze(), dim=0).detach().cpu().numpy()\r\n\r\n red_map = np.zeros_like(labels).astype(np.uint8)\r\n green_map = np.zeros_like(labels).astype(np.uint8)\r\n blue_map = np.zeros_like(labels).astype(np.uint8)\r\n\r\n for label_num in range(0, len(color_map)):\r\n index = labels == label_num\r\n red_map[index] = np.array(color_map)[label_num, 0]\r\n blue_map[index] = np.array(color_map)[label_num, 1]\r\n green_map[index] = np.array(color_map)[label_num, 2]\r\n\r\n ready_image = np.stack([red_map, green_map, blue_map], axis=2)\r\n\r\n image = np.array(image)\r\n image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)\r\n ready_image = cv2.cvtColor(ready_image, 
cv2.COLOR_RGB2BGR)\r\n cv2.addWeighted(ready_image, 0.6, image, 0.4, 0)\r\n return ready_image\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
$ pip install "<package_name> >= 1.1"
|
normal
|
{
"blob_id": "8010c0d53af6d428f29ff3ce63bcd6b5b811b051",
"index": 3456,
"step-1": "$ pip install \"<package_name> >= 1.1\"\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def interpret(trees):
for tree in trees:
nodetype = tree[0]
if nodetype == 'word-element':
graphics.word(tree[1])
elif nodetype == 'tag-element':
tagname = tree[1]
tagargs = tree[2]
subtrees = tree[3]
closetagname = tree[4]
if tagname != closetagname:
graphics.warning('mismatched tag')
else:
graphics.begintag(tagname, tagargs)
interpret(subtrees)
graphics.endtag()
elif nodetype == 'javascript-element':
jstext = tree[1]
jslexer = lex.lex(module=jstokens)
jsparser = yacc.yacc(module=jsgrammar)
jstree = jsparser.parse(jstext, lexer=jslexer)
result = jsinterp.interpret(jstree)
graphics.word(result)
<|reserved_special_token_1|>
import graphics
import ply.lex as lex
import ply.yacc as yacc
import jstokens
import jsgrammar
def interpret(trees):
for tree in trees:
nodetype = tree[0]
if nodetype == 'word-element':
graphics.word(tree[1])
elif nodetype == 'tag-element':
tagname = tree[1]
tagargs = tree[2]
subtrees = tree[3]
closetagname = tree[4]
if tagname != closetagname:
graphics.warning('mismatched tag')
else:
graphics.begintag(tagname, tagargs)
interpret(subtrees)
graphics.endtag()
elif nodetype == 'javascript-element':
jstext = tree[1]
jslexer = lex.lex(module=jstokens)
jsparser = yacc.yacc(module=jsgrammar)
jstree = jsparser.parse(jstext, lexer=jslexer)
result = jsinterp.interpret(jstree)
graphics.word(result)
<|reserved_special_token_1|>
import graphics
import ply.lex as lex
import ply.yacc as yacc
import jstokens
import jsgrammar
def interpret(trees): # Hello, friend
for tree in trees: # Hello,
# ("word-element","Hello")
nodetype=tree[0] # "word-element"
if nodetype == "word-element":
graphics.word(tree[1])
elif nodetype == "tag-element":
# <b>Strong text</b>
tagname = tree[1] # b
tagargs = tree[2] # []
subtrees = tree[3] # ...Strong Text!...
closetagname = tree[4] # b
if(tagname!=closetagname):
graphics.warning("mismatched tag")
else:
graphics.begintag(tagname,tagargs)
interpret(subtrees)
graphics.endtag()
elif nodetype == "javascript-element":
jstext = tree[1]; # "document.write(55);"
jslexer = lex.lex(module=jstokens)
jsparser = yacc.yacc(module=jsgrammar)
jstree = jsparser.parse(jstext,lexer=jslexer)
# jstree is a parse tree for JavaScript
result = jsinterp.interpret(jstree)
graphics.word(result)
|
flexible
|
{
"blob_id": "f3b3bee494493263f8b00827e6f3ff3a1dcd8c37",
"index": 6144,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef interpret(trees):\n for tree in trees:\n nodetype = tree[0]\n if nodetype == 'word-element':\n graphics.word(tree[1])\n elif nodetype == 'tag-element':\n tagname = tree[1]\n tagargs = tree[2]\n subtrees = tree[3]\n closetagname = tree[4]\n if tagname != closetagname:\n graphics.warning('mismatched tag')\n else:\n graphics.begintag(tagname, tagargs)\n interpret(subtrees)\n graphics.endtag()\n elif nodetype == 'javascript-element':\n jstext = tree[1]\n jslexer = lex.lex(module=jstokens)\n jsparser = yacc.yacc(module=jsgrammar)\n jstree = jsparser.parse(jstext, lexer=jslexer)\n result = jsinterp.interpret(jstree)\n graphics.word(result)\n",
"step-3": "import graphics\nimport ply.lex as lex\nimport ply.yacc as yacc\nimport jstokens\nimport jsgrammar\n\n\ndef interpret(trees):\n for tree in trees:\n nodetype = tree[0]\n if nodetype == 'word-element':\n graphics.word(tree[1])\n elif nodetype == 'tag-element':\n tagname = tree[1]\n tagargs = tree[2]\n subtrees = tree[3]\n closetagname = tree[4]\n if tagname != closetagname:\n graphics.warning('mismatched tag')\n else:\n graphics.begintag(tagname, tagargs)\n interpret(subtrees)\n graphics.endtag()\n elif nodetype == 'javascript-element':\n jstext = tree[1]\n jslexer = lex.lex(module=jstokens)\n jsparser = yacc.yacc(module=jsgrammar)\n jstree = jsparser.parse(jstext, lexer=jslexer)\n result = jsinterp.interpret(jstree)\n graphics.word(result)\n",
"step-4": "import graphics\nimport ply.lex as lex\nimport ply.yacc as yacc\nimport jstokens\nimport jsgrammar\n\ndef interpret(trees): # Hello, friend\n for tree in trees: # Hello,\n # (\"word-element\",\"Hello\")\n nodetype=tree[0] # \"word-element\"\n if nodetype == \"word-element\":\n graphics.word(tree[1]) \n elif nodetype == \"tag-element\":\n # <b>Strong text</b>\n tagname = tree[1] # b\n tagargs = tree[2] # []\n subtrees = tree[3] # ...Strong Text!...\n closetagname = tree[4] # b\n if(tagname!=closetagname):\n graphics.warning(\"mismatched tag\")\n else:\n graphics.begintag(tagname,tagargs)\n interpret(subtrees)\n graphics.endtag()\n elif nodetype == \"javascript-element\":\n jstext = tree[1]; # \"document.write(55);\"\n jslexer = lex.lex(module=jstokens)\n jsparser = yacc.yacc(module=jsgrammar)\n jstree = jsparser.parse(jstext,lexer=jslexer)\n # jstree is a parse tree for JavaScript\n result = jsinterp.interpret(jstree)\n graphics.word(result)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# p.85 (문자 갯수 카운팅)
message = \
'It was a bright cold day in April, and the clocks were striking thirteen.'
print(message, type(message))
msg_dict = dict() #빈 dict() 생성
for msg in message:
print(msg, message.count(msg))
msg_dict[msg] = message.count(msg)
print(msg_dict)
|
normal
|
{
"blob_id": "20671470c087719fa9ea8ffa25be55e9ade67681",
"index": 5373,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(message, type(message))\n<mask token>\nfor msg in message:\n print(msg, message.count(msg))\n msg_dict[msg] = message.count(msg)\nprint(msg_dict)\n",
"step-3": "message = (\n 'It was a bright cold day in April, and the clocks were striking thirteen.'\n )\nprint(message, type(message))\nmsg_dict = dict()\nfor msg in message:\n print(msg, message.count(msg))\n msg_dict[msg] = message.count(msg)\nprint(msg_dict)\n",
"step-4": "# p.85 (문자 갯수 카운팅)\nmessage = \\\n 'It was a bright cold day in April, and the clocks were striking thirteen.'\nprint(message, type(message))\n\nmsg_dict = dict() #빈 dict() 생성\nfor msg in message:\n print(msg, message.count(msg))\n msg_dict[msg] = message.count(msg)\n\nprint(msg_dict)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
subprocess.call(options)
subprocess.call([f'./bin/{name}'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
path = sys.argv[1]
name, ext = os.path.splitext(path)
options = ['g++', '-O3', 'src/' + path, '-o', f'./bin/{name}', '-std=c++11',
'-lgmp']
subprocess.call(options)
subprocess.call([f'./bin/{name}'])
<|reserved_special_token_1|>
import os
import sys
import subprocess
path = sys.argv[1]
name, ext = os.path.splitext(path)
options = ['g++', '-O3', 'src/' + path, '-o', f'./bin/{name}', '-std=c++11',
'-lgmp']
subprocess.call(options)
subprocess.call([f'./bin/{name}'])
<|reserved_special_token_1|>
#!/usr/bin/python3
import os
import sys
import subprocess
path = sys.argv[1]
name, ext = os.path.splitext(path)
options = ['g++',
'-O3',
'src/' + path,
'-o', f'./bin/{name}',
'-std=c++11',
'-lgmp']
subprocess.call(options)
subprocess.call([f'./bin/{name}'])
|
flexible
|
{
"blob_id": "5dd79f8ebd74099871d4367cafd83359c4f24e26",
"index": 5385,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsubprocess.call(options)\nsubprocess.call([f'./bin/{name}'])\n",
"step-3": "<mask token>\npath = sys.argv[1]\nname, ext = os.path.splitext(path)\noptions = ['g++', '-O3', 'src/' + path, '-o', f'./bin/{name}', '-std=c++11',\n '-lgmp']\nsubprocess.call(options)\nsubprocess.call([f'./bin/{name}'])\n",
"step-4": "import os\nimport sys\nimport subprocess\npath = sys.argv[1]\nname, ext = os.path.splitext(path)\noptions = ['g++', '-O3', 'src/' + path, '-o', f'./bin/{name}', '-std=c++11',\n '-lgmp']\nsubprocess.call(options)\nsubprocess.call([f'./bin/{name}'])\n",
"step-5": "#!/usr/bin/python3\n\nimport os\nimport sys\nimport subprocess\n\npath = sys.argv[1]\nname, ext = os.path.splitext(path)\noptions = ['g++',\n '-O3',\n 'src/' + path,\n '-o', f'./bin/{name}',\n '-std=c++11',\n '-lgmp']\nsubprocess.call(options)\nsubprocess.call([f'./bin/{name}'])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
a=[1,2,3,4,5]
max=0
for i in a:
if i>=max:
max=i
elif i<=min:
min=i
print max
print min
|
normal
|
{
"blob_id": "65da68d33aa382ed6deeff3c66a063ee299c2567",
"index": 1448,
"step-1": "a=[1,2,3,4,5]\nmax=0\nfor i in a:\n\tif i>=max:\n\t\tmax=i\n\telif i<=min:\n\t\tmin=i\nprint max\nprint min\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
'''
'''
import numpy as np
from scipy.spatial import distance
def synonym_filter(WordVectors_npArray, WordLabels_npArray):
'''
'''
pass
def synonym_alternatives_range(WordVectors_npArray,
AlternativesVectorOne_npArray,
AlternativesVectorTwo_npArray,
AlternativesVectorThree_npArray,
AlternativesVectorFour_npArray):
'''
'''
synonym_alternatives_range = np.zeros(len(WordVectors_npArray))
for word_int in range(len(WordVectors_npArray)):
DistToAltOne = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorOne_npArray[word_int,:])
print(DistToAltOne)
DistToAltTwo = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorTwo_npArray[word_int,:])
print(DistToAltTwo)
DistToAltThree = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorThree_npArray[word_int,:])
print(DistToAltThree)
DistToAltFour = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorFour_npArray[word_int,:])
print(DistToAltFour)
synonym_alternatives_range[word_int] = (max(DistToAltOne, \
DistToAltTwo, DistToAltThree, DistToAltFour) - min(DistToAltOne, \
DistToAltTwo, DistToAltThree, DistToAltFour))
return synonym_alternatives_range
def synonym_alternatives_average(WordVectors_npArray,
AlternativesVectorOne_npArray,
AlternativesVectorTwo_npArray,
AlternativesVectorThree_npArray,
AlternativesVectorFour_npArray):
'''
'''
synonym_alternatives_average = np.zeros(len(WordVectors_npArray))
for word_int in range(len(WordVectors_npArray)):
DistToAltOne = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorOne_npArray[word_int,:])
print(DistToAltOne)
DistToAltTwo = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorTwo_npArray[word_int,:])
print(DistToAltTwo)
DistToAltThree = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorThree_npArray[word_int,:])
print(DistToAltThree)
DistToAltFour = distance.cosine(WordVectors_npArray[word_int,:], \
AlternativesVectorFour_npArray[word_int,:])
print(DistToAltFour)
synonym_alternatives_average[word_int] = (DistToAltOne +\
DistToAltTwo + DistToAltThree + DistToAltFour)/4
return synonym_alternatives_average
def nth_neighbor_filter():
    ''' Placeholder for an nth-neighbor filtering step.

    Not implemented; the original author notes it may be dropped
    entirely ("Maybe we won't have this").
    '''
    pass
|
normal
|
{
"blob_id": "ea0a59953f2571f36e65f8f958774074b39a9ae5",
"index": 6996,
"step-1": "<mask token>\n\n\ndef synonym_alternatives_range(WordVectors_npArray,\n AlternativesVectorOne_npArray, AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray, AlternativesVectorFour_npArray):\n \"\"\"\n \"\"\"\n synonym_alternatives_range = np.zeros(len(WordVectors_npArray))\n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorOne_npArray[word_int, :])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorTwo_npArray[word_int, :])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorThree_npArray[word_int, :])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorFour_npArray[word_int, :])\n print(DistToAltFour)\n synonym_alternatives_range[word_int] = max(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour) - min(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour)\n return synonym_alternatives_range\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef synonym_alternatives_range(WordVectors_npArray,\n AlternativesVectorOne_npArray, AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray, AlternativesVectorFour_npArray):\n \"\"\"\n \"\"\"\n synonym_alternatives_range = np.zeros(len(WordVectors_npArray))\n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorOne_npArray[word_int, :])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorTwo_npArray[word_int, :])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorThree_npArray[word_int, :])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorFour_npArray[word_int, :])\n print(DistToAltFour)\n synonym_alternatives_range[word_int] = max(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour) - min(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour)\n return synonym_alternatives_range\n\n\ndef synonym_alternatives_average(WordVectors_npArray,\n AlternativesVectorOne_npArray, AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray, AlternativesVectorFour_npArray):\n \"\"\"\n \"\"\"\n synonym_alternatives_average = np.zeros(len(WordVectors_npArray))\n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorOne_npArray[word_int, :])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorTwo_npArray[word_int, :])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorThree_npArray[word_int, :])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorFour_npArray[word_int, :])\n print(DistToAltFour)\n 
synonym_alternatives_average[word_int] = (DistToAltOne +\n DistToAltTwo + DistToAltThree + DistToAltFour) / 4\n return synonym_alternatives_average\n\n\ndef nth_neighbor_filter():\n \"\"\" Maybe we won't have this.\n \"\"\"\n pass\n",
"step-3": "<mask token>\n\n\ndef synonym_filter(WordVectors_npArray, WordLabels_npArray):\n \"\"\"\n \"\"\"\n pass\n\n\ndef synonym_alternatives_range(WordVectors_npArray,\n AlternativesVectorOne_npArray, AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray, AlternativesVectorFour_npArray):\n \"\"\"\n \"\"\"\n synonym_alternatives_range = np.zeros(len(WordVectors_npArray))\n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorOne_npArray[word_int, :])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorTwo_npArray[word_int, :])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorThree_npArray[word_int, :])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorFour_npArray[word_int, :])\n print(DistToAltFour)\n synonym_alternatives_range[word_int] = max(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour) - min(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour)\n return synonym_alternatives_range\n\n\ndef synonym_alternatives_average(WordVectors_npArray,\n AlternativesVectorOne_npArray, AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray, AlternativesVectorFour_npArray):\n \"\"\"\n \"\"\"\n synonym_alternatives_average = np.zeros(len(WordVectors_npArray))\n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorOne_npArray[word_int, :])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorTwo_npArray[word_int, :])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorThree_npArray[word_int, :])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int, 
:],\n AlternativesVectorFour_npArray[word_int, :])\n print(DistToAltFour)\n synonym_alternatives_average[word_int] = (DistToAltOne +\n DistToAltTwo + DistToAltThree + DistToAltFour) / 4\n return synonym_alternatives_average\n\n\ndef nth_neighbor_filter():\n \"\"\" Maybe we won't have this.\n \"\"\"\n pass\n",
"step-4": "<mask token>\nimport numpy as np\nfrom scipy.spatial import distance\n\n\ndef synonym_filter(WordVectors_npArray, WordLabels_npArray):\n \"\"\"\n \"\"\"\n pass\n\n\ndef synonym_alternatives_range(WordVectors_npArray,\n AlternativesVectorOne_npArray, AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray, AlternativesVectorFour_npArray):\n \"\"\"\n \"\"\"\n synonym_alternatives_range = np.zeros(len(WordVectors_npArray))\n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorOne_npArray[word_int, :])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorTwo_npArray[word_int, :])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorThree_npArray[word_int, :])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorFour_npArray[word_int, :])\n print(DistToAltFour)\n synonym_alternatives_range[word_int] = max(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour) - min(DistToAltOne,\n DistToAltTwo, DistToAltThree, DistToAltFour)\n return synonym_alternatives_range\n\n\ndef synonym_alternatives_average(WordVectors_npArray,\n AlternativesVectorOne_npArray, AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray, AlternativesVectorFour_npArray):\n \"\"\"\n \"\"\"\n synonym_alternatives_average = np.zeros(len(WordVectors_npArray))\n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorOne_npArray[word_int, :])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorTwo_npArray[word_int, :])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorThree_npArray[word_int, :])\n print(DistToAltThree)\n 
DistToAltFour = distance.cosine(WordVectors_npArray[word_int, :],\n AlternativesVectorFour_npArray[word_int, :])\n print(DistToAltFour)\n synonym_alternatives_average[word_int] = (DistToAltOne +\n DistToAltTwo + DistToAltThree + DistToAltFour) / 4\n return synonym_alternatives_average\n\n\ndef nth_neighbor_filter():\n \"\"\" Maybe we won't have this.\n \"\"\"\n pass\n",
"step-5": "'''\n'''\n\nimport numpy as np\n\nfrom scipy.spatial import distance\n\n\ndef synonym_filter(WordVectors_npArray, WordLabels_npArray):\n '''\n '''\n \n \n pass\n\ndef synonym_alternatives_range(WordVectors_npArray, \n AlternativesVectorOne_npArray,\n AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray,\n AlternativesVectorFour_npArray):\n '''\n '''\n \n \n synonym_alternatives_range = np.zeros(len(WordVectors_npArray))\n \n for word_int in range(len(WordVectors_npArray)):\n \n DistToAltOne = distance.cosine(WordVectors_npArray[word_int,:], \\\n AlternativesVectorOne_npArray[word_int,:])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int,:], \\\n AlternativesVectorTwo_npArray[word_int,:])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int,:], \\\n AlternativesVectorThree_npArray[word_int,:])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int,:], \\\n AlternativesVectorFour_npArray[word_int,:])\n print(DistToAltFour)\n \n synonym_alternatives_range[word_int] = (max(DistToAltOne, \\\n DistToAltTwo, DistToAltThree, DistToAltFour) - min(DistToAltOne, \\\n DistToAltTwo, DistToAltThree, DistToAltFour))\n \n \n return synonym_alternatives_range\n \ndef synonym_alternatives_average(WordVectors_npArray, \n AlternativesVectorOne_npArray,\n AlternativesVectorTwo_npArray,\n AlternativesVectorThree_npArray,\n AlternativesVectorFour_npArray):\n '''\n '''\n \n \n synonym_alternatives_average = np.zeros(len(WordVectors_npArray))\n \n for word_int in range(len(WordVectors_npArray)):\n DistToAltOne = distance.cosine(WordVectors_npArray[word_int,:], \\\n AlternativesVectorOne_npArray[word_int,:])\n print(DistToAltOne)\n DistToAltTwo = distance.cosine(WordVectors_npArray[word_int,:], \\\n AlternativesVectorTwo_npArray[word_int,:])\n print(DistToAltTwo)\n DistToAltThree = distance.cosine(WordVectors_npArray[word_int,:], \\\n 
AlternativesVectorThree_npArray[word_int,:])\n print(DistToAltThree)\n DistToAltFour = distance.cosine(WordVectors_npArray[word_int,:], \\\n AlternativesVectorFour_npArray[word_int,:])\n print(DistToAltFour)\n \n synonym_alternatives_average[word_int] = (DistToAltOne +\\\n DistToAltTwo + DistToAltThree + DistToAltFour)/4\n \n return synonym_alternatives_average\n \n \n\ndef nth_neighbor_filter():\n ''' Maybe we won't have this.\n '''\n \n \n pass\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def log_all_ships(myMap):
logging.debug('Logging all ships:')
for ship_id, ship in myMap.data_ships[myMap.my_id].items():
logging.debug('ship_id: {}'.format(ship_id))
for k, v in ship.items():
logging.debug(' {}: {}'.format(k, v))
def log_all_planets(myMap):
logging.debug('Logging all planets:')
for planet_id, dict in myMap.data_planets.items():
logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))
<|reserved_special_token_0|>
def log_myMap_planets(myMap):
logging.debug('------myMap Planets------')
logging.debug('Planets (mine): {}'.format(myMap.planets_owned))
logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))
logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def log_all_ships(myMap):
logging.debug('Logging all ships:')
for ship_id, ship in myMap.data_ships[myMap.my_id].items():
logging.debug('ship_id: {}'.format(ship_id))
for k, v in ship.items():
logging.debug(' {}: {}'.format(k, v))
def log_all_planets(myMap):
logging.debug('Logging all planets:')
for planet_id, dict in myMap.data_planets.items():
logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))
def log_myMap_ships(myMap):
logging.debug('------myMap Ships------')
logging.debug('Ships (enemy): {}'.format(myMap.ships_enemy))
logging.debug('Ships (mine): {}'.format(myMap.ships_owned))
logging.debug('Ships (new): {}'.format(myMap.ships_new))
logging.debug('Ships (died): {}'.format(myMap.ships_died))
logging.debug('Ships (mining) (mine): {}'.format(myMap.ships_mining_ally))
logging.debug('Ships (mining) (enemy): {}'.format(myMap.ships_mining_enemy)
)
logging.debug('Ships (attacking_frontline): {}'.format(myMap.
ships_attacking_frontline))
logging.debug('Ships (attacking): {}'.format(myMap.ships_attacking))
logging.debug('Ships (evading): {}'.format(myMap.ships_evading))
logging.debug('Ships (supporting): {}'.format(myMap.ships_supporting))
logging.debug('Ships (defending): {}'.format(myMap.ships_defending))
logging.debug('Ships (expanding): {}'.format(myMap.ships_expanding))
logging.debug('Ships (running): {}'.format(myMap.ships_running))
logging.debug('Ships (sniping): {}'.format(myMap.ships_sniping))
logging.debug('Ships (battling): {}'.format(myMap.ships_battling))
def log_myMap_planets(myMap):
logging.debug('------myMap Planets------')
logging.debug('Planets (mine): {}'.format(myMap.planets_owned))
logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))
logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))
<|reserved_special_token_0|>
def log_dimensions(game_map):
logging.debug('Width: {} x Height: {}'.format(game_map.width, game_map.
height))
logging.debug(' ')
<|reserved_special_token_0|>
def log_numPlayers(game_map):
logging.debug('Number of players: {}'.format(len(game_map._players)))
logging.debug(' ')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def log_all_ships(myMap):
logging.debug('Logging all ships:')
for ship_id, ship in myMap.data_ships[myMap.my_id].items():
logging.debug('ship_id: {}'.format(ship_id))
for k, v in ship.items():
logging.debug(' {}: {}'.format(k, v))
def log_all_planets(myMap):
logging.debug('Logging all planets:')
for planet_id, dict in myMap.data_planets.items():
logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))
def log_myMap_ships(myMap):
logging.debug('------myMap Ships------')
logging.debug('Ships (enemy): {}'.format(myMap.ships_enemy))
logging.debug('Ships (mine): {}'.format(myMap.ships_owned))
logging.debug('Ships (new): {}'.format(myMap.ships_new))
logging.debug('Ships (died): {}'.format(myMap.ships_died))
logging.debug('Ships (mining) (mine): {}'.format(myMap.ships_mining_ally))
logging.debug('Ships (mining) (enemy): {}'.format(myMap.ships_mining_enemy)
)
logging.debug('Ships (attacking_frontline): {}'.format(myMap.
ships_attacking_frontline))
logging.debug('Ships (attacking): {}'.format(myMap.ships_attacking))
logging.debug('Ships (evading): {}'.format(myMap.ships_evading))
logging.debug('Ships (supporting): {}'.format(myMap.ships_supporting))
logging.debug('Ships (defending): {}'.format(myMap.ships_defending))
logging.debug('Ships (expanding): {}'.format(myMap.ships_expanding))
logging.debug('Ships (running): {}'.format(myMap.ships_running))
logging.debug('Ships (sniping): {}'.format(myMap.ships_sniping))
logging.debug('Ships (battling): {}'.format(myMap.ships_battling))
def log_myMap_planets(myMap):
logging.debug('------myMap Planets------')
logging.debug('Planets (mine): {}'.format(myMap.planets_owned))
logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))
logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))
def log_myShip(ship):
logging.debug('My ship id: {}, x: {}, y: {}'.format(ship.id, ship.x,
ship.y))
logging.debug(' ')
def log_dimensions(game_map):
logging.debug('Width: {} x Height: {}'.format(game_map.width, game_map.
height))
logging.debug(' ')
<|reserved_special_token_0|>
def log_numPlayers(game_map):
logging.debug('Number of players: {}'.format(len(game_map._players)))
logging.debug(' ')
<|reserved_special_token_1|>
import logging
import datetime
import numpy as np
def log_players(game_map):
logging.debug('------Players Info------')
for player in game_map.all_players():
logging.debug('-----Player ID: {}-----'.format(player.id))
for ship in player.all_ships():
logging.debug('----Ship ID: {}----'.format(ship.id))
logging.debug('X: {}'.format(ship.x))
logging.debug('Y: {}'.format(ship.y))
logging.debug('Health: {}'.format(ship.health))
logging.debug('Docking status: {}'.format(ship.docking_status))
logging.debug(' ')
def log_planets(game_map):
logging.debug('------Planet Info------')
for planet in game_map.all_planets():
logging.debug('----Planet Id: {}----'.format(planet.id))
logging.debug('X: {}'.format(planet.x))
logging.debug('Y: {}'.format(planet.y))
logging.debug('Num of docking spots: {}'.format(planet.
num_docking_spots))
logging.debug('Current production: {}'.format(planet.
current_production))
logging.debug('docked_ship_ids: {}'.format(planet._docked_ship_ids))
logging.debug('Health: {}'.format(planet.health))
logging.debug('Radius: {}'.format(planet.radius))
logging.debug('Owner: {}'.format(planet.owner))
logging.debug('Owned: {}'.format(planet.is_owned()))
logging.debug(' ')
def log_all_ships(myMap):
logging.debug('Logging all ships:')
for ship_id, ship in myMap.data_ships[myMap.my_id].items():
logging.debug('ship_id: {}'.format(ship_id))
for k, v in ship.items():
logging.debug(' {}: {}'.format(k, v))
def log_all_planets(myMap):
logging.debug('Logging all planets:')
for planet_id, dict in myMap.data_planets.items():
logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))
def log_myMap_ships(myMap):
logging.debug('------myMap Ships------')
logging.debug('Ships (enemy): {}'.format(myMap.ships_enemy))
logging.debug('Ships (mine): {}'.format(myMap.ships_owned))
logging.debug('Ships (new): {}'.format(myMap.ships_new))
logging.debug('Ships (died): {}'.format(myMap.ships_died))
logging.debug('Ships (mining) (mine): {}'.format(myMap.ships_mining_ally))
logging.debug('Ships (mining) (enemy): {}'.format(myMap.ships_mining_enemy)
)
logging.debug('Ships (attacking_frontline): {}'.format(myMap.
ships_attacking_frontline))
logging.debug('Ships (attacking): {}'.format(myMap.ships_attacking))
logging.debug('Ships (evading): {}'.format(myMap.ships_evading))
logging.debug('Ships (supporting): {}'.format(myMap.ships_supporting))
logging.debug('Ships (defending): {}'.format(myMap.ships_defending))
logging.debug('Ships (expanding): {}'.format(myMap.ships_expanding))
logging.debug('Ships (running): {}'.format(myMap.ships_running))
logging.debug('Ships (sniping): {}'.format(myMap.ships_sniping))
logging.debug('Ships (battling): {}'.format(myMap.ships_battling))
def log_myMap_planets(myMap):
logging.debug('------myMap Planets------')
logging.debug('Planets (mine): {}'.format(myMap.planets_owned))
logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))
logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))
def log_myShip(ship):
logging.debug('My ship id: {}, x: {}, y: {}'.format(ship.id, ship.x,
ship.y))
logging.debug(' ')
def log_dimensions(game_map):
logging.debug('Width: {} x Height: {}'.format(game_map.width, game_map.
height))
logging.debug(' ')
def log_myID(game_map):
logging.debug('My ID: {}'.format(game_map.my_id))
logging.debug(' ')
def log_numPlayers(game_map):
logging.debug('Number of players: {}'.format(len(game_map._players)))
logging.debug(' ')
<|reserved_special_token_1|>
import logging
import datetime
import numpy as np
def log_players(game_map):
    """Dump every player's ships (id, position, health, docking status) to the debug log."""
    logging.debug("------Players Info------")
    for player in game_map.all_players():
        logging.debug("-----Player ID: %s-----", player.id)
        for ship in player.all_ships():
            logging.debug("----Ship ID: %s----", ship.id)
            logging.debug("X: %s", ship.x)
            logging.debug("Y: %s", ship.y)
            logging.debug("Health: %s", ship.health)
            # Status is one of: UNDOCKED, DOCKED, DOCKING, UNDOCKING
            logging.debug("Docking status: %s", ship.docking_status)
    logging.debug(" ")
def log_planets(game_map):
    """Dump every planet's position, production, docking and ownership info to the debug log."""
    logging.debug("------Planet Info------")
    for planet in game_map.all_planets():
        logging.debug("----Planet Id: %s----", planet.id)
        logging.debug("X: %s", planet.x)
        logging.debug("Y: %s", planet.y)
        logging.debug("Num of docking spots: %s", planet.num_docking_spots)
        logging.debug("Current production: %s", planet.current_production)
        logging.debug("docked_ship_ids: %s", planet._docked_ship_ids)
        logging.debug("Health: %s", planet.health)
        logging.debug("Radius: %s", planet.radius)
        logging.debug("Owner: %s", planet.owner)
        logging.debug("Owned: %s", planet.is_owned())
    logging.debug(" ")
def log_all_ships(myMap):
    """Dump every ship owned by this player (id plus its full data dict) to the debug log.

    Reads myMap.data_ships[myMap.my_id], a mapping of ship_id -> dict of
    per-ship fields.
    """
    # Removed the large commented-out all-players variant (dead code) and
    # switched to lazy %-style args so formatting is skipped when DEBUG
    # logging is disabled. Rendered messages are unchanged.
    logging.debug("Logging all ships:")
    for ship_id, ship in myMap.data_ships[myMap.my_id].items():
        logging.debug("ship_id: %s", ship_id)
        for k, v in ship.items():
            logging.debug("    %s: %s", k, v)
def log_all_planets(myMap):
    """Dump every tracked planet id and its data dict to the debug log."""
    logging.debug("Logging all planets:")
    # Renamed the loop variable so it no longer shadows the builtin `dict`.
    for planet_id, planet_data in myMap.data_planets.items():
        logging.debug("Planet id: %s with data: %s", planet_id, planet_data)
def log_myMap_ships(myMap):
    """Dump every ship bucket tracked on myMap (ownership and role groups) to the debug log."""
    logging.debug("------myMap Ships------")
    # Data-driven: one (message label, bucket) pair per tracked group.
    buckets = (
        ("Ships (enemy)", myMap.ships_enemy),
        ("Ships (mine)", myMap.ships_owned),
        ("Ships (new)", myMap.ships_new),
        ("Ships (died)", myMap.ships_died),
        ("Ships (mining) (mine)", myMap.ships_mining_ally),
        ("Ships (mining) (enemy)", myMap.ships_mining_enemy),
        ("Ships (attacking_frontline)", myMap.ships_attacking_frontline),
        ("Ships (attacking)", myMap.ships_attacking),
        ("Ships (evading)", myMap.ships_evading),
        ("Ships (supporting)", myMap.ships_supporting),
        ("Ships (defending)", myMap.ships_defending),
        ("Ships (expanding)", myMap.ships_expanding),
        ("Ships (running)", myMap.ships_running),
        ("Ships (sniping)", myMap.ships_sniping),
        ("Ships (battling)", myMap.ships_battling),
    )
    for label, ships in buckets:
        logging.debug("%s: %s", label, ships)
def log_myMap_planets(myMap):
    """Dump the three planet-ownership buckets tracked on myMap to the debug log."""
    logging.debug("------myMap Planets------")
    for label, planet_ids in (("mine", myMap.planets_owned),
                              ("enemy", myMap.planets_enemy),
                              ("unowned", myMap.planets_unowned)):
        logging.debug("Planets (%s): %s", label, planet_ids)
def log_myShip(ship):
    """Log one of our ship's id and coordinates, followed by a blank spacer line."""
    logging.debug("My ship id: %s, x: %s, y: %s", ship.id, ship.x, ship.y)
    logging.debug(" ")
def log_dimensions(game_map):
    """Log the map's width and height, followed by a blank spacer line."""
    logging.debug("Width: %s x Height: %s", game_map.width, game_map.height)
    logging.debug(" ")
def log_myID(game_map):
    """Log our own player id, followed by a blank spacer line."""
    logging.debug("My ID: %s", game_map.my_id)
    logging.debug(" ")
def log_numPlayers(game_map):
    """Log the number of players in the game, followed by a spacer line."""
    player_count = len(game_map._players)
    logging.debug("Number of players: {}".format(player_count))
    logging.debug(" ")
|
flexible
|
{
"blob_id": "879bb8d67c0e1e8b125ac5994fcb142e3366c9d8",
"index": 9094,
"step-1": "<mask token>\n\n\ndef log_all_ships(myMap):\n logging.debug('Logging all ships:')\n for ship_id, ship in myMap.data_ships[myMap.my_id].items():\n logging.debug('ship_id: {}'.format(ship_id))\n for k, v in ship.items():\n logging.debug(' {}: {}'.format(k, v))\n\n\ndef log_all_planets(myMap):\n logging.debug('Logging all planets:')\n for planet_id, dict in myMap.data_planets.items():\n logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))\n\n\n<mask token>\n\n\ndef log_myMap_planets(myMap):\n logging.debug('------myMap Planets------')\n logging.debug('Planets (mine): {}'.format(myMap.planets_owned))\n logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))\n logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef log_all_ships(myMap):\n logging.debug('Logging all ships:')\n for ship_id, ship in myMap.data_ships[myMap.my_id].items():\n logging.debug('ship_id: {}'.format(ship_id))\n for k, v in ship.items():\n logging.debug(' {}: {}'.format(k, v))\n\n\ndef log_all_planets(myMap):\n logging.debug('Logging all planets:')\n for planet_id, dict in myMap.data_planets.items():\n logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))\n\n\ndef log_myMap_ships(myMap):\n logging.debug('------myMap Ships------')\n logging.debug('Ships (enemy): {}'.format(myMap.ships_enemy))\n logging.debug('Ships (mine): {}'.format(myMap.ships_owned))\n logging.debug('Ships (new): {}'.format(myMap.ships_new))\n logging.debug('Ships (died): {}'.format(myMap.ships_died))\n logging.debug('Ships (mining) (mine): {}'.format(myMap.ships_mining_ally))\n logging.debug('Ships (mining) (enemy): {}'.format(myMap.ships_mining_enemy)\n )\n logging.debug('Ships (attacking_frontline): {}'.format(myMap.\n ships_attacking_frontline))\n logging.debug('Ships (attacking): {}'.format(myMap.ships_attacking))\n logging.debug('Ships (evading): {}'.format(myMap.ships_evading))\n logging.debug('Ships (supporting): {}'.format(myMap.ships_supporting))\n logging.debug('Ships (defending): {}'.format(myMap.ships_defending))\n logging.debug('Ships (expanding): {}'.format(myMap.ships_expanding))\n logging.debug('Ships (running): {}'.format(myMap.ships_running))\n logging.debug('Ships (sniping): {}'.format(myMap.ships_sniping))\n logging.debug('Ships (battling): {}'.format(myMap.ships_battling))\n\n\ndef log_myMap_planets(myMap):\n logging.debug('------myMap Planets------')\n logging.debug('Planets (mine): {}'.format(myMap.planets_owned))\n logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))\n logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))\n\n\n<mask token>\n\n\ndef log_dimensions(game_map):\n logging.debug('Width: {} x Height: {}'.format(game_map.width, 
game_map.\n height))\n logging.debug(' ')\n\n\n<mask token>\n\n\ndef log_numPlayers(game_map):\n logging.debug('Number of players: {}'.format(len(game_map._players)))\n logging.debug(' ')\n",
"step-3": "<mask token>\n\n\ndef log_all_ships(myMap):\n logging.debug('Logging all ships:')\n for ship_id, ship in myMap.data_ships[myMap.my_id].items():\n logging.debug('ship_id: {}'.format(ship_id))\n for k, v in ship.items():\n logging.debug(' {}: {}'.format(k, v))\n\n\ndef log_all_planets(myMap):\n logging.debug('Logging all planets:')\n for planet_id, dict in myMap.data_planets.items():\n logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))\n\n\ndef log_myMap_ships(myMap):\n logging.debug('------myMap Ships------')\n logging.debug('Ships (enemy): {}'.format(myMap.ships_enemy))\n logging.debug('Ships (mine): {}'.format(myMap.ships_owned))\n logging.debug('Ships (new): {}'.format(myMap.ships_new))\n logging.debug('Ships (died): {}'.format(myMap.ships_died))\n logging.debug('Ships (mining) (mine): {}'.format(myMap.ships_mining_ally))\n logging.debug('Ships (mining) (enemy): {}'.format(myMap.ships_mining_enemy)\n )\n logging.debug('Ships (attacking_frontline): {}'.format(myMap.\n ships_attacking_frontline))\n logging.debug('Ships (attacking): {}'.format(myMap.ships_attacking))\n logging.debug('Ships (evading): {}'.format(myMap.ships_evading))\n logging.debug('Ships (supporting): {}'.format(myMap.ships_supporting))\n logging.debug('Ships (defending): {}'.format(myMap.ships_defending))\n logging.debug('Ships (expanding): {}'.format(myMap.ships_expanding))\n logging.debug('Ships (running): {}'.format(myMap.ships_running))\n logging.debug('Ships (sniping): {}'.format(myMap.ships_sniping))\n logging.debug('Ships (battling): {}'.format(myMap.ships_battling))\n\n\ndef log_myMap_planets(myMap):\n logging.debug('------myMap Planets------')\n logging.debug('Planets (mine): {}'.format(myMap.planets_owned))\n logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))\n logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))\n\n\ndef log_myShip(ship):\n logging.debug('My ship id: {}, x: {}, y: {}'.format(ship.id, ship.x,\n ship.y))\n 
logging.debug(' ')\n\n\ndef log_dimensions(game_map):\n logging.debug('Width: {} x Height: {}'.format(game_map.width, game_map.\n height))\n logging.debug(' ')\n\n\n<mask token>\n\n\ndef log_numPlayers(game_map):\n logging.debug('Number of players: {}'.format(len(game_map._players)))\n logging.debug(' ')\n",
"step-4": "import logging\nimport datetime\nimport numpy as np\n\n\ndef log_players(game_map):\n logging.debug('------Players Info------')\n for player in game_map.all_players():\n logging.debug('-----Player ID: {}-----'.format(player.id))\n for ship in player.all_ships():\n logging.debug('----Ship ID: {}----'.format(ship.id))\n logging.debug('X: {}'.format(ship.x))\n logging.debug('Y: {}'.format(ship.y))\n logging.debug('Health: {}'.format(ship.health))\n logging.debug('Docking status: {}'.format(ship.docking_status))\n logging.debug(' ')\n\n\ndef log_planets(game_map):\n logging.debug('------Planet Info------')\n for planet in game_map.all_planets():\n logging.debug('----Planet Id: {}----'.format(planet.id))\n logging.debug('X: {}'.format(planet.x))\n logging.debug('Y: {}'.format(planet.y))\n logging.debug('Num of docking spots: {}'.format(planet.\n num_docking_spots))\n logging.debug('Current production: {}'.format(planet.\n current_production))\n logging.debug('docked_ship_ids: {}'.format(planet._docked_ship_ids))\n logging.debug('Health: {}'.format(planet.health))\n logging.debug('Radius: {}'.format(planet.radius))\n logging.debug('Owner: {}'.format(planet.owner))\n logging.debug('Owned: {}'.format(planet.is_owned()))\n logging.debug(' ')\n\n\ndef log_all_ships(myMap):\n logging.debug('Logging all ships:')\n for ship_id, ship in myMap.data_ships[myMap.my_id].items():\n logging.debug('ship_id: {}'.format(ship_id))\n for k, v in ship.items():\n logging.debug(' {}: {}'.format(k, v))\n\n\ndef log_all_planets(myMap):\n logging.debug('Logging all planets:')\n for planet_id, dict in myMap.data_planets.items():\n logging.debug('Planet id: {} with data: {}'.format(planet_id, dict))\n\n\ndef log_myMap_ships(myMap):\n logging.debug('------myMap Ships------')\n logging.debug('Ships (enemy): {}'.format(myMap.ships_enemy))\n logging.debug('Ships (mine): {}'.format(myMap.ships_owned))\n logging.debug('Ships (new): {}'.format(myMap.ships_new))\n logging.debug('Ships (died): 
{}'.format(myMap.ships_died))\n logging.debug('Ships (mining) (mine): {}'.format(myMap.ships_mining_ally))\n logging.debug('Ships (mining) (enemy): {}'.format(myMap.ships_mining_enemy)\n )\n logging.debug('Ships (attacking_frontline): {}'.format(myMap.\n ships_attacking_frontline))\n logging.debug('Ships (attacking): {}'.format(myMap.ships_attacking))\n logging.debug('Ships (evading): {}'.format(myMap.ships_evading))\n logging.debug('Ships (supporting): {}'.format(myMap.ships_supporting))\n logging.debug('Ships (defending): {}'.format(myMap.ships_defending))\n logging.debug('Ships (expanding): {}'.format(myMap.ships_expanding))\n logging.debug('Ships (running): {}'.format(myMap.ships_running))\n logging.debug('Ships (sniping): {}'.format(myMap.ships_sniping))\n logging.debug('Ships (battling): {}'.format(myMap.ships_battling))\n\n\ndef log_myMap_planets(myMap):\n logging.debug('------myMap Planets------')\n logging.debug('Planets (mine): {}'.format(myMap.planets_owned))\n logging.debug('Planets (enemy): {}'.format(myMap.planets_enemy))\n logging.debug('Planets (unowned): {}'.format(myMap.planets_unowned))\n\n\ndef log_myShip(ship):\n logging.debug('My ship id: {}, x: {}, y: {}'.format(ship.id, ship.x,\n ship.y))\n logging.debug(' ')\n\n\ndef log_dimensions(game_map):\n logging.debug('Width: {} x Height: {}'.format(game_map.width, game_map.\n height))\n logging.debug(' ')\n\n\ndef log_myID(game_map):\n logging.debug('My ID: {}'.format(game_map.my_id))\n logging.debug(' ')\n\n\ndef log_numPlayers(game_map):\n logging.debug('Number of players: {}'.format(len(game_map._players)))\n logging.debug(' ')\n",
"step-5": "import logging\nimport datetime\nimport numpy as np\n\ndef log_players(game_map):\n logging.debug(\"------Players Info------\")\n for player in game_map.all_players():\n logging.debug(\"-----Player ID: {}-----\".format(player.id))\n for ship in player.all_ships():\n logging.debug(\"----Ship ID: {}----\".format(ship.id))\n logging.debug(\"X: {}\".format(ship.x))\n logging.debug(\"Y: {}\".format(ship.y))\n logging.debug(\"Health: {}\".format(ship.health))\n logging.debug(\"Docking status: {}\".format(ship.docking_status)) ## UNDOCKED, DOCKED, DOCKING, UNDOCKING\n logging.debug(\" \")\n\ndef log_planets(game_map):\n logging.debug(\"------Planet Info------\")\n for planet in game_map.all_planets():\n logging.debug(\"----Planet Id: {}----\".format(planet.id))\n logging.debug(\"X: {}\".format(planet.x))\n logging.debug(\"Y: {}\".format(planet.y))\n logging.debug(\"Num of docking spots: {}\".format(planet.num_docking_spots))\n logging.debug(\"Current production: {}\".format(planet.current_production))\n logging.debug(\"docked_ship_ids: {}\".format(planet._docked_ship_ids))\n logging.debug(\"Health: {}\".format(planet.health))\n logging.debug(\"Radius: {}\".format(planet.radius))\n logging.debug(\"Owner: {}\".format(planet.owner))\n logging.debug(\"Owned: {}\".format(planet.is_owned()))\n\n logging.debug(\" \")\n\ndef log_all_ships(myMap):\n logging.debug(\"Logging all ships:\")\n # for player_id, dict in myMap.data_ships.items():\n # logging.debug(\"Player id: {}\".format(player_id))\n # for ship_id, ship in dict.items():\n # #logging.debug(\"ship_id: {} with data:{}\".format(ship_id,ship))\n # logging.debug(\"ship_id: {}\".format(ship_id))\n # for k,v in ship.items():\n # logging.debug(\" {}: {}\".format(k,v))\n for ship_id, ship in myMap.data_ships[myMap.my_id].items():\n logging.debug(\"ship_id: {}\".format(ship_id))\n for k, v in ship.items():\n logging.debug(\" {}: {}\".format(k,v))\n\n\ndef log_all_planets(myMap):\n logging.debug(\"Logging all 
planets:\")\n for planet_id, dict in myMap.data_planets.items():\n logging.debug(\"Planet id: {} with data: {}\".format(planet_id, dict))\n\ndef log_myMap_ships(myMap):\n logging.debug(\"------myMap Ships------\")\n logging.debug(\"Ships (enemy): {}\".format(myMap.ships_enemy))\n logging.debug(\"Ships (mine): {}\".format(myMap.ships_owned))\n logging.debug(\"Ships (new): {}\".format(myMap.ships_new))\n logging.debug(\"Ships (died): {}\".format(myMap.ships_died))\n logging.debug(\"Ships (mining) (mine): {}\".format(myMap.ships_mining_ally))\n logging.debug(\"Ships (mining) (enemy): {}\".format(myMap.ships_mining_enemy))\n logging.debug(\"Ships (attacking_frontline): {}\".format(myMap.ships_attacking_frontline))\n logging.debug(\"Ships (attacking): {}\".format(myMap.ships_attacking))\n logging.debug(\"Ships (evading): {}\".format(myMap.ships_evading))\n logging.debug(\"Ships (supporting): {}\".format(myMap.ships_supporting))\n logging.debug(\"Ships (defending): {}\".format(myMap.ships_defending))\n logging.debug(\"Ships (expanding): {}\".format(myMap.ships_expanding))\n logging.debug(\"Ships (running): {}\".format(myMap.ships_running))\n logging.debug(\"Ships (sniping): {}\".format(myMap.ships_sniping))\n logging.debug(\"Ships (battling): {}\".format(myMap.ships_battling))\n\ndef log_myMap_planets(myMap):\n logging.debug(\"------myMap Planets------\")\n logging.debug(\"Planets (mine): {}\".format(myMap.planets_owned))\n logging.debug(\"Planets (enemy): {}\".format(myMap.planets_enemy))\n logging.debug(\"Planets (unowned): {}\".format(myMap.planets_unowned))\n\ndef log_myShip(ship):\n logging.debug(\"My ship id: {}, x: {}, y: {}\".format(ship.id, ship.x, ship.y))\n logging.debug(\" \")\n\ndef log_dimensions(game_map):\n logging.debug(\"Width: {} x Height: {}\".format(game_map.width,game_map.height))\n logging.debug(\" \")\n\ndef log_myID(game_map):\n logging.debug(\"My ID: {}\".format(game_map.my_id))\n logging.debug(\" \")\n\ndef log_numPlayers(game_map):\n 
logging.debug(\"Number of players: {}\".format(len(game_map._players)))\n logging.debug(\" \")\n\n\n\n\n\n\n",
"step-ids": [
3,
6,
7,
11,
12
]
}
|
[
3,
6,
7,
11,
12
] |
import os
import mysql.connector
import time
from flask import Flask, render_template
app = Flask(__name__)
def dbconnect():
    """Open a connection to the Azure MySQL 'test' database.

    Fix: the original had empty ``user=`` / ``password=`` keyword arguments,
    which is a Python syntax error.  Credentials are now read from the
    DB_USER / DB_PASSWORD environment variables so they are not hard-coded.
    """
    return mysql.connector.connect(
        user=os.environ.get('DB_USER'),
        password=os.environ.get('DB_PASSWORD'),
        host="mysqlshereen.mysql.database.azure.com",
        port=3306,
        database='test')
@app.route('/result', methods=['POST', 'GET'])
def query():
    """Delete out-of-range FOOD rows, then render the remaining rows.

    Fixes over the original:
    - the execute() call contained a placeholder UPDATE with no table name
      plus a DELETE, concatenated into one statement string; DML now runs
      as a single, well-formed statement and is committed.
    - fetchall() was called right after DML (which returns no result set);
      the rows to display now come from an explicit SELECT.
    """
    start_time = time.time()
    display = []
    conn = dbconnect()
    curr = conn.cursor()
    # Remove rows outside the allowed range and persist the change.
    curr.execute("delete from FOOD where DIGITS > 900")
    conn.commit()
    # Fetch the remaining rows; display shows columns 0, 1 and 3 as before.
    curr.execute("select NAME, INGRED, DIGITS, CATEGORY from FOOD")
    for row in curr.fetchall():
        display.append((row[0], row[1], row[3]))
    total_time = time.time() - start_time
    print("final time:", total_time)
    display.append(total_time)
    curr.close()
    conn.close()
    return render_template('display.html', display=display)
@app.route('/download', methods=['POST', 'GET'])
def download():
    """Fetch FOOD rows whose DIGITS fall between the two posted bounds.

    Fixes over the original:
    - ``request`` was used but never imported (module imports only Flask
      and render_template); imported locally here.
    - the SQL was malformed (no WHERE/AND) and built by concatenating ints
      onto a string, which raises TypeError; a parameterized query is used.
    - ``name`` was undefined when writing the image file; the row's
      category now names the file.
    - each row's picture blob is written once ('wb'), instead of iterating
      over the bytes of a single fetched blob.
    """
    from flask import request  # not in the module-level import list
    results = []
    if request.method == 'POST':
        low = int(request.form['text1'])
        high = int(request.form['text2'])
        conn = dbconnect()
        curr = conn.cursor()
        # Parameterized query: no SQL injection, no str+int TypeError.
        curr.execute(
            'select DIGITS, CATEGORY, PICTURE from FOOD '
            'where DIGITS > %s and DIGITS < %s', (low, high))
        for digits, category, picture in curr.fetchall():
            # Persist the blob so templates/static can reference the image.
            with open('/home/shereen/quiz8/static/' + str(category) + '.jpg',
                      'wb') as local_file:
                local_file.write(picture)
            results.append((digits, category))
        curr.close()
        conn.close()
    return render_template('result.html', list=results)
def insert():
    """Load every CSV/JPG pair under the data directory into FOOD.

    Each ``<name>.csv`` holds three lines (digits, ingredients, category)
    and is paired with ``<name>.jpg``, stored as the PICTURE blob.

    Fixes over the original: the cursor and connection were never closed
    (leak), and a stray debug print of every file name was removed.
    """
    conn = dbconnect()
    curr = conn.cursor()
    path = '/home/shereen/quiz8/data/'
    try:
        for root, dirs, files in os.walk(path):
            for file in files:
                if not file.endswith(".csv"):
                    continue
                img_file = file.replace('csv', 'jpg')
                name = file[:-4]
                with open(path + file) as f:
                    lines = f.readlines()
                # Strip carriage returns left over from Windows line endings.
                line1 = lines[0].replace('\r', '')
                line2 = lines[1].replace('\r', '')
                line3 = lines[2].replace('\r', '')
                with open(path + img_file, 'rb') as img:
                    image = img.read()
                sql = ('insert into FOOD (NAME,ingred,digits,category,picture) '
                       'values (%s,%s,%s,%s,%s)')
                # Commit per file, as the original did.
                curr.execute(sql, (name, line2, line1, line3, image))
                conn.commit()
    finally:
        curr.close()
        conn.close()
def dbcount():
    """Return the FOOD row count as a 1-tuple (the fetched COUNT(*) row).

    Fixes over the original: it opened TWO connections and closed the
    cursor and connection TWICE (the second close ran on already-closed
    objects); a single connection is now opened and closed exactly once.
    """
    start_time = time.time()
    conn = dbconnect()
    cur = conn.cursor()
    try:
        cur.execute('select count(*) from FOOD')
        res = cur.fetchone()
        print(res[0])
    finally:
        cur.close()
        conn.close()
    tot = time.time() - start_time  # elapsed time; kept for parity, unused by callers
    return res
@app.route('/')
def hello_world():
    # Landing page: reload the CSV/image data into the database, then call
    # download() for its side effects (its return value is unused), and
    # finally render the main template.
    insert()
    #query()
    img_name = download()
    #return render_template('result.html', img_name=img_name)
    return render_template('main.html')
# Start the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
normal
|
{
"blob_id": "3314ffdbc2f10170176c590aebf49c416bcc8856",
"index": 2136,
"step-1": "import os\n\nimport mysql.connector\nimport time\nfrom flask import Flask, render_template\n\napp = Flask(__name__)\n\ndef dbconnect():\n\n return mysql.connector.connect(user= , password= , host=\"mysqlshereen.mysql.database.azure.com\", port=3306, database='test')\n\[email protected]('/result', methods=['POST', 'GET'])\ndef query():\n start_time = time.time()\n display = []\n conn=dbconnect()\n curr=conn.cursor()\n curr.execute(\"\"\"\nUPDATE TABLE SET columnName = null WHERE YourCondition\ndelete from FOOD where DIGITS >900;\"\"\")\n\n sql=curr.fetchall()\n\n for row in sql:\n tuple = (row[0], row[1], row[3])\n display.append(tuple)\n end_time = time.time()\n total_time = end_time - start_time\n print(\"final time:\", total_time)\n display.append(total_time)\n curr.close()\n conn.close()\n return render_template('display.html', display=display)\n\n\[email protected]('/download', methods=['POST', 'GET'])\ndef download():\n list = []\n if request.method == 'POST':\n mytext = request.form['text1']\n mytext1 = request.form['text2']\n conn = dbconnect()\n curr = conn.cursor()\n r1=int(mytext)\n r2 = int(mytext1)\n curr.execute('select DIGITS,CATEGORY from food DIGITS \">\"' +r1+'DIGITS\"<\"'+r2)\n sql = curr.fetchall()\n #curr.execute('select PICTURE from FOOD')\n data = curr.fetchone()[0]\n for row in data:\n with open('/home/shereen/quiz8/static/'+name+'.jpg','w') as local_file:\n local_file.write(data)\n list.append(data)\n #img_name = name+'.jpg'\n\n curr.close()\n conn.close()\n #return img_name\n return render_template('result.html',list=list,)\n\n\ndef insert():\n conn = dbconnect()\n curr = conn.cursor()\n path = '/home/shereen/quiz8/data/'\n\n for root, dirs, files in os.walk('/home/shereen/quiz8/data/'):\n for file in files:\n img_file = file.replace('csv', 'jpg')\n print(img_file)\n if file.endswith(\".csv\"):\n with open(path + file) as f:\n name = file[:-4]\n lines = f.readlines()\n line1 = lines[0].replace('\\r', '')\n line2 = 
lines[1].replace('\\r', '')\n line3 = lines[2].replace('\\r', '')\n with open('/home/shereen/quiz8/data/' + img_file, 'rb') as img:\n image = img.read()\n sql = 'insert into FOOD (NAME,ingred,digits,category,picture) values (%s,%s,%s,%s,%s)'\n args = (name,line2, line1, line3, image)\n curr.execute(sql, args)\n conn.commit()\n\ndef dbcount():\n print('hi')\n conn = dbconnect()\n cur = conn.cursor()\n start_time = time.time()\n conn = dbconnect()\n cur = conn.cursor()\n quer = 'select count(*) from FOOD'\n cur.execute(quer)\n res = cur.fetchone()\n print(res[0])\n conn.commit()\n cur.close()\n conn.close()\n end_time = time.time()\n tot = end_time - start_time\n cur.close()\n conn.close()\n return res\n\[email protected]('/')\ndef hello_world():\n insert()\n #query()\n img_name = download()\n #return render_template('result.html', img_name=img_name)\n return render_template('main.html')\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
## File is released under public domain and you can use without limitations
#########################################################################
# web2py scaffold: pick the database backend depending on the runtime.
# NOTE(review): this ``db`` is rebound further down the file by
# db=SQLDB("sqlite://db.db"), so this DAL instance is effectively replaced.
if not request.env.web2py_runtime_gae:
    ## if NOT running on Google App Engine use SQLite or other DB
    db = DAL('sqlite://storage.sqlite')
else:
    ## connect to Google BigTable (optional 'google:datastore://namespace')
    db = DAL('google:datastore')
    ## store sessions and tickets there
    session.connect(request, response, db = db)
    ## or store session in Memcache, Redis, etc.
    ## from gluon.contrib.memdb import MEMDB
    ## from google.appengine.api.memcache import Client
    ## session.connect(request, response, db = MEMDB(Client()))
## by default give a view/generic.extension to all actions from localhost
## none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []
#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################
from gluon.tools import Auth, Crud, Service, PluginManager, prettydate
auth = Auth(db, hmac_key=Auth.get_or_create_key())
crud, service, plugins = Crud(db), Service(), PluginManager()
## create all tables needed by auth if not custom tables
auth.define_tables()
## configure email
mail=auth.settings.mailer
## NOTE: 'logging' or '...' always evaluates to 'logging' (the SMTP host
## after ``or`` is never used), so outgoing mail is not actually sent.
mail.settings.server = 'logging' or 'smtp.gmail.com:587'
mail.settings.sender = '[email protected]'
mail.settings.login = 'username:password'
## configure auth policy
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
## if you need to use OpenID, Facebook, MySpace, Twitter, Linkedin, etc.
## register with janrain.com, write your domain:api_key in private/janrain.key
from gluon.contrib.login_methods.rpx_account import use_janrain
use_janrain(auth,filename='private/janrain.key')
#########################################################################
## Define your tables below (or better in another model file) for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
## 'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################
## NOTE(review): rebinds db, replacing the DAL created by the scaffold above.
db=SQLDB("sqlite://db.db")
## Rich-text editor plugin; its widget is attached to the text fields below.
from plugin_ckeditor import CKEditor
ckeditor = CKEditor(db)
ckeditor.define_tables()
## Home page content: one image plus two long text blocks.
db.define_table('home',
                Field('image','upload'),
                Field('description', length=2096),
                Field('biography', length=2096))
## Site owner's profile record.
db.define_table('personal',
                Field('first_name'),
                Field('surname'),
                Field('image','upload'),
                Field('description', 'text',length=2096),
                Field('biography', 'text',length=2096),
                Field('email'))
db.personal.biography.widget=ckeditor.widget
db.personal.description.widget=ckeditor.widget
## Render the image as a 120px-high thumbnail linking to show_image, and
## emit the CKEditor HTML fields unescaped via XML().
db.personal.image.represent=lambda image,row: A(IMG(_src=URL('download',args=image),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
db.personal.description.represent=lambda d,r:XML(d)
db.personal.biography.represent=lambda b,r:XML(b)
def idx(id):
    """Gallery cell: thumbnail of the first image in show *id*, linking to its category page."""
    first_image = db(db.image.show == id).select().first()
    thumb_src = URL('download', args=first_image.thumb)
    return A(IMG(_src=thumb_src, _height="120"),
             _href=URL('category', args=id, vars=request.vars))
## A gallery ("show") is just a named group of images.
db.define_table('show',
                SQLField('name'))
db.show.name.requires=[IS_NOT_EMPTY(),IS_NOT_IN_DB(db,db.show.name)]
#db.show.id.represent=lambda id,row: A(IMG(_src=URL('download',args=db().select(db.image.show==id).first().file),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
## Render the gallery's id column as its first image's thumbnail (see idx()).
db.show.id.represent=lambda id, row:idx(id)
db.show.id.label=' '
db.show.name.label='Gallery'
## One artwork: belongs to a show; 'thumb' is computed from 'file' below.
db.define_table('image',
                Field('show',db.show),
                Field('title'),
                Field('size'),
                Field('media'),
                Field('price'),
                Field('file','upload'),
                Field('thumb','upload',writable=False))
def no_none(x):
    """Return *x* unchanged, or a single space when it is None.

    Used by the image-table ``represent`` callbacks so optional columns
    render as a blank cell instead of the text "None".

    Fixes over the original: removed the leftover debug statement
    (``print x, "HH"``) that polluted stdout on every render, and replaced
    the ``x == None`` comparison with the idiomatic identity test.
    """
    if x is None:
        return " "
    return x
def thumbnail(infile):
    """Create a 128x128 thumbnail next to *infile* and return its path.

    Bug fixed: the original saved the image to ``<root>tn`` (no extension)
    but returned ``<root>tn.jpg`` -- the returned path never existed on
    disk.  The file is now saved under the same name that is returned.
    """
    import os, sys
    from PIL import Image
    size = 128, 128
    outfile = os.path.splitext(infile)[0] + "tn.jpg"
    im = Image.open(infile)
    im.thumbnail(size)
    im.save(outfile, "JPEG")
    return outfile
class RESIZE(object):
    """web2py-style validator that shrinks an uploaded image to fit (nx, ny).

    Follows the validator protocol: calling the instance returns a
    (value, error) pair where error is None on success.
    """
    def __init__(self,nx=160,ny=80,error_message='niepoprawny plik'):
        # error_message is Polish for "invalid file".
        (self.nx,self.ny,self.error_message)=(nx,ny,error_message)
    def __call__(self,value):
        # An empty string means "no file uploaded" -- accept it unchanged.
        if isinstance(value, str) and len(value)==0:
            return (value,None)
        from PIL import Image
        import cStringIO  # Python 2 module; io.BytesIO on Python 3
        try:
            img = Image.open(value.file)
            img.thumbnail((self.nx,self.ny), Image.ANTIALIAS)
            s = cStringIO.StringIO()
            img.save(s, 'JPEG', quality=100)
            s.seek(0)
            # Replace the upload's stream with the resized in-memory JPEG.
            value.file = s
        except:
            # Any failure (not an image, truncated file, ...) is reported
            # as a validation error instead of being raised.
            return (value, self.error_message)
        else:
            return (value, None)
def THUMB(image, nx=120, ny=120):
    """Create ``<name>_thumb<ext>`` in the uploads folder; return that file name.

    image: stored upload's file name, relative to request.folder/uploads.
    Used as the compute callback for db.image.thumb.

    Fix: removed the stray Python 2 debug statement (``print thumb``) that
    wrote to stdout on every upload.
    """
    from PIL import Image
    import os
    img = Image.open(request.folder + 'uploads/' + image)
    img.thumbnail((nx,ny), Image.ANTIALIAS)
    root,ext = os.path.splitext(image)
    thumb='%s_thumb%s' %(root, ext)
    img.save(request.folder + 'uploads/' + thumb)
    return thumb
## Validation and display configuration for the image table.
db.image.show.requires=IS_IN_DB(db,db.show.id,'%(name)s')
db.image.id.readable=False
## Show the stored file as a 120px-high thumbnail linking to show_image.
db.image.file.represent=lambda file,row: A(IMG(_src=URL('download',args=file),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
db.image.show.represent=lambda show, row:db.show[show].name
## Render None as a blank cell for the optional columns (see no_none).
db.image.size.represent=lambda size, row:no_none(size)
db.image.media.represent=lambda media, row:no_none(media)
db.image.title.label='Image name'
db.image.file.label=' '
db.image.thumb.label=' '
## Auto-generate the thumbnail whenever a file is stored (see THUMB).
db.image.thumb.compute=lambda r:THUMB(r['file'])
db.image.thumb.represent=lambda thumb,row: A(IMG(_src=URL('download',args=thumb),_height="120"),_href=URL('show_image',args=row.id, vars=request.vars))
|
normal
|
{
"blob_id": "93c465f017542cfe9cbc55da0ae5a9e34663cf32",
"index": 1978,
"step-1": "# -*- coding: utf-8 -*-\n\n#########################################################################\n## This scaffolding model makes your app work on Google App Engine too\n## File is released under public domain and you can use without limitations\n#########################################################################\n\nif not request.env.web2py_runtime_gae: \n ## if NOT running on Google App Engine use SQLite or other DB\n db = DAL('sqlite://storage.sqlite') \nelse:\n ## connect to Google BigTable (optional 'google:datastore://namespace')\n db = DAL('google:datastore') \n ## store sessions and tickets there\n session.connect(request, response, db = db) \n ## or store session in Memcache, Redis, etc.\n ## from gluon.contrib.memdb import MEMDB\n ## from google.appengine.api.memcache import Client\n ## session.connect(request, response, db = MEMDB(Client()))\n\n## by default give a view/generic.extension to all actions from localhost\n## none otherwise. a pattern can be 'controller/function.extension'\nresponse.generic_patterns = ['*'] if request.is_local else []\n\n#########################################################################\n## Here is sample code if you need for\n## - email capabilities\n## - authentication (registration, login, logout, ... 
)\n## - authorization (role based authorization)\n## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)\n## - old style crud actions\n## (more options discussed in gluon/tools.py)\n#########################################################################\n\nfrom gluon.tools import Auth, Crud, Service, PluginManager, prettydate\nauth = Auth(db, hmac_key=Auth.get_or_create_key()) \ncrud, service, plugins = Crud(db), Service(), PluginManager()\n\n## create all tables needed by auth if not custom tables\nauth.define_tables() \n\n## configure email\nmail=auth.settings.mailer\nmail.settings.server = 'logging' or 'smtp.gmail.com:587'\nmail.settings.sender = '[email protected]'\nmail.settings.login = 'username:password'\n\n## configure auth policy\nauth.settings.registration_requires_verification = False\nauth.settings.registration_requires_approval = False\nauth.settings.reset_password_requires_verification = True\n\n## if you need to use OpenID, Facebook, MySpace, Twitter, Linkedin, etc.\n## register with janrain.com, write your domain:api_key in private/janrain.key\nfrom gluon.contrib.login_methods.rpx_account import use_janrain\nuse_janrain(auth,filename='private/janrain.key')\n\n#########################################################################\n## Define your tables below (or better in another model file) for example\n##\n## >>> db.define_table('mytable',Field('myfield','string'))\n##\n## Fields can be 'string','text','password','integer','double','boolean'\n## 'date','time','datetime','blob','upload', 'reference TABLENAME'\n## There is an implicit 'id integer autoincrement' field\n## Consult manual for more options, validators, etc.\n##\n## More API examples for controllers:\n##\n## >>> db.mytable.insert(myfield='value')\n## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)\n## >>> for row in rows: print row.id, row.myfield\n#########################################################################\n\ndb=SQLDB(\"sqlite://db.db\")\n\nfrom 
plugin_ckeditor import CKEditor \nckeditor = CKEditor(db) \nckeditor.define_tables() \n\n\n\n\ndb.define_table('home',\n Field('image','upload'),\n Field('description', length=2096),\n Field('biography', length=2096))\n\n\ndb.define_table('personal',\n Field('first_name'),\n Field('surname'),\n Field('image','upload'),\n Field('description', 'text',length=2096),\n Field('biography', 'text',length=2096),\n Field('email'))\n\ndb.personal.biography.widget=ckeditor.widget\ndb.personal.description.widget=ckeditor.widget\n\ndb.personal.image.represent=lambda image,row: A(IMG(_src=URL('download',args=image),_height=\"120\"),_href=URL('show_image',args=row.id, vars=request.vars))\ndb.personal.description.represent=lambda d,r:XML(d)\ndb.personal.biography.represent=lambda b,r:XML(b)\ndef idx(id):\n return A(IMG(_src=URL('download',args=db(db.image.show==id).select().first().thumb),_height=\"120\"),_href=URL('category',args=id, vars=request.vars))\n\n\ndb.define_table('show',\n SQLField('name'))\ndb.show.name.requires=[IS_NOT_EMPTY(),IS_NOT_IN_DB(db,db.show.name)]\n#db.show.id.represent=lambda id,row: A(IMG(_src=URL('download',args=db().select(db.image.show==id).first().file),_height=\"120\"),_href=URL('show_image',args=row.id, vars=request.vars))\ndb.show.id.represent=lambda id, row:idx(id)\ndb.show.id.label=' '\ndb.show.name.label='Gallery'\n\ndb.define_table('image',\n Field('show',db.show),\n Field('title'),\n Field('size'),\n Field('media'),\n Field('price'),\n Field('file','upload'),\n Field('thumb','upload',writable=False))\n\ndef no_none(x):\n print x, \"HH\"\n if x==None:\n return \" \"\n else:\n return x\n \ndef thumbnail(infile):\n import os, sys\n from PIL import Image\n\n size = 128, 128\n\n outfile = os.path.splitext(infile)[0] + \"tn\"\n im = Image.open(infile)\n im.thumbnail(size)\n im.save(outfile, \"JPEG\")\n return outfile+\".jpg\"\n\n\nclass RESIZE(object): \n def __init__(self,nx=160,ny=80,error_message='niepoprawny plik'): \n 
(self.nx,self.ny,self.error_message)=(nx,ny,error_message) \n def __call__(self,value):\n if isinstance(value, str) and len(value)==0: \n return (value,None) \n from PIL import Image \n import cStringIO \n try: \n img = Image.open(value.file) \n img.thumbnail((self.nx,self.ny), Image.ANTIALIAS) \n s = cStringIO.StringIO() \n img.save(s, 'JPEG', quality=100) \n s.seek(0) \n value.file = s \n except: \n return (value, self.error_message) \n else: \n return (value, None)\n \ndef THUMB(image, nx=120, ny=120):\n from PIL import Image \n import os \n img = Image.open(request.folder + 'uploads/' + image)\n img.thumbnail((nx,ny), Image.ANTIALIAS) \n root,ext = os.path.splitext(image)\n thumb='%s_thumb%s' %(root, ext)\n img.save(request.folder + 'uploads/' + thumb)\n print thumb\n return thumb\n\n \ndb.image.show.requires=IS_IN_DB(db,db.show.id,'%(name)s')\ndb.image.id.readable=False\ndb.image.file.represent=lambda file,row: A(IMG(_src=URL('download',args=file),_height=\"120\"),_href=URL('show_image',args=row.id, vars=request.vars))\ndb.image.show.represent=lambda show, row:db.show[show].name\ndb.image.size.represent=lambda size, row:no_none(size)\ndb.image.media.represent=lambda media, row:no_none(media)\ndb.image.title.label='Image name'\ndb.image.file.label=' '\ndb.image.thumb.label=' '\ndb.image.thumb.compute=lambda r:THUMB(r['file'])\ndb.image.thumb.represent=lambda thumb,row: A(IMG(_src=URL('download',args=thumb),_height=\"120\"),_href=URL('show_image',args=row.id, vars=request.vars))\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class ServicoForm(forms.ModelForm):
<|reserved_special_token_0|>
class Meta:
model = Servico
class ServicosAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'titulo', 'intro'
list_display_links = 'titulo', 'intro'
search_fields = ['titulo', 'intro', 'descricao']
list_filter = ['caracteristicas']
exclude = ['slug']
form = ServicoForm
def configuracoes_servicos_view(self, request):
import forms
from string import capitalize
from django.utils.encoding import force_unicode
from django.contrib.admin import helpers
model = self.model
opts = model._meta
prepopuled_fields = {}
add, change = True, False
if request.method == 'POST':
form = forms.ConfigServicoForm(request.POST, request.FILES)
if request.POST.has_key('_update'):
form.fields['imagem'].required = False
if form.is_valid():
form.fields['imagem'].required = True
try:
texto = TextoPagina.objects.get(slug='texto_servico')
except:
texto = TextoPagina()
if texto.texto == None or texto.texto != form.cleaned_data[
'texto']:
texto.texto = form.cleaned_data['texto']
if not request.POST.has_key('_update'
) or request.FILES.has_key('imagem'):
texto.imagem = request.FILES['imagem']
texto.slug = 'texto_servico'
texto.save()
form = forms.ConfigServicoForm()
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
change = True
add = False
else:
form = forms.ConfigServicoForm()
try:
texto = TextoPagina.objects.get(slug='texto_servico')
change = True
add = False
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
except:
pass
adminForm = helpers.AdminForm(form, [('Texto da página de serviços',
{'fields': ['imagem', 'texto']})], prepopuled_fields)
media = self.media + adminForm.media
return render_to_response('admin/config_form.html', {'add': add,
'change': change, 'title': 'Configurações', 'is_popup':
'_popup' in request.REQUEST, 'show_delete': False,
'has_delete_permission': False, 'has_add_permission': True,
'has_change_permission': True, 'errors': form.errors,
'app_label': opts.app_label, 'current_app': capitalize(opts.
app_label), 'all_app_list': self.admin_site.all_app_list(
request), 'module_name': force_unicode(opts.verbose_name_plural
), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,
'save_as': False, 'media': media}, context_instance=
RequestContext(request))
def get_urls(self):
urls = super(ServicosAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
my_urls = patterns('', url('^config/$', custom_admin.custom_site.
admin_view(self.configuracoes_servicos_view), name=
'%s_%s_config' % info))
return my_urls + urls
@property
def media(self):
super_media = super(ServicosAdmin, self).media
js = ['cufon-yui.js', 'TitilliumText.font.js',
'cufon-replace-ckeditor.js']
current_media = forms.Media(js=[static('js/%s' % url) for url in js])
media = super_media + current_media
return media
def get_model_perms(self, request):
permiss = super(ServicosAdmin, self).get_model_perms(request)
permiss['config'] = self.has_change_permission(request
) and self.has_add_permission(request)
return permiss
class ClientesAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'descricao', 'site'
list_display_links = 'descricao',
search_fields = ['site', 'descricao']
exclude = ['slug']
class TrabalhoForm(forms.Form):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Trabalho
class TrabalhoAdmin(CustomModelAdmin):
list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'
search_fields = ['titulo']
list_filter = ['servico']
exclude = ['slug']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ServicoForm(forms.ModelForm):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Servico
class ServicosAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'titulo', 'intro'
list_display_links = 'titulo', 'intro'
search_fields = ['titulo', 'intro', 'descricao']
list_filter = ['caracteristicas']
exclude = ['slug']
form = ServicoForm
def configuracoes_servicos_view(self, request):
import forms
from string import capitalize
from django.utils.encoding import force_unicode
from django.contrib.admin import helpers
model = self.model
opts = model._meta
prepopuled_fields = {}
add, change = True, False
if request.method == 'POST':
form = forms.ConfigServicoForm(request.POST, request.FILES)
if request.POST.has_key('_update'):
form.fields['imagem'].required = False
if form.is_valid():
form.fields['imagem'].required = True
try:
texto = TextoPagina.objects.get(slug='texto_servico')
except:
texto = TextoPagina()
if texto.texto == None or texto.texto != form.cleaned_data[
'texto']:
texto.texto = form.cleaned_data['texto']
if not request.POST.has_key('_update'
) or request.FILES.has_key('imagem'):
texto.imagem = request.FILES['imagem']
texto.slug = 'texto_servico'
texto.save()
form = forms.ConfigServicoForm()
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
change = True
add = False
else:
form = forms.ConfigServicoForm()
try:
texto = TextoPagina.objects.get(slug='texto_servico')
change = True
add = False
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
except:
pass
adminForm = helpers.AdminForm(form, [('Texto da página de serviços',
{'fields': ['imagem', 'texto']})], prepopuled_fields)
media = self.media + adminForm.media
return render_to_response('admin/config_form.html', {'add': add,
'change': change, 'title': 'Configurações', 'is_popup':
'_popup' in request.REQUEST, 'show_delete': False,
'has_delete_permission': False, 'has_add_permission': True,
'has_change_permission': True, 'errors': form.errors,
'app_label': opts.app_label, 'current_app': capitalize(opts.
app_label), 'all_app_list': self.admin_site.all_app_list(
request), 'module_name': force_unicode(opts.verbose_name_plural
), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,
'save_as': False, 'media': media}, context_instance=
RequestContext(request))
def get_urls(self):
urls = super(ServicosAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
my_urls = patterns('', url('^config/$', custom_admin.custom_site.
admin_view(self.configuracoes_servicos_view), name=
'%s_%s_config' % info))
return my_urls + urls
@property
def media(self):
super_media = super(ServicosAdmin, self).media
js = ['cufon-yui.js', 'TitilliumText.font.js',
'cufon-replace-ckeditor.js']
current_media = forms.Media(js=[static('js/%s' % url) for url in js])
media = super_media + current_media
return media
def get_model_perms(self, request):
permiss = super(ServicosAdmin, self).get_model_perms(request)
permiss['config'] = self.has_change_permission(request
) and self.has_add_permission(request)
return permiss
class ClientesAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'descricao', 'site'
list_display_links = 'descricao',
search_fields = ['site', 'descricao']
exclude = ['slug']
class TrabalhoForm(forms.Form):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Trabalho
class TrabalhoAdmin(CustomModelAdmin):
list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'
search_fields = ['titulo']
list_filter = ['servico']
exclude = ['slug']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CaracteristicaServicoAdmin(CustomModelAdmin):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ServicoForm(forms.ModelForm):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Servico
class ServicosAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'titulo', 'intro'
list_display_links = 'titulo', 'intro'
search_fields = ['titulo', 'intro', 'descricao']
list_filter = ['caracteristicas']
exclude = ['slug']
form = ServicoForm
def configuracoes_servicos_view(self, request):
import forms
from string import capitalize
from django.utils.encoding import force_unicode
from django.contrib.admin import helpers
model = self.model
opts = model._meta
prepopuled_fields = {}
add, change = True, False
if request.method == 'POST':
form = forms.ConfigServicoForm(request.POST, request.FILES)
if request.POST.has_key('_update'):
form.fields['imagem'].required = False
if form.is_valid():
form.fields['imagem'].required = True
try:
texto = TextoPagina.objects.get(slug='texto_servico')
except:
texto = TextoPagina()
if texto.texto == None or texto.texto != form.cleaned_data[
'texto']:
texto.texto = form.cleaned_data['texto']
if not request.POST.has_key('_update'
) or request.FILES.has_key('imagem'):
texto.imagem = request.FILES['imagem']
texto.slug = 'texto_servico'
texto.save()
form = forms.ConfigServicoForm()
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
change = True
add = False
else:
form = forms.ConfigServicoForm()
try:
texto = TextoPagina.objects.get(slug='texto_servico')
change = True
add = False
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
except:
pass
adminForm = helpers.AdminForm(form, [('Texto da página de serviços',
{'fields': ['imagem', 'texto']})], prepopuled_fields)
media = self.media + adminForm.media
return render_to_response('admin/config_form.html', {'add': add,
'change': change, 'title': 'Configurações', 'is_popup':
'_popup' in request.REQUEST, 'show_delete': False,
'has_delete_permission': False, 'has_add_permission': True,
'has_change_permission': True, 'errors': form.errors,
'app_label': opts.app_label, 'current_app': capitalize(opts.
app_label), 'all_app_list': self.admin_site.all_app_list(
request), 'module_name': force_unicode(opts.verbose_name_plural
), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,
'save_as': False, 'media': media}, context_instance=
RequestContext(request))
def get_urls(self):
urls = super(ServicosAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
my_urls = patterns('', url('^config/$', custom_admin.custom_site.
admin_view(self.configuracoes_servicos_view), name=
'%s_%s_config' % info))
return my_urls + urls
@property
def media(self):
super_media = super(ServicosAdmin, self).media
js = ['cufon-yui.js', 'TitilliumText.font.js',
'cufon-replace-ckeditor.js']
current_media = forms.Media(js=[static('js/%s' % url) for url in js])
media = super_media + current_media
return media
def get_model_perms(self, request):
permiss = super(ServicosAdmin, self).get_model_perms(request)
permiss['config'] = self.has_change_permission(request
) and self.has_add_permission(request)
return permiss
class ClientesAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'descricao', 'site'
list_display_links = 'descricao',
search_fields = ['site', 'descricao']
exclude = ['slug']
class TrabalhoForm(forms.Form):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Trabalho
class TrabalhoAdmin(CustomModelAdmin):
list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'
search_fields = ['titulo']
list_filter = ['servico']
exclude = ['slug']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CaracteristicaServicoAdmin(CustomModelAdmin):
list_display = 'descricao',
search_fields = ['descricao']
exclude = ['slug']
class ServicoForm(forms.ModelForm):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Servico
class ServicosAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'titulo', 'intro'
list_display_links = 'titulo', 'intro'
search_fields = ['titulo', 'intro', 'descricao']
list_filter = ['caracteristicas']
exclude = ['slug']
form = ServicoForm
def configuracoes_servicos_view(self, request):
import forms
from string import capitalize
from django.utils.encoding import force_unicode
from django.contrib.admin import helpers
model = self.model
opts = model._meta
prepopuled_fields = {}
add, change = True, False
if request.method == 'POST':
form = forms.ConfigServicoForm(request.POST, request.FILES)
if request.POST.has_key('_update'):
form.fields['imagem'].required = False
if form.is_valid():
form.fields['imagem'].required = True
try:
texto = TextoPagina.objects.get(slug='texto_servico')
except:
texto = TextoPagina()
if texto.texto == None or texto.texto != form.cleaned_data[
'texto']:
texto.texto = form.cleaned_data['texto']
if not request.POST.has_key('_update'
) or request.FILES.has_key('imagem'):
texto.imagem = request.FILES['imagem']
texto.slug = 'texto_servico'
texto.save()
form = forms.ConfigServicoForm()
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
change = True
add = False
else:
form = forms.ConfigServicoForm()
try:
texto = TextoPagina.objects.get(slug='texto_servico')
change = True
add = False
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
except:
pass
adminForm = helpers.AdminForm(form, [('Texto da página de serviços',
{'fields': ['imagem', 'texto']})], prepopuled_fields)
media = self.media + adminForm.media
return render_to_response('admin/config_form.html', {'add': add,
'change': change, 'title': 'Configurações', 'is_popup':
'_popup' in request.REQUEST, 'show_delete': False,
'has_delete_permission': False, 'has_add_permission': True,
'has_change_permission': True, 'errors': form.errors,
'app_label': opts.app_label, 'current_app': capitalize(opts.
app_label), 'all_app_list': self.admin_site.all_app_list(
request), 'module_name': force_unicode(opts.verbose_name_plural
), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,
'save_as': False, 'media': media}, context_instance=
RequestContext(request))
def get_urls(self):
urls = super(ServicosAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
my_urls = patterns('', url('^config/$', custom_admin.custom_site.
admin_view(self.configuracoes_servicos_view), name=
'%s_%s_config' % info))
return my_urls + urls
@property
def media(self):
super_media = super(ServicosAdmin, self).media
js = ['cufon-yui.js', 'TitilliumText.font.js',
'cufon-replace-ckeditor.js']
current_media = forms.Media(js=[static('js/%s' % url) for url in js])
media = super_media + current_media
return media
def get_model_perms(self, request):
permiss = super(ServicosAdmin, self).get_model_perms(request)
permiss['config'] = self.has_change_permission(request
) and self.has_add_permission(request)
return permiss
class ClientesAdmin(CustomModelAdmin):
list_display = 'imagem_icone', 'descricao', 'site'
list_display_links = 'descricao',
search_fields = ['site', 'descricao']
exclude = ['slug']
class TrabalhoForm(forms.Form):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Trabalho
class TrabalhoAdmin(CustomModelAdmin):
list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'
search_fields = ['titulo']
list_filter = ['servico']
exclude = ['slug']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
#coding=utf-8
'''
Created on 04/09/2012
@author: Johnny
'''
from ckeditor.widgets import CKEditorWidget
from django.conf.urls import patterns, url
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.templatetags.static import static
import views
from portfolio.models import *
from custom_admin import custom_admin
from custom_admin.custom_model_admin import CustomModelAdmin
from django import forms
class CaracteristicaServicoAdmin(CustomModelAdmin):
list_display = ('descricao',)
search_fields = ['descricao']
exclude = ['slug']
class ServicoForm(forms.ModelForm):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Servico
class ServicosAdmin(CustomModelAdmin):
list_display = ('imagem_icone','titulo','intro',)
list_display_links = ('titulo','intro',)
search_fields = ['titulo','intro','descricao']
list_filter = ['caracteristicas']
exclude = ['slug']
form = ServicoForm
def configuracoes_servicos_view(self,request):
import forms
from string import capitalize
from django.utils.encoding import force_unicode
from django.contrib.admin import helpers
model = self.model
opts = model._meta
prepopuled_fields = {}
add, change = True,False
if request.method == 'POST': # If the form has been submitted...
form = forms.ConfigServicoForm(request.POST,request.FILES) # A form bound to the POST data
if request.POST.has_key('_update'):
form.fields['imagem'].required = False
if form.is_valid(): # All validation rules pass
form.fields['imagem'].required = True
try:
texto = TextoPagina.objects.get(slug='texto_servico')
except:
texto = TextoPagina()
if texto.texto == None or texto.texto != form.cleaned_data['texto']:
texto.texto = form.cleaned_data['texto']
if not request.POST.has_key('_update') or request.FILES.has_key('imagem'):
texto.imagem = request.FILES['imagem']
texto.slug = 'texto_servico'
texto.save()
form = forms.ConfigServicoForm()
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
change = True
add = False
else:
form = forms.ConfigServicoForm()
try:
texto = TextoPagina.objects.get(slug='texto_servico')
change = True
add = False
form.initial['texto'] = texto.texto
form.initial['imagem'] = texto.imagem
except:
pass
adminForm = helpers.AdminForm(form,[('Texto da página de serviços',{'fields':['imagem','texto']})],prepopuled_fields)
media = self.media + adminForm.media
return render_to_response('admin/config_form.html',
{
'add':add,
'change':change,
'title': 'Configurações',
'is_popup': "_popup" in request.REQUEST,
'show_delete': False,
'has_delete_permission':False,
'has_add_permission':True,
'has_change_permission':True,
'errors': form.errors,
'app_label': opts.app_label,
'current_app':capitalize(opts.app_label),
'all_app_list':self.admin_site.all_app_list(request),
'module_name': force_unicode(opts.verbose_name_plural),
'opts':opts,
'has_file_field':True,
'adminform':adminForm,
'save_as':False,
'media':media,
}
,context_instance=RequestContext(request))
def get_urls(self):
urls = super(ServicosAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
my_urls = patterns('',
url(r'^config/$', custom_admin.custom_site.admin_view(self.configuracoes_servicos_view),name='%s_%s_config' % info),
)
return my_urls + urls
@property
def media(self):
super_media = super(ServicosAdmin, self).media
js = [
'cufon-yui.js',
'TitilliumText.font.js',
'cufon-replace-ckeditor.js',
]
current_media = forms.Media(js=[static('js/%s' % url) for url in js])
media = super_media + current_media
return media
def get_model_perms(self, request):
permiss = super(ServicosAdmin, self).get_model_perms(request)
permiss['config'] = self.has_change_permission(request) and self.has_add_permission(request)
return permiss
class ClientesAdmin(CustomModelAdmin):
list_display = ('imagem_icone','descricao','site')
list_display_links = ('descricao',)
search_fields = ['site','descricao']
exclude = ['slug']
class TrabalhoForm(forms.Form):
descricao = forms.CharField(widget=CKEditorWidget())
class Meta:
model = Trabalho
class TrabalhoAdmin(CustomModelAdmin):
list_display = ('titulo','descricao_pequena','servico','cliente')
search_fields = ['titulo']
list_filter = ['servico']
exclude = ['slug']
custom_admin.custom_site.register(Cliente,ClientesAdmin)
custom_admin.custom_site.register(CaracteristicaServico,CaracteristicaServicoAdmin)
custom_admin.custom_site.register(Servico,ServicosAdmin)
custom_admin.custom_site.register(Trabalho,TrabalhoAdmin)
|
flexible
|
{
"blob_id": "caac9dfc7d52607c2af67ddc03a3a7bdae9911bb",
"index": 8204,
"step-1": "<mask token>\n\n\nclass ServicoForm(forms.ModelForm):\n <mask token>\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + adminForm.media\n return render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' in request.REQUEST, 'show_delete': 
False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n list_display_links = 'descricao',\n search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + adminForm.media\n return render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' 
in request.REQUEST, 'show_delete': False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n list_display_links = 'descricao',\n search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CaracteristicaServicoAdmin(CustomModelAdmin):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + adminForm.media\n return 
render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' in request.REQUEST, 'show_delete': False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n list_display_links = 'descricao',\n search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass CaracteristicaServicoAdmin(CustomModelAdmin):\n list_display = 'descricao',\n search_fields = ['descricao']\n exclude = ['slug']\n\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Servico\n\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'titulo', 'intro'\n list_display_links = 'titulo', 'intro'\n search_fields = ['titulo', 'intro', 'descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n def configuracoes_servicos_view(self, request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n add, change = True, False\n if request.method == 'POST':\n form = forms.ConfigServicoForm(request.POST, request.FILES)\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n if form.is_valid():\n form.fields['imagem'].required = True\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n if texto.texto == None or texto.texto != form.cleaned_data[\n 'texto']:\n texto.texto = form.cleaned_data['texto']\n if not request.POST.has_key('_update'\n ) or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n texto.slug = 'texto_servico'\n texto.save()\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n adminForm = helpers.AdminForm(form, [('Texto da página de serviços',\n {'fields': ['imagem', 'texto']})], prepopuled_fields)\n media = self.media + 
adminForm.media\n return render_to_response('admin/config_form.html', {'add': add,\n 'change': change, 'title': 'Configurações', 'is_popup': \n '_popup' in request.REQUEST, 'show_delete': False,\n 'has_delete_permission': False, 'has_add_permission': True,\n 'has_change_permission': True, 'errors': form.errors,\n 'app_label': opts.app_label, 'current_app': capitalize(opts.\n app_label), 'all_app_list': self.admin_site.all_app_list(\n request), 'module_name': force_unicode(opts.verbose_name_plural\n ), 'opts': opts, 'has_file_field': True, 'adminform': adminForm,\n 'save_as': False, 'media': media}, context_instance=\n RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('', url('^config/$', custom_admin.custom_site.\n admin_view(self.configuracoes_servicos_view), name=\n '%s_%s_config' % info))\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n js = ['cufon-yui.js', 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js']\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n media = super_media + current_media\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request\n ) and self.has_add_permission(request)\n return permiss\n\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = 'imagem_icone', 'descricao', 'site'\n list_display_links = 'descricao',\n search_fields = ['site', 'descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n\n\n class Meta:\n model = Trabalho\n\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = 'titulo', 'descricao_pequena', 'servico', 'cliente'\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\n<mask 
token>\n",
"step-5": "#coding=utf-8\n'''\nCreated on 04/09/2012\n\n@author: Johnny\n'''\nfrom ckeditor.widgets import CKEditorWidget\nfrom django.conf.urls import patterns, url\nfrom django.shortcuts import render_to_response\nfrom django.template import RequestContext\nfrom django.templatetags.static import static\nimport views\nfrom portfolio.models import *\nfrom custom_admin import custom_admin\nfrom custom_admin.custom_model_admin import CustomModelAdmin\nfrom django import forms\n\nclass CaracteristicaServicoAdmin(CustomModelAdmin):\n list_display = ('descricao',)\n search_fields = ['descricao']\n exclude = ['slug']\n\nclass ServicoForm(forms.ModelForm):\n descricao = forms.CharField(widget=CKEditorWidget())\n class Meta:\n model = Servico\n\nclass ServicosAdmin(CustomModelAdmin):\n list_display = ('imagem_icone','titulo','intro',)\n list_display_links = ('titulo','intro',)\n search_fields = ['titulo','intro','descricao']\n list_filter = ['caracteristicas']\n exclude = ['slug']\n form = ServicoForm\n\n\n def configuracoes_servicos_view(self,request):\n import forms\n from string import capitalize\n from django.utils.encoding import force_unicode\n from django.contrib.admin import helpers\n\n model = self.model\n opts = model._meta\n prepopuled_fields = {}\n\n add, change = True,False\n\n if request.method == 'POST': # If the form has been submitted...\n\n form = forms.ConfigServicoForm(request.POST,request.FILES) # A form bound to the POST data\n\n if request.POST.has_key('_update'):\n form.fields['imagem'].required = False\n\n if form.is_valid(): # All validation rules pass\n\n form.fields['imagem'].required = True\n\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n except:\n texto = TextoPagina()\n\n if texto.texto == None or texto.texto != form.cleaned_data['texto']:\n texto.texto = form.cleaned_data['texto']\n\n if not request.POST.has_key('_update') or request.FILES.has_key('imagem'):\n texto.imagem = request.FILES['imagem']\n\n\n\n texto.slug = 
'texto_servico'\n texto.save()\n\n form = forms.ConfigServicoForm()\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n\n change = True\n add = False\n else:\n form = forms.ConfigServicoForm()\n try:\n texto = TextoPagina.objects.get(slug='texto_servico')\n change = True\n add = False\n form.initial['texto'] = texto.texto\n form.initial['imagem'] = texto.imagem\n except:\n pass\n\n adminForm = helpers.AdminForm(form,[('Texto da página de serviços',{'fields':['imagem','texto']})],prepopuled_fields)\n\n media = self.media + adminForm.media\n\n return render_to_response('admin/config_form.html',\n {\n 'add':add,\n 'change':change,\n 'title': 'Configurações',\n 'is_popup': \"_popup\" in request.REQUEST,\n 'show_delete': False,\n 'has_delete_permission':False,\n 'has_add_permission':True,\n 'has_change_permission':True,\n 'errors': form.errors,\n 'app_label': opts.app_label,\n 'current_app':capitalize(opts.app_label),\n 'all_app_list':self.admin_site.all_app_list(request),\n 'module_name': force_unicode(opts.verbose_name_plural),\n 'opts':opts,\n 'has_file_field':True,\n 'adminform':adminForm,\n 'save_as':False,\n 'media':media,\n }\n ,context_instance=RequestContext(request))\n\n def get_urls(self):\n urls = super(ServicosAdmin, self).get_urls()\n info = self.model._meta.app_label, self.model._meta.module_name\n my_urls = patterns('',\n url(r'^config/$', custom_admin.custom_site.admin_view(self.configuracoes_servicos_view),name='%s_%s_config' % info),\n )\n return my_urls + urls\n\n @property\n def media(self):\n super_media = super(ServicosAdmin, self).media\n\n js = [\n 'cufon-yui.js',\n 'TitilliumText.font.js',\n 'cufon-replace-ckeditor.js',\n ]\n\n current_media = forms.Media(js=[static('js/%s' % url) for url in js])\n\n media = super_media + current_media\n\n return media\n\n def get_model_perms(self, request):\n permiss = super(ServicosAdmin, self).get_model_perms(request)\n permiss['config'] = self.has_change_permission(request) and 
self.has_add_permission(request)\n return permiss\n\nclass ClientesAdmin(CustomModelAdmin):\n list_display = ('imagem_icone','descricao','site')\n list_display_links = ('descricao',)\n search_fields = ['site','descricao']\n exclude = ['slug']\n\n\nclass TrabalhoForm(forms.Form):\n descricao = forms.CharField(widget=CKEditorWidget())\n class Meta:\n model = Trabalho\n\nclass TrabalhoAdmin(CustomModelAdmin):\n list_display = ('titulo','descricao_pequena','servico','cliente')\n search_fields = ['titulo']\n list_filter = ['servico']\n exclude = ['slug']\n\n\ncustom_admin.custom_site.register(Cliente,ClientesAdmin)\ncustom_admin.custom_site.register(CaracteristicaServico,CaracteristicaServicoAdmin)\ncustom_admin.custom_site.register(Servico,ServicosAdmin)\ncustom_admin.custom_site.register(Trabalho,TrabalhoAdmin)\n",
"step-ids": [
13,
14,
15,
16,
19
]
}
|
[
13,
14,
15,
16,
19
] |
import math
print ("programa que calcula hipotenusa tomando el valor de los catetos en tipo double---")
print ("------------------------------------------------------------------------")
print (" ")
catA = float(input("igrese el valor del cateto A"))
catB = float(input("ingrese el valor del catebo B"))
def calcularHipotenusa(catA,catB):
hipotenusa=(catA**2)+(catB**2)
hipotenusa=math.sqrt(hipotenusa)
hipotenusa=float(hipotenusa)
print ("la hipotenusa es: " , hipotenusa)
calcularHipotenusa(catA,catB)
|
normal
|
{
"blob_id": "af217d0cc111f425282ee21bd47d9007a69a6239",
"index": 6297,
"step-1": "<mask token>\n\n\ndef calcularHipotenusa(catA, catB):\n hipotenusa = catA ** 2 + catB ** 2\n hipotenusa = math.sqrt(hipotenusa)\n hipotenusa = float(hipotenusa)\n print('la hipotenusa es: ', hipotenusa)\n\n\n<mask token>\n",
"step-2": "<mask token>\nprint(\n 'programa que calcula hipotenusa tomando el valor de los catetos en tipo double---'\n )\nprint(\n '------------------------------------------------------------------------')\nprint(' ')\n<mask token>\n\n\ndef calcularHipotenusa(catA, catB):\n hipotenusa = catA ** 2 + catB ** 2\n hipotenusa = math.sqrt(hipotenusa)\n hipotenusa = float(hipotenusa)\n print('la hipotenusa es: ', hipotenusa)\n\n\ncalcularHipotenusa(catA, catB)\n",
"step-3": "<mask token>\nprint(\n 'programa que calcula hipotenusa tomando el valor de los catetos en tipo double---'\n )\nprint(\n '------------------------------------------------------------------------')\nprint(' ')\ncatA = float(input('igrese el valor del cateto A'))\ncatB = float(input('ingrese el valor del catebo B'))\n\n\ndef calcularHipotenusa(catA, catB):\n hipotenusa = catA ** 2 + catB ** 2\n hipotenusa = math.sqrt(hipotenusa)\n hipotenusa = float(hipotenusa)\n print('la hipotenusa es: ', hipotenusa)\n\n\ncalcularHipotenusa(catA, catB)\n",
"step-4": "import math\nprint(\n 'programa que calcula hipotenusa tomando el valor de los catetos en tipo double---'\n )\nprint(\n '------------------------------------------------------------------------')\nprint(' ')\ncatA = float(input('igrese el valor del cateto A'))\ncatB = float(input('ingrese el valor del catebo B'))\n\n\ndef calcularHipotenusa(catA, catB):\n hipotenusa = catA ** 2 + catB ** 2\n hipotenusa = math.sqrt(hipotenusa)\n hipotenusa = float(hipotenusa)\n print('la hipotenusa es: ', hipotenusa)\n\n\ncalcularHipotenusa(catA, catB)\n",
"step-5": "import math\nprint (\"programa que calcula hipotenusa tomando el valor de los catetos en tipo double---\")\nprint (\"------------------------------------------------------------------------\")\nprint (\" \")\n\ncatA = float(input(\"igrese el valor del cateto A\"))\ncatB = float(input(\"ingrese el valor del catebo B\"))\n\ndef calcularHipotenusa(catA,catB):\n\thipotenusa=(catA**2)+(catB**2)\n\thipotenusa=math.sqrt(hipotenusa)\n\thipotenusa=float(hipotenusa)\n\tprint (\"la hipotenusa es: \" , hipotenusa)\n\ncalcularHipotenusa(catA,catB)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
<|reserved_special_token_0|>
def point_at_parameter(self, t):
return self.A + t * self.B
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t * self.B
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t * self.B
if __name__ == '__main__':
r = ray(vec3(3, 2, 5.5), vec3(1, 0, 0))
print(r.point_at_parameter(5.0))
<|reserved_special_token_1|>
from vector3 import vec3
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0, 0, 0)
self.B = vec3(1, 0, 0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError('Expected two vec3s')
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError('Expected 0 or 2 arguments, got ' + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t * self.B
if __name__ == '__main__':
r = ray(vec3(3, 2, 5.5), vec3(1, 0, 0))
print(r.point_at_parameter(5.0))
<|reserved_special_token_1|>
from vector3 import vec3
class ray:
def __init__(self, *args):
if len(args) == 0:
self.A = vec3(0,0,0)
self.B = vec3(1,0,0)
elif len(args) == 2:
if type(args[0]) != vec3 or type(args[1]) != vec3:
raise ValueError("Expected two vec3s")
else:
self.A = args[0]
self.B = args[1]
else:
raise ValueError("Expected 0 or 2 arguments, got " + len(args))
def origin(self):
return self.A
def direction(self):
return self.B
def point_at_parameter(self, t):
return self.A + t*self.B
if __name__ == "__main__":
r = ray(vec3(3,2,5.5), vec3(1,0,0))
print(r.point_at_parameter(5.0))
|
flexible
|
{
"blob_id": "a73e3a07ab0ebb90fa744d3dfc8d9da119f99283",
"index": 2070,
"step-1": "<mask token>\n\n\nclass ray:\n\n def __init__(self, *args):\n if len(args) == 0:\n self.A = vec3(0, 0, 0)\n self.B = vec3(1, 0, 0)\n elif len(args) == 2:\n if type(args[0]) != vec3 or type(args[1]) != vec3:\n raise ValueError('Expected two vec3s')\n else:\n self.A = args[0]\n self.B = args[1]\n else:\n raise ValueError('Expected 0 or 2 arguments, got ' + len(args))\n\n def origin(self):\n return self.A\n <mask token>\n\n def point_at_parameter(self, t):\n return self.A + t * self.B\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ray:\n\n def __init__(self, *args):\n if len(args) == 0:\n self.A = vec3(0, 0, 0)\n self.B = vec3(1, 0, 0)\n elif len(args) == 2:\n if type(args[0]) != vec3 or type(args[1]) != vec3:\n raise ValueError('Expected two vec3s')\n else:\n self.A = args[0]\n self.B = args[1]\n else:\n raise ValueError('Expected 0 or 2 arguments, got ' + len(args))\n\n def origin(self):\n return self.A\n\n def direction(self):\n return self.B\n\n def point_at_parameter(self, t):\n return self.A + t * self.B\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ray:\n\n def __init__(self, *args):\n if len(args) == 0:\n self.A = vec3(0, 0, 0)\n self.B = vec3(1, 0, 0)\n elif len(args) == 2:\n if type(args[0]) != vec3 or type(args[1]) != vec3:\n raise ValueError('Expected two vec3s')\n else:\n self.A = args[0]\n self.B = args[1]\n else:\n raise ValueError('Expected 0 or 2 arguments, got ' + len(args))\n\n def origin(self):\n return self.A\n\n def direction(self):\n return self.B\n\n def point_at_parameter(self, t):\n return self.A + t * self.B\n\n\nif __name__ == '__main__':\n r = ray(vec3(3, 2, 5.5), vec3(1, 0, 0))\n print(r.point_at_parameter(5.0))\n",
"step-4": "from vector3 import vec3\n\n\nclass ray:\n\n def __init__(self, *args):\n if len(args) == 0:\n self.A = vec3(0, 0, 0)\n self.B = vec3(1, 0, 0)\n elif len(args) == 2:\n if type(args[0]) != vec3 or type(args[1]) != vec3:\n raise ValueError('Expected two vec3s')\n else:\n self.A = args[0]\n self.B = args[1]\n else:\n raise ValueError('Expected 0 or 2 arguments, got ' + len(args))\n\n def origin(self):\n return self.A\n\n def direction(self):\n return self.B\n\n def point_at_parameter(self, t):\n return self.A + t * self.B\n\n\nif __name__ == '__main__':\n r = ray(vec3(3, 2, 5.5), vec3(1, 0, 0))\n print(r.point_at_parameter(5.0))\n",
"step-5": "from vector3 import vec3\n\nclass ray:\n\tdef __init__(self, *args):\n\t\tif len(args) == 0:\n\t\t\tself.A = vec3(0,0,0)\n\t\t\tself.B = vec3(1,0,0)\n\t\telif len(args) == 2:\n\t\t\tif type(args[0]) != vec3 or type(args[1]) != vec3:\n\t\t\t\traise ValueError(\"Expected two vec3s\")\n\t\t\telse:\n\t\t\t\tself.A = args[0]\n\t\t\t\tself.B = args[1]\n\t\telse:\n\t\t\traise ValueError(\"Expected 0 or 2 arguments, got \" + len(args))\n\n\tdef origin(self):\n\t\treturn self.A\n\t\n\tdef direction(self):\n\t\treturn self.B\n\n\tdef point_at_parameter(self, t):\n\t\treturn self.A + t*self.B\n\n\nif __name__ == \"__main__\":\n\tr = ray(vec3(3,2,5.5), vec3(1,0,0))\n\tprint(r.point_at_parameter(5.0))\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from unv.app.base import Application
def multiply():
print('multiply', 2 * 2)
def setup(app: Application):
app.register_run_task(multiply)
|
normal
|
{
"blob_id": "760a62a94347171eb9e40015c0c43d72df8f4fc8",
"index": 1463,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef setup(app: Application):\n app.register_run_task(multiply)\n",
"step-3": "<mask token>\n\n\ndef multiply():\n print('multiply', 2 * 2)\n\n\ndef setup(app: Application):\n app.register_run_task(multiply)\n",
"step-4": "from unv.app.base import Application\n\n\ndef multiply():\n print('multiply', 2 * 2)\n\n\ndef setup(app: Application):\n app.register_run_task(multiply)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
# @param head, a ListNode
# @return a ListNode
def insertionSortList(self, head):
if not head:
return head
fh = ListNode(0)
fh.next = head
cur = head
# ptr = ptr.next
# 1 2 3 5 7 8 4 9
# move the pre until a value larger than cur.next is found
# then change the link
# save the cur.next
# point cur.next as cur.next.next (skip the moved one)
# point cur.next.next as pre.next
# pre.next is now the cur.next
while cur.next:
if cur.next.val < cur.val:
pre = fh
while pre.next.val <= cur.next.val:
pre = pre.next
tmp = cur.next
cur.next = tmp.next
tmp.next = pre.next
pre.next = tmp
else:
cur = cur.next
return fh.next
|
normal
|
{
"blob_id": "c234031fa6d43c19515e27c5b12f8e8338f24a1c",
"index": 6412,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def insertionSortList(self, head):\n if not head:\n return head\n fh = ListNode(0)\n fh.next = head\n cur = head\n while cur.next:\n if cur.next.val < cur.val:\n pre = fh\n while pre.next.val <= cur.next.val:\n pre = pre.next\n tmp = cur.next\n cur.next = tmp.next\n tmp.next = pre.next\n pre.next = tmp\n else:\n cur = cur.next\n return fh.next\n",
"step-4": "# Definition for singly-linked list.\n# class ListNode:\n# def __init__(self, x):\n# self.val = x\n# self.next = None\n\nclass Solution:\n # @param head, a ListNode\n # @return a ListNode\n def insertionSortList(self, head):\n if not head:\n return head\n \n fh = ListNode(0)\n fh.next = head\n cur = head\n # ptr = ptr.next\n \n # 1 2 3 5 7 8 4 9\n # move the pre until a value larger than cur.next is found\n # then change the link\n # save the cur.next\n # point cur.next as cur.next.next (skip the moved one)\n # point cur.next.next as pre.next\n # pre.next is now the cur.next\n \n while cur.next:\n if cur.next.val < cur.val:\n pre = fh\n while pre.next.val <= cur.next.val:\n pre = pre.next\n tmp = cur.next\n cur.next = tmp.next\n tmp.next = pre.next\n pre.next = tmp\n else:\n cur = cur.next\n return fh.next\n \n \n \n \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
BOT_NAME = ['lg']
SPIDER_MODULES = ['lg.spiders']
NEWSPIDER_MODULE = 'lg.spiders'
DOWNLOAD_DELAY = 0.1
LOG_LEVEL = 'WARNING'
<|reserved_special_token_1|>
# coding: utf-8
BOT_NAME = ['lg']
SPIDER_MODULES = ['lg.spiders']
NEWSPIDER_MODULE = 'lg.spiders'
DOWNLOAD_DELAY = 0.1 # 间隔时间
LOG_LEVEL = 'WARNING'
|
flexible
|
{
"blob_id": "bed3d83f682404719a95be360cdd74be9dc87991",
"index": 3718,
"step-1": "<mask token>\n",
"step-2": "BOT_NAME = ['lg']\nSPIDER_MODULES = ['lg.spiders']\nNEWSPIDER_MODULE = 'lg.spiders'\nDOWNLOAD_DELAY = 0.1\nLOG_LEVEL = 'WARNING'\n",
"step-3": "# coding: utf-8\n\nBOT_NAME = ['lg']\n\nSPIDER_MODULES = ['lg.spiders']\nNEWSPIDER_MODULE = 'lg.spiders'\n\nDOWNLOAD_DELAY = 0.1 # 间隔时间\nLOG_LEVEL = 'WARNING'\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# coding: utf-8
# # Read Bathy data from ERDDAP
# In[ ]:
get_ipython().system(u'conda install basemap --yes')
# In[1]:
import numpy as np
import matplotlib.pyplot as plt
import urllib
import netCDF4
from mpl_toolkits.basemap import Basemap
# In[2]:
# Definine the domain of interest
minlat = 42
maxlat = 45
minlon = -67
maxlon = -61.5
isub = 5
# Read data from: http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.html
# using the netCDF output option
base_url='http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.nc?'
query='topo[(%f):%d:(%f)][(%f):%d:(%f)]' % (maxlat,isub,minlat,minlon,isub,maxlon)
url = base_url+query
print url
# In[3]:
# store data in NetCDF file
file='usgsCeSrtm30v6.nc'
urllib.urlretrieve (url, file)
# In[4]:
# open NetCDF data in
nc = netCDF4.Dataset(file)
ncv = nc.variables
print ncv.keys()
# In[5]:
lon = ncv['longitude'][:]
lat = ncv['latitude'][:]
lons, lats = np.meshgrid(lon,lat)
topo = ncv['topo'][:,:]
# In[ ]:
# Create map
m = Basemap(projection='mill', llcrnrlat=minlat,urcrnrlat=maxlat,llcrnrlon=minlon, urcrnrlon=maxlon,resolution='h')
fig1 = plt.figure(figsize=(10,8))
cs = m.pcolormesh(lons,lats,topo,cmap=plt.cm.jet,latlon=True)
m.drawcoastlines()
m.drawmapboundary()
plt.title('SMRT30 - Bathymetry/Topography')
cbar = plt.colorbar(orientation='horizontal', extend='both')
cbar.ax.set_xlabel('meters')
# Save figure (without 'white' borders)
plt.savefig('topo.png', bbox_inches='tight')
|
normal
|
{
"blob_id": "6d0340a08701b0c4f34e9b833bca27cf455d682d",
"index": 827,
"step-1": "\n# coding: utf-8\n\n# # Read Bathy data from ERDDAP\n\n# In[ ]:\n\nget_ipython().system(u'conda install basemap --yes')\n\n\n# In[1]:\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport urllib\nimport netCDF4\nfrom mpl_toolkits.basemap import Basemap\n\n\n# In[2]:\n\n# Definine the domain of interest\nminlat = 42\nmaxlat = 45\nminlon = -67\nmaxlon = -61.5\nisub = 5\n \n# Read data from: http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.html\n# using the netCDF output option\nbase_url='http://coastwatch.pfeg.noaa.gov/erddap/griddap/usgsCeSrtm30v6.nc?'\nquery='topo[(%f):%d:(%f)][(%f):%d:(%f)]' % (maxlat,isub,minlat,minlon,isub,maxlon)\nurl = base_url+query\nprint url\n\n\n# In[3]:\n\n# store data in NetCDF file\nfile='usgsCeSrtm30v6.nc'\nurllib.urlretrieve (url, file)\n\n\n# In[4]:\n\n# open NetCDF data in \nnc = netCDF4.Dataset(file)\nncv = nc.variables\nprint ncv.keys()\n\n\n# In[5]:\n\nlon = ncv['longitude'][:]\nlat = ncv['latitude'][:]\nlons, lats = np.meshgrid(lon,lat)\ntopo = ncv['topo'][:,:]\n\n\n# In[ ]:\n\n# Create map\nm = Basemap(projection='mill', llcrnrlat=minlat,urcrnrlat=maxlat,llcrnrlon=minlon, urcrnrlon=maxlon,resolution='h')\nfig1 = plt.figure(figsize=(10,8))\ncs = m.pcolormesh(lons,lats,topo,cmap=plt.cm.jet,latlon=True)\nm.drawcoastlines()\nm.drawmapboundary()\nplt.title('SMRT30 - Bathymetry/Topography')\ncbar = plt.colorbar(orientation='horizontal', extend='both')\ncbar.ax.set_xlabel('meters')\n \n# Save figure (without 'white' borders)\nplt.savefig('topo.png', bbox_inches='tight')\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# Author : cold
# E-mail : [email protected]
# Date : 13/09/05 11:16:58
# Desc :
#
import twqq
from setuptools import setup
requires = ["tornado", "pycurl", "tornadohttpclient"]
packages = ["twqq"]
entry_points = {
}
setup(
name = "twqq",
version = twqq.__version__,
description = 'An asynchronous webqq client library based on tornado',
long_description = open("README.rst").read(),
author = 'cold',
author_email = '[email protected]',
url = 'http://www.linuxzen.com',
license = 'Apache 2.0',
platforms = 'any',
packages = packages,
package_data = {
},
entry_points = entry_points,
install_requires = requires,
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
"Intended Audience :: Developers",
'License :: OSI Approved :: Apache Software License',
'Topic :: Internet :: WWW/HTTP',
'Programming Language :: Python :: 2.7',
],
)
|
normal
|
{
"blob_id": "9492142a569da1d21b1927e79d97f9cf6276efdc",
"index": 2800,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='[email protected]', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n",
"step-3": "<mask token>\nrequires = ['tornado', 'pycurl', 'tornadohttpclient']\npackages = ['twqq']\nentry_points = {}\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='[email protected]', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n",
"step-4": "import twqq\nfrom setuptools import setup\nrequires = ['tornado', 'pycurl', 'tornadohttpclient']\npackages = ['twqq']\nentry_points = {}\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='[email protected]', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n",
"step-5": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n#\n# Author : cold\n# E-mail : [email protected]\n# Date : 13/09/05 11:16:58\n# Desc :\n#\nimport twqq\nfrom setuptools import setup\n\nrequires = [\"tornado\", \"pycurl\", \"tornadohttpclient\"]\n\npackages = [\"twqq\"]\n\nentry_points = {\n}\n\n\nsetup(\n name = \"twqq\",\n version = twqq.__version__,\n description = 'An asynchronous webqq client library based on tornado',\n long_description = open(\"README.rst\").read(),\n author = 'cold',\n author_email = '[email protected]',\n url = 'http://www.linuxzen.com',\n license = 'Apache 2.0',\n platforms = 'any',\n packages = packages,\n package_data = {\n },\n entry_points = entry_points,\n install_requires = requires,\n classifiers=['Development Status :: 3 - Alpha',\n 'Environment :: Console',\n \"Intended Audience :: Developers\",\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP',\n 'Programming Language :: Python :: 2.7',\n ],\n)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Part 1 - Build the CNN
from keras.models import Sequential
from keras.layers import Convolution2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense
## Initialize the CNN
classifier = Sequential()
## Step 1 - Convolution Layer
classifier.add(Convolution2D(32, 3, 3,
border_mode = 'same',
input_shape = (64, 64, 3),
activation = 'relu' ))
## Step 2 - Max Pooling Layer
## Specify pool size of 2 x 2 for max summation
classifier.add(MaxPooling2D( pool_size = (2, 2) ))
## Can improve performance by adding another convolutional layer
## Since input is from pooled samples, don't need to specify input shape
## as Keras will have the shape
classifier.add(Convolution2D(32, 3, 3,
border_mode = 'same',
activation = 'relu' ))
classifier.add(MaxPooling2D( pool_size = (2, 2) ))
## Step 3 - Flattening
classifier.add(Flatten())
## Step 4 - Full Connection
### Add hidden layer
### Number of hidden nodes (128) was arbitrarily selected
### Use rectifier as activation again
classifier.add(Dense(output_dim = 128,
activation = 'relu'))
## Can also improve performance by adding another hidden layer
### Add output layer
### Use sigmoid function as activation
classifier.add(Dense(output_dim = 1,
activation = 'sigmoid'))
## Compile the CNN
## Use the adam stochastic descent algorithm
## Use the binary cross entropy function for the loss function because this is
## a logistic regression classifying a binary output
## Use accuracy for metrics function
classifier.compile(optimizer = 'adam',
loss = 'binary_crossentropy',
metrics = ['accuracy'])
# Part 2 - Fit the CNN to the images
## Need this for MacOS error about libiomp5.dylib
import os
os.environ['KMP_DUPLICATE_LIB_OK']='True'
## Import ImageDataGenerator that will perform
## image augmentation (random transformations to increase
## data sample size from current set of images)
from keras.preprocessing.image import ImageDataGenerator
## Creating data augmenter for training images
train_datagen = ImageDataGenerator(rescale = 1./255,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = True)
## Create data augmenter for test images
test_datagen = ImageDataGenerator(rescale = 1./255)
## Point training augmenter to training set
## class mode is 'binary' because it's a binary classification
training_set = train_datagen.flow_from_directory('dataset/training_set',
target_size = (64, 64),
batch_size = 32,
class_mode = 'binary')
## Point training augmenter to test set
## class mode is 'binary' because it's a binary classification
test_set = test_datagen.flow_from_directory('dataset/test_set',
target_size = (64, 64),
batch_size = 32,
class_mode = 'binary')
## Fit the classifier to the augmented images
classifier.fit_generator(training_set,
steps_per_epoch = 8000,
nb_epoch = 25,
validation_data = test_set,
nb_val_samples = 2000)
|
normal
|
{
"blob_id": "b0aeede44a4b54006cf0b7d541d5b476a7178a93",
"index": 6155,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nclassifier.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(64,\n 64, 3), activation='relu'))\nclassifier.add(MaxPooling2D(pool_size=(2, 2)))\nclassifier.add(Convolution2D(32, 3, 3, border_mode='same', activation='relu'))\nclassifier.add(MaxPooling2D(pool_size=(2, 2)))\nclassifier.add(Flatten())\nclassifier.add(Dense(output_dim=128, activation='relu'))\nclassifier.add(Dense(output_dim=1, activation='sigmoid'))\nclassifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=[\n 'accuracy'])\n<mask token>\nclassifier.fit_generator(training_set, steps_per_epoch=8000, nb_epoch=25,\n validation_data=test_set, nb_val_samples=2000)\n",
"step-3": "<mask token>\nclassifier = Sequential()\nclassifier.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(64,\n 64, 3), activation='relu'))\nclassifier.add(MaxPooling2D(pool_size=(2, 2)))\nclassifier.add(Convolution2D(32, 3, 3, border_mode='same', activation='relu'))\nclassifier.add(MaxPooling2D(pool_size=(2, 2)))\nclassifier.add(Flatten())\nclassifier.add(Dense(output_dim=128, activation='relu'))\nclassifier.add(Dense(output_dim=1, activation='sigmoid'))\nclassifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=[\n 'accuracy'])\n<mask token>\nos.environ['KMP_DUPLICATE_LIB_OK'] = 'True'\n<mask token>\ntrain_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True)\ntest_datagen = ImageDataGenerator(rescale=1.0 / 255)\ntraining_set = train_datagen.flow_from_directory('dataset/training_set',\n target_size=(64, 64), batch_size=32, class_mode='binary')\ntest_set = test_datagen.flow_from_directory('dataset/test_set', target_size\n =(64, 64), batch_size=32, class_mode='binary')\nclassifier.fit_generator(training_set, steps_per_epoch=8000, nb_epoch=25,\n validation_data=test_set, nb_val_samples=2000)\n",
"step-4": "from keras.models import Sequential\nfrom keras.layers import Convolution2D\nfrom keras.layers import MaxPooling2D\nfrom keras.layers import Flatten\nfrom keras.layers import Dense\nclassifier = Sequential()\nclassifier.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(64,\n 64, 3), activation='relu'))\nclassifier.add(MaxPooling2D(pool_size=(2, 2)))\nclassifier.add(Convolution2D(32, 3, 3, border_mode='same', activation='relu'))\nclassifier.add(MaxPooling2D(pool_size=(2, 2)))\nclassifier.add(Flatten())\nclassifier.add(Dense(output_dim=128, activation='relu'))\nclassifier.add(Dense(output_dim=1, activation='sigmoid'))\nclassifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=[\n 'accuracy'])\nimport os\nos.environ['KMP_DUPLICATE_LIB_OK'] = 'True'\nfrom keras.preprocessing.image import ImageDataGenerator\ntrain_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True)\ntest_datagen = ImageDataGenerator(rescale=1.0 / 255)\ntraining_set = train_datagen.flow_from_directory('dataset/training_set',\n target_size=(64, 64), batch_size=32, class_mode='binary')\ntest_set = test_datagen.flow_from_directory('dataset/test_set', target_size\n =(64, 64), batch_size=32, class_mode='binary')\nclassifier.fit_generator(training_set, steps_per_epoch=8000, nb_epoch=25,\n validation_data=test_set, nb_val_samples=2000)\n",
"step-5": "# Part 1 - Build the CNN\nfrom keras.models import Sequential\nfrom keras.layers import Convolution2D\nfrom keras.layers import MaxPooling2D\nfrom keras.layers import Flatten\nfrom keras.layers import Dense\n\n## Initialize the CNN\nclassifier = Sequential()\n\n## Step 1 - Convolution Layer\nclassifier.add(Convolution2D(32, 3, 3, \n border_mode = 'same', \n input_shape = (64, 64, 3), \n activation = 'relu' ))\n\n## Step 2 - Max Pooling Layer\n## Specify pool size of 2 x 2 for max summation\nclassifier.add(MaxPooling2D( pool_size = (2, 2) ))\n\n## Can improve performance by adding another convolutional layer\n## Since input is from pooled samples, don't need to specify input shape\n## as Keras will have the shape\nclassifier.add(Convolution2D(32, 3, 3, \n border_mode = 'same', \n activation = 'relu' ))\nclassifier.add(MaxPooling2D( pool_size = (2, 2) ))\n\n## Step 3 - Flattening\nclassifier.add(Flatten())\n\n## Step 4 - Full Connection\n\n### Add hidden layer\n### Number of hidden nodes (128) was arbitrarily selected\n### Use rectifier as activation again\nclassifier.add(Dense(output_dim = 128, \n activation = 'relu'))\n\n## Can also improve performance by adding another hidden layer\n\n### Add output layer\n### Use sigmoid function as activation\nclassifier.add(Dense(output_dim = 1,\n activation = 'sigmoid'))\n\n## Compile the CNN\n## Use the adam stochastic descent algorithm\n## Use the binary cross entropy function for the loss function because this is\n## a logistic regression classifying a binary output\n## Use accuracy for metrics function\nclassifier.compile(optimizer = 'adam',\n loss = 'binary_crossentropy', \n metrics = ['accuracy'])\n\n# Part 2 - Fit the CNN to the images\n\n## Need this for MacOS error about libiomp5.dylib\nimport os\n\nos.environ['KMP_DUPLICATE_LIB_OK']='True'\n\n\n## Import ImageDataGenerator that will perform \n## image augmentation (random transformations to increase\n## data sample size from current set of images)\nfrom 
keras.preprocessing.image import ImageDataGenerator\n\n## Creating data augmenter for training images\ntrain_datagen = ImageDataGenerator(rescale = 1./255, \n shear_range = 0.2,\n zoom_range = 0.2,\n horizontal_flip = True)\n\n## Create data augmenter for test images\ntest_datagen = ImageDataGenerator(rescale = 1./255)\n\n## Point training augmenter to training set\n## class mode is 'binary' because it's a binary classification\ntraining_set = train_datagen.flow_from_directory('dataset/training_set', \n target_size = (64, 64),\n batch_size = 32,\n class_mode = 'binary')\n## Point training augmenter to test set\n## class mode is 'binary' because it's a binary classification\ntest_set = test_datagen.flow_from_directory('dataset/test_set', \n target_size = (64, 64),\n batch_size = 32,\n class_mode = 'binary')\n\n## Fit the classifier to the augmented images\nclassifier.fit_generator(training_set,\n steps_per_epoch = 8000,\n nb_epoch = 25,\n validation_data = test_set,\n nb_val_samples = 2000)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('Pages', '0014_auto_20191223_2032')]
operations = [migrations.AlterField(model_name='dept', name=
'Hospital_id', field=models.ForeignKey(default='null', on_delete=
django.db.models.deletion.CASCADE, to='Pages.Hospital'))]
<|reserved_special_token_1|>
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [('Pages', '0014_auto_20191223_2032')]
operations = [migrations.AlterField(model_name='dept', name=
'Hospital_id', field=models.ForeignKey(default='null', on_delete=
django.db.models.deletion.CASCADE, to='Pages.Hospital'))]
<|reserved_special_token_1|>
# Generated by Django 2.2.6 on 2019-12-23 16:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Pages', '0014_auto_20191223_2032'),
]
operations = [
migrations.AlterField(
model_name='dept',
name='Hospital_id',
field=models.ForeignKey(default='null', on_delete=django.db.models.deletion.CASCADE, to='Pages.Hospital'),
),
]
|
flexible
|
{
"blob_id": "d09984c6e6a0ce82389dbbbade63507e9687355d",
"index": 771,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Pages', '0014_auto_20191223_2032')]\n operations = [migrations.AlterField(model_name='dept', name=\n 'Hospital_id', field=models.ForeignKey(default='null', on_delete=\n django.db.models.deletion.CASCADE, to='Pages.Hospital'))]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Pages', '0014_auto_20191223_2032')]\n operations = [migrations.AlterField(model_name='dept', name=\n 'Hospital_id', field=models.ForeignKey(default='null', on_delete=\n django.db.models.deletion.CASCADE, to='Pages.Hospital'))]\n",
"step-5": "# Generated by Django 2.2.6 on 2019-12-23 16:38\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Pages', '0014_auto_20191223_2032'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='dept',\n name='Hospital_id',\n field=models.ForeignKey(default='null', on_delete=django.db.models.deletion.CASCADE, to='Pages.Hospital'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class PasswordRecoveryForm(forms.Form):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TokenRequestForm(forms.Form):
email = forms.EmailField()
def send(self):
url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.
cleaned_data['email'])
requests.post(url)
class LoginForm(forms.Form):
username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs
={'placeholder': 'Username'}))
password = forms.CharField(widget=widgets.PasswordInput(attrs={
'placeholder': 'Password'}), min_length=6)
class AddUserToTeamForm(forms.Form):
def __init__(self, teams=None, *args, **kwargs):
super(AddUserToTeamForm, self).__init__(*args, **kwargs)
if teams:
choices = []
for team in teams:
choices.append((team, team))
self.fields['team'].choices = choices
email = forms.EmailField(max_length=60)
team = forms.ChoiceField(choices=[])
class SignupForm(forms.Form):
email = forms.EmailField(max_length=60)
password = forms.CharField(widget=widgets.PasswordInput, min_length=6)
same_password_again = forms.CharField(widget=widgets.PasswordInput,
min_length=6)
def clean(self):
cleaned_data = super(SignupForm, self).clean()
password = cleaned_data.get('password')
same_password_again = cleaned_data.get('same_password_again')
if not password == same_password_again:
msg = 'You must type the same password twice!'
self._errors['same_password_again'] = self.error_class([msg])
raise forms.ValidationError(msg)
return cleaned_data
class KeyForm(forms.Form):
name = forms.CharField()
key = forms.CharField(widget=forms.Textarea)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PasswordRecoveryForm(forms.Form):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def send(self):
url = '{0}/users/{1}/password?token={2}'.format(settings.TSURU_HOST,
self.cleaned_data['email'], self.cleaned_data['token'])
requests.post(url)
class TokenRequestForm(forms.Form):
email = forms.EmailField()
def send(self):
url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.
cleaned_data['email'])
requests.post(url)
class LoginForm(forms.Form):
username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs
={'placeholder': 'Username'}))
password = forms.CharField(widget=widgets.PasswordInput(attrs={
'placeholder': 'Password'}), min_length=6)
class AddUserToTeamForm(forms.Form):
def __init__(self, teams=None, *args, **kwargs):
super(AddUserToTeamForm, self).__init__(*args, **kwargs)
if teams:
choices = []
for team in teams:
choices.append((team, team))
self.fields['team'].choices = choices
email = forms.EmailField(max_length=60)
team = forms.ChoiceField(choices=[])
class SignupForm(forms.Form):
email = forms.EmailField(max_length=60)
password = forms.CharField(widget=widgets.PasswordInput, min_length=6)
same_password_again = forms.CharField(widget=widgets.PasswordInput,
min_length=6)
def clean(self):
cleaned_data = super(SignupForm, self).clean()
password = cleaned_data.get('password')
same_password_again = cleaned_data.get('same_password_again')
if not password == same_password_again:
msg = 'You must type the same password twice!'
self._errors['same_password_again'] = self.error_class([msg])
raise forms.ValidationError(msg)
return cleaned_data
class KeyForm(forms.Form):
name = forms.CharField()
key = forms.CharField(widget=forms.Textarea)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ChangePasswordForm(forms.Form):
old = forms.CharField(widget=forms.PasswordInput())
new = forms.CharField(widget=forms.PasswordInput())
confirm = forms.CharField(widget=forms.PasswordInput())
class PasswordRecoveryForm(forms.Form):
email = forms.EmailField()
token = forms.CharField()
def send(self):
url = '{0}/users/{1}/password?token={2}'.format(settings.TSURU_HOST,
self.cleaned_data['email'], self.cleaned_data['token'])
requests.post(url)
class TokenRequestForm(forms.Form):
email = forms.EmailField()
def send(self):
url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.
cleaned_data['email'])
requests.post(url)
class LoginForm(forms.Form):
username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs
={'placeholder': 'Username'}))
password = forms.CharField(widget=widgets.PasswordInput(attrs={
'placeholder': 'Password'}), min_length=6)
class AddUserToTeamForm(forms.Form):
def __init__(self, teams=None, *args, **kwargs):
super(AddUserToTeamForm, self).__init__(*args, **kwargs)
if teams:
choices = []
for team in teams:
choices.append((team, team))
self.fields['team'].choices = choices
email = forms.EmailField(max_length=60)
team = forms.ChoiceField(choices=[])
class SignupForm(forms.Form):
email = forms.EmailField(max_length=60)
password = forms.CharField(widget=widgets.PasswordInput, min_length=6)
same_password_again = forms.CharField(widget=widgets.PasswordInput,
min_length=6)
def clean(self):
cleaned_data = super(SignupForm, self).clean()
password = cleaned_data.get('password')
same_password_again = cleaned_data.get('same_password_again')
if not password == same_password_again:
msg = 'You must type the same password twice!'
self._errors['same_password_again'] = self.error_class([msg])
raise forms.ValidationError(msg)
return cleaned_data
class KeyForm(forms.Form):
name = forms.CharField()
key = forms.CharField(widget=forms.Textarea)
<|reserved_special_token_1|>
from django import forms
from django.forms import widgets
from tsuru_dashboard import settings
import requests
class ChangePasswordForm(forms.Form):
old = forms.CharField(widget=forms.PasswordInput())
new = forms.CharField(widget=forms.PasswordInput())
confirm = forms.CharField(widget=forms.PasswordInput())
class PasswordRecoveryForm(forms.Form):
email = forms.EmailField()
token = forms.CharField()
def send(self):
url = '{0}/users/{1}/password?token={2}'.format(settings.TSURU_HOST,
self.cleaned_data['email'], self.cleaned_data['token'])
requests.post(url)
class TokenRequestForm(forms.Form):
email = forms.EmailField()
def send(self):
url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.
cleaned_data['email'])
requests.post(url)
class LoginForm(forms.Form):
username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs
={'placeholder': 'Username'}))
password = forms.CharField(widget=widgets.PasswordInput(attrs={
'placeholder': 'Password'}), min_length=6)
class AddUserToTeamForm(forms.Form):
def __init__(self, teams=None, *args, **kwargs):
super(AddUserToTeamForm, self).__init__(*args, **kwargs)
if teams:
choices = []
for team in teams:
choices.append((team, team))
self.fields['team'].choices = choices
email = forms.EmailField(max_length=60)
team = forms.ChoiceField(choices=[])
class SignupForm(forms.Form):
email = forms.EmailField(max_length=60)
password = forms.CharField(widget=widgets.PasswordInput, min_length=6)
same_password_again = forms.CharField(widget=widgets.PasswordInput,
min_length=6)
def clean(self):
cleaned_data = super(SignupForm, self).clean()
password = cleaned_data.get('password')
same_password_again = cleaned_data.get('same_password_again')
if not password == same_password_again:
msg = 'You must type the same password twice!'
self._errors['same_password_again'] = self.error_class([msg])
raise forms.ValidationError(msg)
return cleaned_data
class KeyForm(forms.Form):
name = forms.CharField()
key = forms.CharField(widget=forms.Textarea)
<|reserved_special_token_1|>
from django import forms
from django.forms import widgets
from tsuru_dashboard import settings
import requests
class ChangePasswordForm(forms.Form):
old = forms.CharField(widget=forms.PasswordInput())
new = forms.CharField(widget=forms.PasswordInput())
confirm = forms.CharField(widget=forms.PasswordInput())
class PasswordRecoveryForm(forms.Form):
email = forms.EmailField()
token = forms.CharField()
def send(self):
url = "{0}/users/{1}/password?token={2}".format(
settings.TSURU_HOST,
self.cleaned_data['email'],
self.cleaned_data['token']
)
requests.post(url)
class TokenRequestForm(forms.Form):
email = forms.EmailField()
def send(self):
url = "{0}/users/{1}/password".format(settings.TSURU_HOST,
self.cleaned_data['email'])
requests.post(url)
class LoginForm(forms.Form):
username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs={'placeholder': 'Username'}))
password = forms.CharField(widget=widgets.PasswordInput(attrs={'placeholder': 'Password'}), min_length=6)
class AddUserToTeamForm(forms.Form):
def __init__(self, teams=None, *args, **kwargs):
super(AddUserToTeamForm, self).__init__(*args, **kwargs)
if teams:
choices = []
for team in teams:
choices.append((team, team))
self.fields["team"].choices = choices
email = forms.EmailField(max_length=60)
team = forms.ChoiceField(choices=[])
class SignupForm(forms.Form):
email = forms.EmailField(max_length=60)
password = forms.CharField(widget=widgets.PasswordInput, min_length=6)
same_password_again = forms.CharField(widget=widgets.PasswordInput,
min_length=6)
def clean(self):
cleaned_data = super(SignupForm, self).clean()
password = cleaned_data.get("password")
same_password_again = cleaned_data.get("same_password_again")
if not password == same_password_again:
msg = "You must type the same password twice!"
self._errors["same_password_again"] = self.error_class([msg])
raise forms.ValidationError(msg)
return cleaned_data
class KeyForm(forms.Form):
name = forms.CharField()
key = forms.CharField(widget=forms.Textarea)
|
flexible
|
{
"blob_id": "27fc11ae68531c7dbafdcf134f0eef019210e2de",
"index": 8347,
"step-1": "<mask token>\n\n\nclass PasswordRecoveryForm(forms.Form):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TokenRequestForm(forms.Form):\n email = forms.EmailField()\n\n def send(self):\n url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.\n cleaned_data['email'])\n requests.post(url)\n\n\nclass LoginForm(forms.Form):\n username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs\n ={'placeholder': 'Username'}))\n password = forms.CharField(widget=widgets.PasswordInput(attrs={\n 'placeholder': 'Password'}), min_length=6)\n\n\nclass AddUserToTeamForm(forms.Form):\n\n def __init__(self, teams=None, *args, **kwargs):\n super(AddUserToTeamForm, self).__init__(*args, **kwargs)\n if teams:\n choices = []\n for team in teams:\n choices.append((team, team))\n self.fields['team'].choices = choices\n email = forms.EmailField(max_length=60)\n team = forms.ChoiceField(choices=[])\n\n\nclass SignupForm(forms.Form):\n email = forms.EmailField(max_length=60)\n password = forms.CharField(widget=widgets.PasswordInput, min_length=6)\n same_password_again = forms.CharField(widget=widgets.PasswordInput,\n min_length=6)\n\n def clean(self):\n cleaned_data = super(SignupForm, self).clean()\n password = cleaned_data.get('password')\n same_password_again = cleaned_data.get('same_password_again')\n if not password == same_password_again:\n msg = 'You must type the same password twice!'\n self._errors['same_password_again'] = self.error_class([msg])\n raise forms.ValidationError(msg)\n return cleaned_data\n\n\nclass KeyForm(forms.Form):\n name = forms.CharField()\n key = forms.CharField(widget=forms.Textarea)\n",
"step-2": "<mask token>\n\n\nclass PasswordRecoveryForm(forms.Form):\n <mask token>\n <mask token>\n\n def send(self):\n url = '{0}/users/{1}/password?token={2}'.format(settings.TSURU_HOST,\n self.cleaned_data['email'], self.cleaned_data['token'])\n requests.post(url)\n\n\nclass TokenRequestForm(forms.Form):\n email = forms.EmailField()\n\n def send(self):\n url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.\n cleaned_data['email'])\n requests.post(url)\n\n\nclass LoginForm(forms.Form):\n username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs\n ={'placeholder': 'Username'}))\n password = forms.CharField(widget=widgets.PasswordInput(attrs={\n 'placeholder': 'Password'}), min_length=6)\n\n\nclass AddUserToTeamForm(forms.Form):\n\n def __init__(self, teams=None, *args, **kwargs):\n super(AddUserToTeamForm, self).__init__(*args, **kwargs)\n if teams:\n choices = []\n for team in teams:\n choices.append((team, team))\n self.fields['team'].choices = choices\n email = forms.EmailField(max_length=60)\n team = forms.ChoiceField(choices=[])\n\n\nclass SignupForm(forms.Form):\n email = forms.EmailField(max_length=60)\n password = forms.CharField(widget=widgets.PasswordInput, min_length=6)\n same_password_again = forms.CharField(widget=widgets.PasswordInput,\n min_length=6)\n\n def clean(self):\n cleaned_data = super(SignupForm, self).clean()\n password = cleaned_data.get('password')\n same_password_again = cleaned_data.get('same_password_again')\n if not password == same_password_again:\n msg = 'You must type the same password twice!'\n self._errors['same_password_again'] = self.error_class([msg])\n raise forms.ValidationError(msg)\n return cleaned_data\n\n\nclass KeyForm(forms.Form):\n name = forms.CharField()\n key = forms.CharField(widget=forms.Textarea)\n",
"step-3": "<mask token>\n\n\nclass ChangePasswordForm(forms.Form):\n old = forms.CharField(widget=forms.PasswordInput())\n new = forms.CharField(widget=forms.PasswordInput())\n confirm = forms.CharField(widget=forms.PasswordInput())\n\n\nclass PasswordRecoveryForm(forms.Form):\n email = forms.EmailField()\n token = forms.CharField()\n\n def send(self):\n url = '{0}/users/{1}/password?token={2}'.format(settings.TSURU_HOST,\n self.cleaned_data['email'], self.cleaned_data['token'])\n requests.post(url)\n\n\nclass TokenRequestForm(forms.Form):\n email = forms.EmailField()\n\n def send(self):\n url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.\n cleaned_data['email'])\n requests.post(url)\n\n\nclass LoginForm(forms.Form):\n username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs\n ={'placeholder': 'Username'}))\n password = forms.CharField(widget=widgets.PasswordInput(attrs={\n 'placeholder': 'Password'}), min_length=6)\n\n\nclass AddUserToTeamForm(forms.Form):\n\n def __init__(self, teams=None, *args, **kwargs):\n super(AddUserToTeamForm, self).__init__(*args, **kwargs)\n if teams:\n choices = []\n for team in teams:\n choices.append((team, team))\n self.fields['team'].choices = choices\n email = forms.EmailField(max_length=60)\n team = forms.ChoiceField(choices=[])\n\n\nclass SignupForm(forms.Form):\n email = forms.EmailField(max_length=60)\n password = forms.CharField(widget=widgets.PasswordInput, min_length=6)\n same_password_again = forms.CharField(widget=widgets.PasswordInput,\n min_length=6)\n\n def clean(self):\n cleaned_data = super(SignupForm, self).clean()\n password = cleaned_data.get('password')\n same_password_again = cleaned_data.get('same_password_again')\n if not password == same_password_again:\n msg = 'You must type the same password twice!'\n self._errors['same_password_again'] = self.error_class([msg])\n raise forms.ValidationError(msg)\n return cleaned_data\n\n\nclass KeyForm(forms.Form):\n name = forms.CharField()\n 
key = forms.CharField(widget=forms.Textarea)\n",
"step-4": "from django import forms\nfrom django.forms import widgets\nfrom tsuru_dashboard import settings\nimport requests\n\n\nclass ChangePasswordForm(forms.Form):\n old = forms.CharField(widget=forms.PasswordInput())\n new = forms.CharField(widget=forms.PasswordInput())\n confirm = forms.CharField(widget=forms.PasswordInput())\n\n\nclass PasswordRecoveryForm(forms.Form):\n email = forms.EmailField()\n token = forms.CharField()\n\n def send(self):\n url = '{0}/users/{1}/password?token={2}'.format(settings.TSURU_HOST,\n self.cleaned_data['email'], self.cleaned_data['token'])\n requests.post(url)\n\n\nclass TokenRequestForm(forms.Form):\n email = forms.EmailField()\n\n def send(self):\n url = '{0}/users/{1}/password'.format(settings.TSURU_HOST, self.\n cleaned_data['email'])\n requests.post(url)\n\n\nclass LoginForm(forms.Form):\n username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs\n ={'placeholder': 'Username'}))\n password = forms.CharField(widget=widgets.PasswordInput(attrs={\n 'placeholder': 'Password'}), min_length=6)\n\n\nclass AddUserToTeamForm(forms.Form):\n\n def __init__(self, teams=None, *args, **kwargs):\n super(AddUserToTeamForm, self).__init__(*args, **kwargs)\n if teams:\n choices = []\n for team in teams:\n choices.append((team, team))\n self.fields['team'].choices = choices\n email = forms.EmailField(max_length=60)\n team = forms.ChoiceField(choices=[])\n\n\nclass SignupForm(forms.Form):\n email = forms.EmailField(max_length=60)\n password = forms.CharField(widget=widgets.PasswordInput, min_length=6)\n same_password_again = forms.CharField(widget=widgets.PasswordInput,\n min_length=6)\n\n def clean(self):\n cleaned_data = super(SignupForm, self).clean()\n password = cleaned_data.get('password')\n same_password_again = cleaned_data.get('same_password_again')\n if not password == same_password_again:\n msg = 'You must type the same password twice!'\n self._errors['same_password_again'] = self.error_class([msg])\n raise 
forms.ValidationError(msg)\n return cleaned_data\n\n\nclass KeyForm(forms.Form):\n name = forms.CharField()\n key = forms.CharField(widget=forms.Textarea)\n",
"step-5": "from django import forms\nfrom django.forms import widgets\nfrom tsuru_dashboard import settings\n\nimport requests\n\n\nclass ChangePasswordForm(forms.Form):\n old = forms.CharField(widget=forms.PasswordInput())\n new = forms.CharField(widget=forms.PasswordInput())\n confirm = forms.CharField(widget=forms.PasswordInput())\n\n\nclass PasswordRecoveryForm(forms.Form):\n email = forms.EmailField()\n token = forms.CharField()\n\n def send(self):\n url = \"{0}/users/{1}/password?token={2}\".format(\n settings.TSURU_HOST,\n self.cleaned_data['email'],\n self.cleaned_data['token']\n )\n requests.post(url)\n\n\nclass TokenRequestForm(forms.Form):\n email = forms.EmailField()\n\n def send(self):\n url = \"{0}/users/{1}/password\".format(settings.TSURU_HOST,\n self.cleaned_data['email'])\n requests.post(url)\n\n\nclass LoginForm(forms.Form):\n username = forms.EmailField(max_length=60, widget=forms.TextInput(attrs={'placeholder': 'Username'}))\n password = forms.CharField(widget=widgets.PasswordInput(attrs={'placeholder': 'Password'}), min_length=6)\n\n\nclass AddUserToTeamForm(forms.Form):\n\n def __init__(self, teams=None, *args, **kwargs):\n super(AddUserToTeamForm, self).__init__(*args, **kwargs)\n if teams:\n choices = []\n for team in teams:\n choices.append((team, team))\n self.fields[\"team\"].choices = choices\n\n email = forms.EmailField(max_length=60)\n team = forms.ChoiceField(choices=[])\n\n\nclass SignupForm(forms.Form):\n email = forms.EmailField(max_length=60)\n password = forms.CharField(widget=widgets.PasswordInput, min_length=6)\n same_password_again = forms.CharField(widget=widgets.PasswordInput,\n min_length=6)\n\n def clean(self):\n cleaned_data = super(SignupForm, self).clean()\n password = cleaned_data.get(\"password\")\n same_password_again = cleaned_data.get(\"same_password_again\")\n\n if not password == same_password_again:\n msg = \"You must type the same password twice!\"\n self._errors[\"same_password_again\"] = 
self.error_class([msg])\n raise forms.ValidationError(msg)\n\n return cleaned_data\n\n\nclass KeyForm(forms.Form):\n name = forms.CharField()\n key = forms.CharField(widget=forms.Textarea)\n",
"step-ids": [
14,
15,
18,
19,
20
]
}
|
[
14,
15,
18,
19,
20
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getKeys(f):
keys = {}
f = open(f, 'r')
for line in f:
apiInfo = line.split(',')
keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)
keys.pop('apiName', None)
return keys
<|reserved_special_token_1|>
import string
def getKeys(f):
keys = {}
f = open(f, 'r')
for line in f:
apiInfo = line.split(',')
keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)
keys.pop('apiName', None)
return keys
<|reserved_special_token_1|>
import string
#takes file as input, outputs a dictionary of keys from the file
#file should be in format (apiName, key/id)
#dictionary key = apiName, value = key/id
def getKeys(f):
keys = {}
f = open(f, 'r')
for line in f:
apiInfo = line.split(',')
keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)
keys.pop('apiName', None)
return keys
#print(getKeys('keys.txt'))
|
flexible
|
{
"blob_id": "3653c6fce33467600a3eea72578ed995606bfc03",
"index": 4100,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef getKeys(f):\n keys = {}\n f = open(f, 'r')\n for line in f:\n apiInfo = line.split(',')\n keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)\n keys.pop('apiName', None)\n return keys\n",
"step-3": "import string\n\n\ndef getKeys(f):\n keys = {}\n f = open(f, 'r')\n for line in f:\n apiInfo = line.split(',')\n keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)\n keys.pop('apiName', None)\n return keys\n",
"step-4": "import string\n\n#takes file as input, outputs a dictionary of keys from the file\n#file should be in format (apiName, key/id)\n#dictionary key = apiName, value = key/id\ndef getKeys(f):\n keys = {}\n f = open(f, 'r')\n for line in f:\n apiInfo = line.split(',')\n keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)\n keys.pop('apiName', None)\n return keys\n\n#print(getKeys('keys.txt'))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# coding: utf-8
"""
Negotiation API
The <b>Negotiations API</b> gives sellers the ability to proactively send discount offers to buyers who have shown an \"interest\" in their listings. <br><br>By sending buyers discount offers on listings where they have shown an interest, sellers can increase the velocity of their sales. <br><br>There are various ways for a buyer to show <i>interest </i> in a listing. For example, if a buyer adds the listing to their <b>Watch</b> list, or if they add the listing to their shopping cart and later abandon the cart, they are deemed to have shown an interest in the listing. <br><br>In the offers that sellers send, they can discount their listings by either a percentage off the listing price, or they can set a new discounted price that is lower than the original listing price. <br><br>For details about how seller offers work, see <a href=\"/api-docs/sell/static/marketing/offers-to-buyers.html\" title=\"Selling Integration Guide\">Sending offers to buyers</a>. # noqa: E501
OpenAPI spec version: v1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...sell_negotiation.api_client import ApiClient
class OfferApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def find_eligible_items(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501
"""find_eligible_items # noqa: E501
This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_eligible_items(x_ebay_c_marketplace_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
:param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10
:param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0
:return: PagedEligibleItemCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501
else:
(data) = self.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501
return data
def find_eligible_items_with_http_info(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501
"""find_eligible_items # noqa: E501
This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
:param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10
:param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0
:return: PagedEligibleItemCollection
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['x_ebay_c_marketplace_id', 'limit', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_eligible_items" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'x_ebay_c_marketplace_id' is set
if ('x_ebay_c_marketplace_id' not in params or
params['x_ebay_c_marketplace_id'] is None):
raise ValueError("Missing the required parameter `x_ebay_c_marketplace_id` when calling `find_eligible_items`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
if 'x_ebay_c_marketplace_id' in params:
header_params['X-EBAY-C-MARKETPLACE-ID'] = params['x_ebay_c_marketplace_id'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_auth'] # noqa: E501
return self.api_client.call_api(
'/find_eligible_items', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PagedEligibleItemCollection', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def send_offer_to_interested_buyers(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501
"""send_offer_to_interested_buyers # noqa: E501
This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.send_offer_to_interested_buyers(x_ebay_c_marketplace_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
:param CreateOffersRequest body: Send offer to eligible items request.
:return: SendOfferToInterestedBuyersCollectionResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501
else:
(data) = self.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501
return data
def send_offer_to_interested_buyers_with_http_info(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501
"""send_offer_to_interested_buyers # noqa: E501
This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)
:param CreateOffersRequest body: Send offer to eligible items request.
:return: SendOfferToInterestedBuyersCollectionResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['x_ebay_c_marketplace_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method send_offer_to_interested_buyers" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'x_ebay_c_marketplace_id' is set
if ('x_ebay_c_marketplace_id' not in params or
params['x_ebay_c_marketplace_id'] is None):
raise ValueError("Missing the required parameter `x_ebay_c_marketplace_id` when calling `send_offer_to_interested_buyers`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'x_ebay_c_marketplace_id' in params:
header_params['X-EBAY-C-MARKETPLACE-ID'] = params['x_ebay_c_marketplace_id'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_auth'] # noqa: E501
return self.api_client.call_api(
'/send_offer_to_interested_buyers', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendOfferToInterestedBuyersCollectionResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
normal
|
{
"blob_id": "a93818440410bde004f0203f18112fa1b666959c",
"index": 9615,
"step-1": "<mask token>\n\n\nclass OfferApi(object):\n <mask token>\n <mask token>\n <mask token>\n\n def find_eligible_items_with_http_info(self, x_ebay_c_marketplace_id,\n **kwargs):\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. 
If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'limit', 'offset']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method find_eligible_items\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `find_eligible_items`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'limit' in params:\n query_params.append(('limit', params['limit']))\n if 'offset' in params:\n query_params.append(('offset', params['offset']))\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n auth_settings = ['api_auth']\n return self.api_client.call_api('/find_eligible_items', 'GET',\n path_params, query_params, header_params, body=body_params,\n post_params=form_params, files=local_var_files, response_type=\n 'PagedEligibleItemCollection', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def 
send_offer_to_interested_buyers(self, x_ebay_c_marketplace_id, **kwargs\n ):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n else:\n data = self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n return data\n\n def send_offer_to_interested_buyers_with_http_info(self,\n x_ebay_c_marketplace_id, **kwargs):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'body']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method send_offer_to_interested_buyers\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `send_offer_to_interested_buyers`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n if 'body' in params:\n body_params = params['body']\n header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['api_auth']\n return self.api_client.call_api('/send_offer_to_interested_buyers',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='SendOfferToInterestedBuyersCollectionResponse',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n 
collection_formats=collection_formats)\n",
"step-2": "<mask token>\n\n\nclass OfferApi(object):\n <mask token>\n <mask token>\n\n def find_eligible_items(self, x_ebay_c_marketplace_id, **kwargs):\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. 
Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.find_eligible_items_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n else:\n data = self.find_eligible_items_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n return data\n\n def find_eligible_items_with_http_info(self, x_ebay_c_marketplace_id,\n **kwargs):\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. 
Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'limit', 'offset']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method find_eligible_items\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `find_eligible_items`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'limit' in params:\n query_params.append(('limit', params['limit']))\n if 'offset' in params:\n query_params.append(('offset', params['offset']))\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n auth_settings = ['api_auth']\n return self.api_client.call_api('/find_eligible_items', 'GET',\n path_params, query_params, header_params, body=body_params,\n post_params=form_params, files=local_var_files, response_type=\n 'PagedEligibleItemCollection', auth_settings=auth_settings,\n 
async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def send_offer_to_interested_buyers(self, x_ebay_c_marketplace_id, **kwargs\n ):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n else:\n data = self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n return data\n\n def send_offer_to_interested_buyers_with_http_info(self,\n x_ebay_c_marketplace_id, **kwargs):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'body']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method send_offer_to_interested_buyers\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `send_offer_to_interested_buyers`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n if 'body' in params:\n body_params = params['body']\n header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['api_auth']\n return self.api_client.call_api('/send_offer_to_interested_buyers',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='SendOfferToInterestedBuyersCollectionResponse',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n 
collection_formats=collection_formats)\n",
"step-3": "<mask token>\n\n\nclass OfferApi(object):\n <mask token>\n\n def __init__(self, api_client=None):\n if api_client is None:\n api_client = ApiClient()\n self.api_client = api_client\n\n def find_eligible_items(self, x_ebay_c_marketplace_id, **kwargs):\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. 
If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.find_eligible_items_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n else:\n data = self.find_eligible_items_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n return data\n\n def find_eligible_items_with_http_info(self, x_ebay_c_marketplace_id,\n **kwargs):\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. 
Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'limit', 'offset']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method find_eligible_items\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `find_eligible_items`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'limit' in params:\n query_params.append(('limit', params['limit']))\n if 'offset' in params:\n query_params.append(('offset', params['offset']))\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n auth_settings = ['api_auth']\n return self.api_client.call_api('/find_eligible_items', 
'GET',\n path_params, query_params, header_params, body=body_params,\n post_params=form_params, files=local_var_files, response_type=\n 'PagedEligibleItemCollection', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def send_offer_to_interested_buyers(self, x_ebay_c_marketplace_id, **kwargs\n ):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n else:\n data = self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n return data\n\n def send_offer_to_interested_buyers_with_http_info(self,\n x_ebay_c_marketplace_id, **kwargs):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'body']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method send_offer_to_interested_buyers\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `send_offer_to_interested_buyers`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n if 'body' in params:\n body_params = params['body']\n header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['api_auth']\n return self.api_client.call_api('/send_offer_to_interested_buyers',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='SendOfferToInterestedBuyersCollectionResponse',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n 
collection_formats=collection_formats)\n",
"step-4": "<mask token>\nfrom __future__ import absolute_import\nimport re\nimport six\nfrom ...sell_negotiation.api_client import ApiClient\n\n\nclass OfferApi(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n Ref: https://github.com/swagger-api/swagger-codegen\n \"\"\"\n\n def __init__(self, api_client=None):\n if api_client is None:\n api_client = ApiClient()\n self.api_client = api_client\n\n def find_eligible_items(self, x_ebay_c_marketplace_id, **kwargs):\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. 
Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.find_eligible_items_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n else:\n data = self.find_eligible_items_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n return data\n\n def find_eligible_items_with_http_info(self, x_ebay_c_marketplace_id,\n **kwargs):\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'limit', 'offset']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method find_eligible_items\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `find_eligible_items`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'limit' in params:\n query_params.append(('limit', params['limit']))\n if 'offset' in params:\n query_params.append(('offset', params['offset']))\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n 
header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n auth_settings = ['api_auth']\n return self.api_client.call_api('/find_eligible_items', 'GET',\n path_params, query_params, header_params, body=body_params,\n post_params=form_params, files=local_var_files, response_type=\n 'PagedEligibleItemCollection', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def send_offer_to_interested_buyers(self, x_ebay_c_marketplace_id, **kwargs\n ):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n else:\n data = self.send_offer_to_interested_buyers_with_http_info(\n x_ebay_c_marketplace_id, **kwargs)\n return data\n\n def send_offer_to_interested_buyers_with_http_info(self,\n x_ebay_c_marketplace_id, **kwargs):\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['x_ebay_c_marketplace_id', 'body']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method send_offer_to_interested_buyers\"\n % key)\n params[key] = val\n del params['kwargs']\n if 'x_ebay_c_marketplace_id' not in params or params[\n 'x_ebay_c_marketplace_id'] is None:\n raise ValueError(\n 'Missing the required parameter `x_ebay_c_marketplace_id` when calling `send_offer_to_interested_buyers`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params[\n 'x_ebay_c_marketplace_id']\n form_params = []\n local_var_files = {}\n body_params = None\n if 'body' in params:\n body_params = params['body']\n header_params['Accept'] = self.api_client.select_header_accept([\n 'application/json'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['api_auth']\n return self.api_client.call_api('/send_offer_to_interested_buyers',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='SendOfferToInterestedBuyersCollectionResponse',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n 
collection_formats=collection_formats)\n",
"step-5": "# coding: utf-8\n\n\"\"\"\n Negotiation API\n\n The <b>Negotiations API</b> gives sellers the ability to proactively send discount offers to buyers who have shown an \\\"interest\\\" in their listings. <br><br>By sending buyers discount offers on listings where they have shown an interest, sellers can increase the velocity of their sales. <br><br>There are various ways for a buyer to show <i>interest </i> in a listing. For example, if a buyer adds the listing to their <b>Watch</b> list, or if they add the listing to their shopping cart and later abandon the cart, they are deemed to have shown an interest in the listing. <br><br>In the offers that sellers send, they can discount their listings by either a percentage off the listing price, or they can set a new discounted price that is lower than the original listing price. <br><br>For details about how seller offers work, see <a href=\\\"/api-docs/sell/static/marketing/offers-to-buyers.html\\\" title=\\\"Selling Integration Guide\\\">Sending offers to buyers</a>. # noqa: E501\n\n OpenAPI spec version: v1.1.0\n \n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\nfrom __future__ import absolute_import\n\nimport re # noqa: F401\n\n# python 2 and python 3 compatibility library\nimport six\n\nfrom ...sell_negotiation.api_client import ApiClient\n\n\nclass OfferApi(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n Ref: https://github.com/swagger-api/swagger-codegen\n \"\"\"\n\n def __init__(self, api_client=None):\n if api_client is None:\n api_client = ApiClient()\n self.api_client = api_client\n\n def find_eligible_items(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. 
A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. 
Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501\n else:\n (data) = self.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501\n return data\n\n def find_eligible_items_with_http_info(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501\n \"\"\"find_eligible_items # noqa: E501\n\n This method evaluates a seller's current listings and returns the set of IDs that are eligible for a seller-initiated discount offer to a buyer. A listing ID is returned only when one or more buyers have shown an "interest" in the listing. If any buyers have shown interest in a listing, the seller can initiate a "negotiation" with them by calling sendOfferToInterestedBuyers, which sends all interested buyers a message that offers the listing at a discount. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.find_eligible_items_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which you want to search for eligible listings. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. (required)\n :param str limit: This query parameter specifies the maximum number of items to return from the result set on a page in the paginated response. Minimum: 1 Maximum: 200 Default: 10\n :param str offset: This query parameter specifies the number of results to skip in the result set before returning the first result in the paginated response. 
Combine offset with the limit query parameter to control the items returned in the response. For example, if you supply an offset of 0 and a limit of 10, the first page of the response contains the first 10 results from the complete list of items retrieved by the call. If offset is 10 and limit is 20, the first page of the response contains items 11-30 from the complete result set. Default: 0\n :return: PagedEligibleItemCollection\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['x_ebay_c_marketplace_id', 'limit', 'offset'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method find_eligible_items\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'x_ebay_c_marketplace_id' is set\n if ('x_ebay_c_marketplace_id' not in params or\n params['x_ebay_c_marketplace_id'] is None):\n raise ValueError(\"Missing the required parameter `x_ebay_c_marketplace_id` when calling `find_eligible_items`\") # noqa: E501\n\n collection_formats = {}\n\n path_params = {}\n\n query_params = []\n if 'limit' in params:\n query_params.append(('limit', params['limit'])) # noqa: E501\n if 'offset' in params:\n query_params.append(('offset', params['offset'])) # noqa: E501\n\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params['x_ebay_c_marketplace_id'] # noqa: E501\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['api_auth'] # noqa: E501\n\n return 
self.api_client.call_api(\n '/find_eligible_items', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='PagedEligibleItemCollection', # noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def send_offer_to_interested_buyers(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501\n else:\n (data) = self.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, **kwargs) # noqa: E501\n return data\n\n def send_offer_to_interested_buyers_with_http_info(self, x_ebay_c_marketplace_id, **kwargs): # noqa: E501\n \"\"\"send_offer_to_interested_buyers # noqa: E501\n\n This method sends eligible buyers offers to purchase items in a listing at a discount. When a buyer has shown interest in a listing, they become "eligible" to receive a seller-initiated offer to purchase the item(s). Sellers use findEligibleItems to get the set of listings that have interested buyers. If a listing has interested buyers, sellers can use this method (sendOfferToInterestedBuyers) to send an offer to the buyers who are interested in the listing. The offer gives buyers the ability to purchase the associated listings at a discounted price. For details about how to create seller offers to buyers, see Sending offers to buyers. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.send_offer_to_interested_buyers_with_http_info(x_ebay_c_marketplace_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str x_ebay_c_marketplace_id: The eBay marketplace on which your listings with "eligible" buyers appear. For a complete list of supported marketplaces, see Negotiation API requirements and restrictions. 
(required)\n :param CreateOffersRequest body: Send offer to eligible items request.\n :return: SendOfferToInterestedBuyersCollectionResponse\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['x_ebay_c_marketplace_id', 'body'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method send_offer_to_interested_buyers\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'x_ebay_c_marketplace_id' is set\n if ('x_ebay_c_marketplace_id' not in params or\n params['x_ebay_c_marketplace_id'] is None):\n raise ValueError(\"Missing the required parameter `x_ebay_c_marketplace_id` when calling `send_offer_to_interested_buyers`\") # noqa: E501\n\n collection_formats = {}\n\n path_params = {}\n\n query_params = []\n\n header_params = {}\n if 'x_ebay_c_marketplace_id' in params:\n header_params['X-EBAY-C-MARKETPLACE-ID'] = params['x_ebay_c_marketplace_id'] # noqa: E501\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['application/json']) # noqa: E501\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['api_auth'] # noqa: E501\n\n return self.api_client.call_api(\n '/send_offer_to_interested_buyers', 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='SendOfferToInterestedBuyersCollectionResponse', # 
noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n",
"step-ids": [
4,
5,
6,
8,
9
]
}
|
[
4,
5,
6,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if sys.hexversion < 50331648:
from .foo import foo
<|reserved_special_token_1|>
import sys
if sys.hexversion < 50331648:
from .foo import foo
<|reserved_special_token_1|>
# Version-gated import: 0x03000000 is sys.hexversion for CPython 3.0.0,
# so `foo` is pulled from the sibling module only on Python 2 interpreters.
import sys
if sys.hexversion < 0x03000000:
    from .foo import foo
|
flexible
|
{
"blob_id": "485729398b51bebd16f38800c6100289b7b0b347",
"index": 9023,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif sys.hexversion < 50331648:\n from .foo import foo\n",
"step-3": "import sys\nif sys.hexversion < 50331648:\n from .foo import foo\n",
"step-4": "\nimport sys\n\nif sys.hexversion < 0x03000000:\n from .foo import foo\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
plt.xlabel('Freq (in rad/s)')
plt.ylabel('Phase (in deg)')
plt.title('Phase plot')
plt.semilogx(w, phase1, label='With Controller')
plt.semilogx(w, phase2, label='Without Controller')
plt.grid()
plt.legend()
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
K = 96
Kp = 1
Td = 1.884
s1 = signal.lti([-1 / Td], [0, -2, -4, -6], K)
s2 = signal.lti([], [0, -2, -4, -6], K)
w, mag1, phase1 = signal.bode(s1)
_, mag2, phase2 = signal.bode(s2)
plt.xlabel('Freq (in rad/s)')
plt.ylabel('Phase (in deg)')
plt.title('Phase plot')
plt.semilogx(w, phase1, label='With Controller')
plt.semilogx(w, phase2, label='Without Controller')
plt.grid()
plt.legend()
plt.show()
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
from scipy import signal
K = 96
Kp = 1
Td = 1.884
s1 = signal.lti([-1 / Td], [0, -2, -4, -6], K)
s2 = signal.lti([], [0, -2, -4, -6], K)
w, mag1, phase1 = signal.bode(s1)
_, mag2, phase2 = signal.bode(s2)
plt.xlabel('Freq (in rad/s)')
plt.ylabel('Phase (in deg)')
plt.title('Phase plot')
plt.semilogx(w, phase1, label='With Controller')
plt.semilogx(w, phase2, label='Without Controller')
plt.grid()
plt.legend()
plt.show()
<|reserved_special_token_1|>
#Coded by J. Prabhath
#14th April, 2020
#Released under GNU GPL

# Compare the phase response of G(s) = K / (s (s+2)(s+4)(s+6)) with and
# without a PD-style controller zero at -1/Td.
import numpy as np
import matplotlib.pyplot as plt
from scipy import signal

K = 96
Kp = 1  # NOTE(review): unused in this script -- presumably the proportional gain.
Td = 1.884  # Derivative time constant; places the controller zero at -1/Td.

# Systems in zpk form: (zeros, poles, gain).
s1 = signal.lti([-1/Td],[0,-2,-4,-6], K)
s2 = signal.lti([],[0,-2,-4,-6], K)
w,mag1,phase1 = signal.bode(s1)
# BUG FIX: evaluate the second system on the SAME frequency grid `w`.
# The original called signal.bode(s2) with no grid, letting it choose its
# own frequencies, and then plotted that phase against s1's `w`.
_,mag2,phase2 = signal.bode(s2, w)

plt.xlabel('Freq (in rad/s)')
plt.ylabel('Phase (in deg)')
plt.title('Phase plot')
plt.semilogx(w,phase1, label = 'With Controller')
plt.semilogx(w,phase2, label = 'Without Controller')
plt.grid()
plt.legend()
plt.show()
|
flexible
|
{
"blob_id": "84e84d9f35702c2572ad5e7daa92a271674986dc",
"index": 3882,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.xlabel('Freq (in rad/s)')\nplt.ylabel('Phase (in deg)')\nplt.title('Phase plot')\nplt.semilogx(w, phase1, label='With Controller')\nplt.semilogx(w, phase2, label='Without Controller')\nplt.grid()\nplt.legend()\nplt.show()\n",
"step-3": "<mask token>\nK = 96\nKp = 1\nTd = 1.884\ns1 = signal.lti([-1 / Td], [0, -2, -4, -6], K)\ns2 = signal.lti([], [0, -2, -4, -6], K)\nw, mag1, phase1 = signal.bode(s1)\n_, mag2, phase2 = signal.bode(s2)\nplt.xlabel('Freq (in rad/s)')\nplt.ylabel('Phase (in deg)')\nplt.title('Phase plot')\nplt.semilogx(w, phase1, label='With Controller')\nplt.semilogx(w, phase2, label='Without Controller')\nplt.grid()\nplt.legend()\nplt.show()\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy import signal\nK = 96\nKp = 1\nTd = 1.884\ns1 = signal.lti([-1 / Td], [0, -2, -4, -6], K)\ns2 = signal.lti([], [0, -2, -4, -6], K)\nw, mag1, phase1 = signal.bode(s1)\n_, mag2, phase2 = signal.bode(s2)\nplt.xlabel('Freq (in rad/s)')\nplt.ylabel('Phase (in deg)')\nplt.title('Phase plot')\nplt.semilogx(w, phase1, label='With Controller')\nplt.semilogx(w, phase2, label='Without Controller')\nplt.grid()\nplt.legend()\nplt.show()\n",
"step-5": "#Coded by J. Prabhath\n#14th April, 2020\n#Released under GNU GPL\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy import signal\n\nK = 96\nKp = 1\nTd = 1.884\n\ns1 = signal.lti([-1/Td],[0,-2,-4,-6], K)\ns2 = signal.lti([],[0,-2,-4,-6], K)\nw,mag1,phase1 = signal.bode(s1)\n_,mag2,phase2 = signal.bode(s2)\n\nplt.xlabel('Freq (in rad/s)')\nplt.ylabel('Phase (in deg)')\nplt.title('Phase plot')\nplt.semilogx(w,phase1, label = 'With Controller')\nplt.semilogx(w,phase2, label = 'Without Controller')\nplt.grid()\nplt.legend()\nplt.show()\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StyblinskiTang(Function2D):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def grad(self, x):
""" Grad function. """
g = np.zeros(x.shape)
g[0] = -16.0 * x[0] ** 1.0 + 2.0 * x[0] ** 3.0 + 2.5
g[1] = -16.0 * x[1] ** 1.0 + 2.0 * x[1] ** 3.0 + 2.5
return g
def hess(self, x):
""" Hess function. """
h = np.zeros((2, 2) + x.shape[1:])
h[0][0] = 6.0 * x[0] ** 2.0 - 16.0
h[0][1] = 0.0
h[1][0] = h[0][1]
h[1][1] = 6.0 * x[1] ** 2.0 - 16.0
return h
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StyblinskiTang(Function2D):
""" Styblinski-Tang Function. """
def __init__(self):
""" Constructor. """
self.min = np.array([-2.903534, -2.903534])
self.value = -39.16599 * 2.0
self.domain = np.array([[-5.0, 5.0], [-5.0, 5.0]])
self.n = 2
self.smooth = True
self.info = [True, True, True]
self.latex_name = 'Styblinski-Tang Function'
self.latex_type = 'Other'
self.latex_cost = (
'\\[ f(\\mathbf{x}) = \\frac{1}{2} \\sum_{i=0}^{d-1} (x_i^4 - 16 x_i^2 + 5 x_i) \\]'
)
self.latex_desc = (
'The local minima are separated by a local maximum. There is only a single global minimum.'
)
def cost(self, x):
""" Cost function. """
c = np.zeros(x.shape[1:])
c = 0.5 * (x[0] ** 4.0 - 16 * x[0] ** 2.0 + 5.0 * x[0] + x[1] **
4.0 - 16 * x[1] ** 2.0 + 5.0 * x[1])
return c
def grad(self, x):
""" Grad function. """
g = np.zeros(x.shape)
g[0] = -16.0 * x[0] ** 1.0 + 2.0 * x[0] ** 3.0 + 2.5
g[1] = -16.0 * x[1] ** 1.0 + 2.0 * x[1] ** 3.0 + 2.5
return g
def hess(self, x):
""" Hess function. """
h = np.zeros((2, 2) + x.shape[1:])
h[0][0] = 6.0 * x[0] ** 2.0 - 16.0
h[0][1] = 0.0
h[1][0] = h[0][1]
h[1][1] = 6.0 * x[1] ** 2.0 - 16.0
return h
<|reserved_special_token_1|>
import numpy as np
from ctf.functions2d.function2d import Function2D
class StyblinskiTang(Function2D):
    """ Styblinski-Tang Function.

    Two-dimensional Styblinski-Tang benchmark:
        f(x) = 0.5 * sum_i (x_i^4 - 16 x_i^2 + 5 x_i)
    Single global minimum near x_i = -2.903534 with value ~ -78.33198;
    the local minima are separated by a local maximum.
    """

    def __init__(self):
        """ Constructor: record optimum, domain, and LaTeX metadata. """
        self.min = np.array([-2.903534, -2.903534])
        self.value = -39.16599 * 2.0  # per-coordinate minimum times d=2
        self.domain = np.array([[-5.0, 5.0], [-5.0, 5.0]])
        self.n = 2
        self.smooth = True
        # Flags: [cost, grad, hess] implemented -- TODO confirm meaning
        # against the Function2D base class.
        self.info = [True, True, True]
        self.latex_name = 'Styblinski-Tang Function'
        self.latex_type = 'Other'
        self.latex_cost = (
            '\\[ f(\\mathbf{x}) = \\frac{1}{2} \\sum_{i=0}^{d-1} (x_i^4 - 16 x_i^2 + 5 x_i) \\]'
        )
        self.latex_desc = (
            'The local minima are separated by a local maximum. There is only a single global minimum.'
        )

    def cost(self, x):
        """ Cost function.

        x: array whose first axis holds the two coordinates; extra axes
        (e.g. meshgrid inputs) are evaluated pointwise by broadcasting.
        """
        # BUG FIX: removed a dead `c = np.zeros(x.shape[1:])` allocation
        # that the original immediately rebound.
        return 0.5 * (x[0] ** 4.0 - 16 * x[0] ** 2.0 + 5.0 * x[0] + x[1] **
            4.0 - 16 * x[1] ** 2.0 + 5.0 * x[1])

    def grad(self, x):
        """ Grad function: analytic gradient, same shape as x. """
        g = np.zeros(x.shape)
        # d/dx_i of 0.5*(x_i^4 - 16 x_i^2 + 5 x_i) = 2 x_i^3 - 16 x_i + 2.5
        g[0] = -16.0 * x[0] ** 1.0 + 2.0 * x[0] ** 3.0 + 2.5
        g[1] = -16.0 * x[1] ** 1.0 + 2.0 * x[1] ** 3.0 + 2.5
        return g

    def hess(self, x):
        """ Hess function: analytic Hessian; diagonal because the cost
        is separable across the two coordinates. """
        h = np.zeros((2, 2) + x.shape[1:])
        h[0][0] = 6.0 * x[0] ** 2.0 - 16.0
        h[0][1] = 0.0
        h[1][0] = h[0][1]
        h[1][1] = 6.0 * x[1] ** 2.0 - 16.0
        return h
<|reserved_special_token_1|>
# Imports
import numpy as np
from ctf.functions2d.function2d import Function2D
# Problem
class StyblinskiTang(Function2D):
""" Styblinski-Tang Function. """
def __init__(self):
""" Constructor. """
# Information
self.min = np.array([-2.903534, -2.903534])
self.value = -39.16599*2.0
self.domain = np.array([[-5.0, 5.0], [-5.0, 5.0]])
self.n = 2
self.smooth = True
self.info = [True, True, True]
# Description
self.latex_name = "Styblinski-Tang Function"
self.latex_type = "Other"
self.latex_cost = r'\[ f(\mathbf{x}) = \frac{1}{2} \sum_{i=0}^{d-1} (x_i^4 - 16 x_i^2 + 5 x_i) \]'
self.latex_desc = "The local minima are separated by a local maximum. There is only a single global minimum."
def cost(self, x):
""" Cost function. """
# Cost
c = np.zeros(x.shape[1:])
# Calculate Cost
c = 0.5*(x[0]**4.0 - 16*x[0]**2.0 + 5.0*x[0] + x[1]**4.0 - 16*x[1]**2.0 + 5.0*x[1])
# Return Cost
return c
def grad(self, x):
""" Grad function. """
# Grad
g = np.zeros(x.shape)
# Calculate Grads
g[0] = -16.0*x[0]**1.0 + 2.0*x[0]**3.0 + 2.5
g[1] = -16.0*x[1]**1.0 + 2.0*x[1]**3.0 + 2.5
# Return Grad
return g
def hess(self, x):
""" Hess function. """
# Hess
h = np.zeros((2, 2) + x.shape[1:])
# Calculate Hess
h[0][0] = 6.0*x[0]**2.0 - 16.0
h[0][1] = 0.0
h[1][0] = h[0][1]
h[1][1] = 6.0*x[1]**2.0 - 16.0
# Return Hess
return h
|
flexible
|
{
"blob_id": "5d8715dd02feff4e13919858051abeb5b6828011",
"index": 6798,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass StyblinskiTang(Function2D):\n <mask token>\n <mask token>\n <mask token>\n\n def grad(self, x):\n \"\"\" Grad function. \"\"\"\n g = np.zeros(x.shape)\n g[0] = -16.0 * x[0] ** 1.0 + 2.0 * x[0] ** 3.0 + 2.5\n g[1] = -16.0 * x[1] ** 1.0 + 2.0 * x[1] ** 3.0 + 2.5\n return g\n\n def hess(self, x):\n \"\"\" Hess function. \"\"\"\n h = np.zeros((2, 2) + x.shape[1:])\n h[0][0] = 6.0 * x[0] ** 2.0 - 16.0\n h[0][1] = 0.0\n h[1][0] = h[0][1]\n h[1][1] = 6.0 * x[1] ** 2.0 - 16.0\n return h\n",
"step-3": "<mask token>\n\n\nclass StyblinskiTang(Function2D):\n \"\"\" Styblinski-Tang Function. \"\"\"\n\n def __init__(self):\n \"\"\" Constructor. \"\"\"\n self.min = np.array([-2.903534, -2.903534])\n self.value = -39.16599 * 2.0\n self.domain = np.array([[-5.0, 5.0], [-5.0, 5.0]])\n self.n = 2\n self.smooth = True\n self.info = [True, True, True]\n self.latex_name = 'Styblinski-Tang Function'\n self.latex_type = 'Other'\n self.latex_cost = (\n '\\\\[ f(\\\\mathbf{x}) = \\\\frac{1}{2} \\\\sum_{i=0}^{d-1} (x_i^4 - 16 x_i^2 + 5 x_i) \\\\]'\n )\n self.latex_desc = (\n 'The local minima are separated by a local maximum. There is only a single global minimum.'\n )\n\n def cost(self, x):\n \"\"\" Cost function. \"\"\"\n c = np.zeros(x.shape[1:])\n c = 0.5 * (x[0] ** 4.0 - 16 * x[0] ** 2.0 + 5.0 * x[0] + x[1] ** \n 4.0 - 16 * x[1] ** 2.0 + 5.0 * x[1])\n return c\n\n def grad(self, x):\n \"\"\" Grad function. \"\"\"\n g = np.zeros(x.shape)\n g[0] = -16.0 * x[0] ** 1.0 + 2.0 * x[0] ** 3.0 + 2.5\n g[1] = -16.0 * x[1] ** 1.0 + 2.0 * x[1] ** 3.0 + 2.5\n return g\n\n def hess(self, x):\n \"\"\" Hess function. \"\"\"\n h = np.zeros((2, 2) + x.shape[1:])\n h[0][0] = 6.0 * x[0] ** 2.0 - 16.0\n h[0][1] = 0.0\n h[1][0] = h[0][1]\n h[1][1] = 6.0 * x[1] ** 2.0 - 16.0\n return h\n",
"step-4": "import numpy as np\nfrom ctf.functions2d.function2d import Function2D\n\n\nclass StyblinskiTang(Function2D):\n \"\"\" Styblinski-Tang Function. \"\"\"\n\n def __init__(self):\n \"\"\" Constructor. \"\"\"\n self.min = np.array([-2.903534, -2.903534])\n self.value = -39.16599 * 2.0\n self.domain = np.array([[-5.0, 5.0], [-5.0, 5.0]])\n self.n = 2\n self.smooth = True\n self.info = [True, True, True]\n self.latex_name = 'Styblinski-Tang Function'\n self.latex_type = 'Other'\n self.latex_cost = (\n '\\\\[ f(\\\\mathbf{x}) = \\\\frac{1}{2} \\\\sum_{i=0}^{d-1} (x_i^4 - 16 x_i^2 + 5 x_i) \\\\]'\n )\n self.latex_desc = (\n 'The local minima are separated by a local maximum. There is only a single global minimum.'\n )\n\n def cost(self, x):\n \"\"\" Cost function. \"\"\"\n c = np.zeros(x.shape[1:])\n c = 0.5 * (x[0] ** 4.0 - 16 * x[0] ** 2.0 + 5.0 * x[0] + x[1] ** \n 4.0 - 16 * x[1] ** 2.0 + 5.0 * x[1])\n return c\n\n def grad(self, x):\n \"\"\" Grad function. \"\"\"\n g = np.zeros(x.shape)\n g[0] = -16.0 * x[0] ** 1.0 + 2.0 * x[0] ** 3.0 + 2.5\n g[1] = -16.0 * x[1] ** 1.0 + 2.0 * x[1] ** 3.0 + 2.5\n return g\n\n def hess(self, x):\n \"\"\" Hess function. \"\"\"\n h = np.zeros((2, 2) + x.shape[1:])\n h[0][0] = 6.0 * x[0] ** 2.0 - 16.0\n h[0][1] = 0.0\n h[1][0] = h[0][1]\n h[1][1] = 6.0 * x[1] ** 2.0 - 16.0\n return h\n",
"step-5": "# Imports\nimport numpy as np\n\nfrom ctf.functions2d.function2d import Function2D\n\n\n\n# Problem\nclass StyblinskiTang(Function2D):\n \"\"\" Styblinski-Tang Function. \"\"\"\n\n def __init__(self):\n \"\"\" Constructor. \"\"\"\n # Information\n self.min = np.array([-2.903534, -2.903534])\n self.value = -39.16599*2.0\n self.domain = np.array([[-5.0, 5.0], [-5.0, 5.0]])\n self.n = 2\n self.smooth = True\n self.info = [True, True, True]\n # Description\n self.latex_name = \"Styblinski-Tang Function\"\n self.latex_type = \"Other\"\n self.latex_cost = r'\\[ f(\\mathbf{x}) = \\frac{1}{2} \\sum_{i=0}^{d-1} (x_i^4 - 16 x_i^2 + 5 x_i) \\]'\n self.latex_desc = \"The local minima are separated by a local maximum. There is only a single global minimum.\"\n\n def cost(self, x):\n \"\"\" Cost function. \"\"\"\n # Cost\n c = np.zeros(x.shape[1:])\n # Calculate Cost\n c = 0.5*(x[0]**4.0 - 16*x[0]**2.0 + 5.0*x[0] + x[1]**4.0 - 16*x[1]**2.0 + 5.0*x[1])\n # Return Cost\n return c\n\n def grad(self, x):\n \"\"\" Grad function. \"\"\"\n # Grad\n g = np.zeros(x.shape)\n # Calculate Grads\n g[0] = -16.0*x[0]**1.0 + 2.0*x[0]**3.0 + 2.5\n g[1] = -16.0*x[1]**1.0 + 2.0*x[1]**3.0 + 2.5\n # Return Grad\n return g\n\n def hess(self, x):\n \"\"\" Hess function. \"\"\"\n # Hess\n h = np.zeros((2, 2) + x.shape[1:])\n # Calculate Hess\n h[0][0] = 6.0*x[0]**2.0 - 16.0\n h[0][1] = 0.0\n h[1][0] = h[0][1]\n h[1][1] = 6.0*x[1]**2.0 - 16.0\n # Return Hess\n return h",
"step-ids": [
0,
3,
6,
7,
8
]
}
|
[
0,
3,
6,
7,
8
] |
# Advanced Python
# Class methods vs. instance methods vs. static methods

# Demonstration class using all three method kinds.
class Student(object):
    """
    Student Class
    Author : Kim
    Date : 2020.11.07
    Description : Class, Static, Instance Method

    Demonstrates plain instance methods, @classmethod mutators and
    alternate constructors, and a @staticmethod utility.
    """

    # Class variable: tuition multiplier shared by every instance.
    tuition_per = 1.0

    def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):
        self._id = id
        self._first_name = first_name
        self._last_name = last_name
        self._email = email
        self._grade = grade
        self._tuition = tuition
        self._gpa = gpa

    # Instance method: "First Last" display name.
    def full_name(self):
        return '{} {}'.format(self._first_name, self._last_name)

    # Instance method: one-line summary of every stored field.
    def detail_info(self):
        return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id, self.full_name(), self._email, self._grade, self._tuition, self._gpa)

    # Instance method: fee before the class-wide multiplier is applied.
    def get_fee(self):
        # BUG FIX: corrected the "Befor" typo in the message.
        return 'Before Tuition -> Id: {}, fee: {}'.format(self._id, self._tuition)

    # Instance method: fee after applying the shared multiplier.
    def get_fee_culc(self):
        return 'After tuition -> Id: {}, fee: {}'.format(self._id, self._tuition*Student.tuition_per)

    def __str__(self):
        return 'Student Info - > name: {} grade: {} email: {}'.format(self.full_name(), self._grade, self._email)

    # Class method: update the shared tuition multiplier.
    @classmethod
    def raise_fee(cls, per):
        if per <= 1:
            # BUG FIX: the original printed this warning but then applied
            # the invalid multiplier anyway; reject it instead.
            print('Please Enter 1 or More')
            return
        cls.tuition_per = per
        # BUG FIX: corrected the "Succed!"/"tuiton" typos in the message.
        print('Succeeded! tuition increased')

    # Class method: alternate constructor that bakes the current
    # multiplier into the stored tuition.
    @classmethod
    def student_const(cls, id, first_name, last_name, email, grade, tuition, gpa):
        return cls(id, first_name, last_name, email, grade, tuition * cls.tuition_per, gpa)

    # Static method: scholarship check for any object exposing _gpa.
    @staticmethod
    def is_scholarship_st(inst):
        if inst._gpa >= 4.3:
            return '{} is a scholarship recipient.'.format(inst._last_name)
        return "Sorry. Not a scholarship recipient"
# Student instances
student_1 = Student(1, "Kim", 'Sarang', '[email protected]', '1', 400, 3.5)
student_2 = Student(2, "Lee", 'Myungho', '[email protected]', '2', 500, 4.3)
# Basic info (uses __str__)
print(student_1)
print(student_2)
print()
# Full details
print(student_1.detail_info())
print(student_2.detail_info())
# Tuition info (before the increase)
print(student_1.get_fee())
print(student_2.get_fee())
# Raising tuition WITHOUT the class method (direct attribute write)
# Student.tuition_per = 1.2
# Raising tuition WITH the class method
Student.raise_fee(1.5)
# Tuition info (after the increase)
print(student_1.get_fee_culc())
print(student_2.get_fee_culc())
# Practice: creating instances through the class-method constructor
student_3 = Student.student_const(3, 'Park', 'Minji', '[email protected]', '3', 550, 4.5)
student_4 = Student.student_const(4, 'Cho', 'Sunghan', '[email protected]', '4', 6000, 4.1)
# Full details
print(student_3.detail_info())
print(student_4.detail_info())
print()
# Confirm the stored tuition reflects the multiplier
print(student_3._tuition)
print(student_4._tuition)
print()
# Scholarship eligibility WITHOUT the static method (module-level function)
def is_scholarship(inst):
    if inst._gpa >= 4.3:
        return '{} is a scholarship recipient.'.format(inst._last_name)
    return "Sorry. Not a scholarship recipient"
print(is_scholarship(student_1))
print(is_scholarship(student_2))
print(is_scholarship(student_3))
print(is_scholarship(student_4))
print()
# Scholarship eligibility WITH the static method, called on the class
print(Student.is_scholarship_st(student_1))
print(Student.is_scholarship_st(student_2))
print(Student.is_scholarship_st(student_3))
print(Student.is_scholarship_st(student_4))
print()
# Static methods are also reachable through instances
print(student_1.is_scholarship_st(student_1))
print(student_2.is_scholarship_st(student_2))
print(student_3.is_scholarship_st(student_3))
print(student_4.is_scholarship_st(student_4))
|
normal
|
{
"blob_id": "f507fbe7c92134c0a7149aafe7de88debebd42f5",
"index": 7760,
"step-1": "class Student(object):\n <mask token>\n <mask token>\n <mask token>\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n <mask token>\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n <mask token>\n <mask token>\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Student(object):\n <mask token>\n <mask token>\n <mask token>\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n\n def get_fee(self):\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition)\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n\n def __str__(self):\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.\n full_name(), self._grade, self._email)\n <mask token>\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n <mask token>\n\n\n<mask token>\n",
"step-3": "class Student(object):\n <mask token>\n tuition_per = 1.0\n\n def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):\n self._id = id\n self._first_name = first_name\n self._last_name = last_name\n self._email = email\n self._grade = grade\n self._tuition = tuition\n self._gpa = gpa\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n\n def get_fee(self):\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition)\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n\n def __str__(self):\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.\n full_name(), self._grade, self._email)\n\n @classmethod\n def raise_fee(cls, per):\n if per <= 1:\n print('Please Enter 1 or More')\n cls.tuition_per = per\n print('Succed! tuiton increased')\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n\n @staticmethod\n def is_scholarship_st(inst):\n if inst._gpa >= 4.3:\n return '{} is a scholarship recipient.'.format(inst._last_name)\n return 'Sorry. Not a scholarship recipient'\n\n\n<mask token>\n",
"step-4": "class Student(object):\n \"\"\"\n Student Class\n Author : Kim\n Date : 2020.11.07\n Description : Class, Static, Instance Method\n \"\"\"\n tuition_per = 1.0\n\n def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):\n self._id = id\n self._first_name = first_name\n self._last_name = last_name\n self._email = email\n self._grade = grade\n self._tuition = tuition\n self._gpa = gpa\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n\n def get_fee(self):\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition)\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n\n def __str__(self):\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.\n full_name(), self._grade, self._email)\n\n @classmethod\n def raise_fee(cls, per):\n if per <= 1:\n print('Please Enter 1 or More')\n cls.tuition_per = per\n print('Succed! tuiton increased')\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n\n @staticmethod\n def is_scholarship_st(inst):\n if inst._gpa >= 4.3:\n return '{} is a scholarship recipient.'.format(inst._last_name)\n return 'Sorry. Not a scholarship recipient'\n\n\n<mask token>\n\n\ndef is_scholarship(inst):\n if inst._gpa >= 4.3:\n return '{} is a scholarship recipient.'.format(inst._last_name)\n return 'Sorry. Not a scholarship recipient'\n\n\n<mask token>\n",
"step-5": "#파이썬 심화\r\n#클래스 메소드, 인스턴스 메소드, 스테이틱 메소드\r\n\r\n# 기본 인스턴스 메소드\r\n\r\nclass Student(object):\r\n \"\"\"\r\n Student Class\r\n Author : Kim\r\n Date : 2020.11.07\r\n Description : Class, Static, Instance Method\r\n \"\"\"\r\n\r\n #Class Variable\r\n tuition_per = 1.0\r\n\r\n def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):\r\n self._id = id\r\n self._first_name = first_name\r\n self._last_name = last_name\r\n self._email = email\r\n self._grade = grade\r\n self._tuition = tuition\r\n self._gpa = gpa\r\n\r\n # Instance Method\r\n def full_name(self):\r\n return '{} {}'.format(self._first_name, self._last_name)\r\n\r\n # Instance Method\r\n def detail_info(self):\r\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id, self.full_name(), self._email, self._grade, self._tuition, self._gpa)\r\n \r\n # Instance Method\r\n def get_fee(self):\r\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self._tuition)\r\n\r\n # Instance Method\r\n def get_fee_culc(self):\r\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self._tuition*Student.tuition_per)\r\n\r\n def __str__(self):\r\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.full_name(), self._grade, self._email)\r\n\r\n #Class Method\r\n @classmethod\r\n def raise_fee(cls, per):\r\n if per <= 1:\r\n print('Please Enter 1 or More')\r\n cls.tuition_per = per\r\n print('Succed! tuiton increased')\r\n\r\n #Class Method\r\n @classmethod\r\n def student_const(cls, id, first_name, last_name, email, grade, tuition, gpa):\r\n return cls(id, first_name, last_name, email, grade, tuition * cls.tuition_per, gpa)\r\n\r\n #Static Method\r\n @staticmethod\r\n def is_scholarship_st(inst):\r\n if inst._gpa >= 4.3:\r\n return '{} is a scholarship recipient.'.format(inst._last_name)\r\n return \"Sorry. 
Not a scholarship recipient\"\r\n\r\n \r\n\r\n#학생 인스턴스\r\nstudent_1 = Student(1, \"Kim\", 'Sarang', '[email protected]', '1', 400, 3.5)\r\nstudent_2 = Student(2, \"Lee\", 'Myungho', '[email protected]', '2', 500, 4.3)\r\n\r\n# 기본 정보\r\nprint(student_1)\r\nprint(student_2)\r\n\r\nprint()\r\n\r\n#전체 정보\r\nprint(student_1.detail_info())\r\nprint(student_2.detail_info())\r\n\r\n#학비 정보(인상전)\r\nprint(student_1.get_fee())\r\nprint(student_2.get_fee())\r\n\r\n#학비 인상 (클래스 매소드 미사용)\r\n# Student.tuition_per = 1.2\r\n\r\n#학비 인상 (클래스 매소드 사용)\r\nStudent.raise_fee(1.5)\r\n\r\n#학비 정보(인상후)\r\nprint(student_1.get_fee_culc())\r\nprint(student_2.get_fee_culc())\r\n\r\n# 클래스 메소드 인스턴스 생성 실습\r\nstudent_3 = Student.student_const(3, 'Park', 'Minji', '[email protected]', '3', 550, 4.5)\r\nstudent_4 = Student.student_const(4, 'Cho', 'Sunghan', '[email protected]', '4', 6000, 4.1)\r\n\r\n# 전체 정보\r\nprint(student_3.detail_info())\r\nprint(student_4.detail_info())\r\nprint()\r\n\r\n#학생 학비 변경 확인\r\nprint(student_3._tuition)\r\nprint(student_4._tuition)\r\nprint()\r\n\r\n# 장학금 혜택 여부(스테이틱 메소드 미사용)\r\ndef is_scholarship(inst):\r\n if inst._gpa >= 4.3:\r\n return '{} is a scholarship recipient.'.format(inst._last_name)\r\n return \"Sorry. Not a scholarship recipient\"\r\n\r\nprint(is_scholarship(student_1))\r\nprint(is_scholarship(student_2))\r\nprint(is_scholarship(student_3))\r\nprint(is_scholarship(student_4))\r\n\r\nprint()\r\n\r\n# 장학금 혜택 여부(스테이틱 메소드 사용)\r\nprint(Student.is_scholarship_st(student_1))\r\nprint(Student.is_scholarship_st(student_2))\r\nprint(Student.is_scholarship_st(student_3))\r\nprint(Student.is_scholarship_st(student_4))\r\n\r\nprint()\r\n\r\nprint(student_1.is_scholarship_st(student_1))\r\nprint(student_2.is_scholarship_st(student_2))\r\nprint(student_3.is_scholarship_st(student_3))\r\nprint(student_4.is_scholarship_st(student_4))",
"step-ids": [
5,
7,
11,
13,
16
]
}
|
[
5,
7,
11,
13,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ours2.append(mine)
ours2.append(yours)
print(ours1)
print(ours2)
<|reserved_special_token_0|>
print(ours1)
print(ours2)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
yours = ['Yale', 'MIT', 'Berkeley']
mine = ['Harvard', 'CAU', 'Stanford']
ours1 = mine + yours
ours2 = []
ours2.append(mine)
ours2.append(yours)
print(ours1)
print(ours2)
yours[1] = 'Mich'
print(ours1)
print(ours2)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 22 20:21:16 2018

@author: Yijie
"""

#Q4: list concatenation vs. appending list objects (aliasing demo).
#(1)
yours = ['Yale','MIT','Berkeley']
mine = ['Harvard','CAU','Stanford']
ours1 = mine + yours
ours2=[]
ours2.append(mine)
ours2.append(yours)
print(ours1)
print(ours2)
# Difference: `ours1` is a flat list holding all six names, while
# `ours2` is a list of two lists -- each append stored a list object,
# not its elements.
#(2) Mutate `yours` and observe which result changes.
yours[1]='Mich'
print(ours1)
print(ours2)
# `ours1` is unaffected because `mine + yours` copied the elements into
# a brand-new list, whereas `ours2` still references the original
# `yours` object, so the mutation shows through.
|
flexible
|
{
"blob_id": "bf65d4a4e066e3e06b888d4b9ed49e10e66b4e78",
"index": 8145,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nours2.append(mine)\nours2.append(yours)\nprint(ours1)\nprint(ours2)\n<mask token>\nprint(ours1)\nprint(ours2)\n",
"step-3": "<mask token>\nyours = ['Yale', 'MIT', 'Berkeley']\nmine = ['Harvard', 'CAU', 'Stanford']\nours1 = mine + yours\nours2 = []\nours2.append(mine)\nours2.append(yours)\nprint(ours1)\nprint(ours2)\nyours[1] = 'Mich'\nprint(ours1)\nprint(ours2)\n",
"step-4": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jan 22 20:21:16 2018\n\n@author: Yijie\n\"\"\"\n\n#Q4:\n#(1)\nyours = ['Yale','MIT','Berkeley']\nmine = ['Harvard','CAU','Stanford']\nours1 = mine + yours\nours2=[]\nours2.append(mine)\nours2.append(yours)\nprint(ours1)\nprint(ours2)\n# Difference:the print out results indicate that the list 'ours1' is having \n#the objects in 'yours' and 'mine' together, while 'ours2' has a dividing line\n# between 'yours' and 'mine'.\n\n#(2) question: do you want to remove something?\nyours[1]='Mich'\nprint(ours1)\nprint(ours2)\n#ours1 stays unchanged while ours2 changed because ours1 is a new list, while ours2 is adding ",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
async def add_time(chat, time):
return col.insert_one({'chat': chat, 'time': time})
async def get_time(chat):
return col.find_one({'chat': chat})
async def update_time(chat, time):
return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':
time}})
<|reserved_special_token_1|>
<|reserved_special_token_0|>
col = mongo.cli['Cupidbot']['timer']
async def add_time(chat, time):
return col.insert_one({'chat': chat, 'time': time})
async def get_time(chat):
return col.find_one({'chat': chat})
async def update_time(chat, time):
return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':
time}})
<|reserved_special_token_1|>
from . import mongo
# Handle to the `timer` collection in the `Cupidbot` database.
# NOTE(review): `mongo.cli` is defined in the sibling module; if it is a
# synchronous pymongo client, these coroutines block the event loop on
# every call -- confirm whether an async driver (motor) was intended.
col = mongo.cli['Cupidbot']['timer']


async def add_time(chat, time):
    # Insert a new per-chat timer document {chat, time}.
    return col.insert_one({'chat': chat, 'time': time})


async def get_time(chat):
    # Fetch the timer document for `chat` (None if absent, per find_one).
    return col.find_one({'chat': chat})


async def update_time(chat, time):
    # Overwrite the stored time for `chat`; also re-sets the `chat` key,
    # which is redundant since it already matched the filter.
    return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':
        time}})
|
flexible
|
{
"blob_id": "e4ce10f5db56e4e2e1988da3cee542a4a09785a8",
"index": 5381,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nasync def add_time(chat, time):\n return col.insert_one({'chat': chat, 'time': time})\n\n\nasync def get_time(chat):\n return col.find_one({'chat': chat})\n\n\nasync def update_time(chat, time):\n return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':\n time}})\n",
"step-3": "<mask token>\ncol = mongo.cli['Cupidbot']['timer']\n\n\nasync def add_time(chat, time):\n return col.insert_one({'chat': chat, 'time': time})\n\n\nasync def get_time(chat):\n return col.find_one({'chat': chat})\n\n\nasync def update_time(chat, time):\n return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':\n time}})\n",
"step-4": "from . import mongo\ncol = mongo.cli['Cupidbot']['timer']\n\n\nasync def add_time(chat, time):\n return col.insert_one({'chat': chat, 'time': time})\n\n\nasync def get_time(chat):\n return col.find_one({'chat': chat})\n\n\nasync def update_time(chat, time):\n return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':\n time}})\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from graphics.rectangle import *
from graphics.circle import *
from graphics.DGraphics.cuboid import *
from graphics.DGraphics.sphere import *
# Demo driver for the graphics package: prompt for each shape's
# dimensions and print its area and perimeter.

print("------rectangle-------")
# BUG FIX: the original passed the literal `1` instead of the entered
# length `l` to every rectangle/cuboid call (a classic l-vs-1 mix-up).
# Descriptive names make the bug impossible to reintroduce.
length = int(input("enter length : "))
breadth = int(input("enter breadth : "))
print("area of rectangle : ", RectArea(length, breadth))
print("perimeter of rectangle : ", Rectperimeter(length, breadth))
print()

print("-------circle-------")
radius = int(input("enter radius : "))
print("area of circle : ", circlearea(radius))
print("perimeter of circle : ", circleperimeter(radius))
print()

print("-----cuboid-----")
length = int(input("enter length : "))
width = int(input("enter width : "))
height = int(input("enter height : "))
print("area of cuboid :", cuboidarea(length, width, height))
print("perimeter of cuboid : ", cuboidperimeter(length, width, height))
print()

# BUG FIX: corrected "shpere" typos in the printed labels.
print("-------sphere-----")
radius = int(input("enter radius: "))
print("area of sphere: ", spherearea(radius))
print("perimeter of sphere : ", sphereperimeter(radius))
print()
|
normal
|
{
"blob_id": "f275085a2e4e3efc8eb841b5322d9d71f2e43846",
"index": 7998,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('------rectangle-------')\n<mask token>\nprint('area of rectangle : ', RectArea(1, b))\nprint('perimeter of rectangle : ', Rectperimeter(1, b))\nprint()\nprint('-------circle-------')\n<mask token>\nprint('area of circle : ', circlearea(r))\nprint('perimeter of circle : ', circleperimeter(r))\nprint()\nprint('-----cuboid-----')\n<mask token>\nprint('area of cuboid :', cuboidarea(1, w, h))\nprint('perimeter of cuboid : ', cuboidperimeter(1, w, h))\nprint()\nprint('-------shpere-----')\n<mask token>\nprint('area of shpere: ', spherearea(r))\nprint('perimeter of shpere : ', sphereperimeter(r))\nprint()\n",
"step-3": "<mask token>\nprint('------rectangle-------')\nl = int(input('enter length : '))\nb = int(input('enter breadth : '))\nprint('area of rectangle : ', RectArea(1, b))\nprint('perimeter of rectangle : ', Rectperimeter(1, b))\nprint()\nprint('-------circle-------')\nr = int(input('enter radius : '))\nprint('area of circle : ', circlearea(r))\nprint('perimeter of circle : ', circleperimeter(r))\nprint()\nprint('-----cuboid-----')\nl = int(input('enter length : '))\nw = int(input('enter width : '))\nh = int(input('enter height : '))\nprint('area of cuboid :', cuboidarea(1, w, h))\nprint('perimeter of cuboid : ', cuboidperimeter(1, w, h))\nprint()\nprint('-------shpere-----')\nr = int(input('enter radius: '))\nprint('area of shpere: ', spherearea(r))\nprint('perimeter of shpere : ', sphereperimeter(r))\nprint()\n",
"step-4": "from graphics.rectangle import *\nfrom graphics.circle import *\nfrom graphics.DGraphics.cuboid import *\nfrom graphics.DGraphics.sphere import *\nprint('------rectangle-------')\nl = int(input('enter length : '))\nb = int(input('enter breadth : '))\nprint('area of rectangle : ', RectArea(1, b))\nprint('perimeter of rectangle : ', Rectperimeter(1, b))\nprint()\nprint('-------circle-------')\nr = int(input('enter radius : '))\nprint('area of circle : ', circlearea(r))\nprint('perimeter of circle : ', circleperimeter(r))\nprint()\nprint('-----cuboid-----')\nl = int(input('enter length : '))\nw = int(input('enter width : '))\nh = int(input('enter height : '))\nprint('area of cuboid :', cuboidarea(1, w, h))\nprint('perimeter of cuboid : ', cuboidperimeter(1, w, h))\nprint()\nprint('-------shpere-----')\nr = int(input('enter radius: '))\nprint('area of shpere: ', spherearea(r))\nprint('perimeter of shpere : ', sphereperimeter(r))\nprint()\n",
"step-5": "from graphics.rectangle import *\r\nfrom graphics.circle import *\r\nfrom graphics.DGraphics.cuboid import *\r\nfrom graphics.DGraphics.sphere import *\r\nprint (\"------rectangle-------\")\r\nl=int(input(\"enter length : \"))\r\nb=int(input(\"enter breadth : \"))\r\nprint(\"area of rectangle : \",RectArea(1,b))\r\nprint(\"perimeter of rectangle : \",Rectperimeter(1,b))\r\nprint()\r\nprint (\"-------circle-------\")\r\nr=int(input(\"enter radius : \"))\r\nprint(\"area of circle : \",circlearea(r))\r\nprint(\"perimeter of circle : \",circleperimeter(r))\r\nprint()\r\nprint (\"-----cuboid-----\")\r\nl=int(input(\"enter length : \"))\r\nw=int(input(\"enter width : \"))\r\nh=int(input(\"enter height : \"))\r\nprint(\"area of cuboid :\",cuboidarea(1,w,h))\r\nprint(\"perimeter of cuboid : \",cuboidperimeter(1,w,h))\r\nprint()\r\nprint (\"-------shpere-----\")\r\nr=int(input(\"enter radius: \"))\r\nprint(\"area of shpere: \",spherearea(r))\r\nprint(\"perimeter of shpere : \",sphereperimeter(r))\r\nprint()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n \r\n \r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import scipy.sparse
from multiprocessing.sharedctypes import Array
from ctypes import c_double
import numpy as np
from multiprocessing import Pool
import matplotlib.pyplot as plt
from time import time
import scipy.io as sio
import sys
# np.random.seed(1)
# Experiment hyper-parameters (module-level globals read by the functions below).
d = 100              # ambient dimension of each sample
n = 100000           # number of samples drawn per epoch
k=10                 # number of principal components to recover
learning_rate = 0.4  # base step size, decayed as learning_rate / step
T_freq = 100         # record the error once every T_freq updates
num_threads = 1      # unused below; hogwild() takes its own num_threads argument
epochs = 1           # passes over the sampled data
Iterations = 10      # independent repetitions averaged for the final curves
def getSyntheticData(n,d,k):
    """Build a synthetic Gaussian data set for the PCA experiments.

    Draws ``n`` samples from a zero-mean multivariate normal whose covariance
    is diagonal with geometrically decaying entries 0.8**i.  Returns
    ``[samples, covariance, truth]`` where ``truth`` is the sum of the ``k``
    largest eigenvalues, i.e. the optimal value of the k-PCA objective.
    """
    decay = 0.8
    eigenvalues = [decay ** power for power in range(d)]
    covariance = np.diag(eigenvalues)
    # Best possible explained variance with a rank-k projection.
    truth = np.sum(eigenvalues[:k])
    center = np.array([0] * d)
    samples = np.random.multivariate_normal(center, covariance, n)
    return [samples, covariance, truth]
def oja_async(sample):
    """One lock-free (hogwild-style) Oja update against the shared model.

    Reads the module-level shared buffers ``coef_shared`` (flattened d x k
    model matrix) and ``rate_shared`` (global step counter), applies one
    stochastic gradient step for ``sample`` and writes the un-orthonormalised
    update back without any locking.  Every T_freq-th step it returns
    ``[error, timestamp]``; otherwise it implicitly returns None (callers
    filter the Nones out, see hogwild()).
    """
    # print rate_shared[0]
    sample = sample.reshape(d,1)
    # View the shared flat buffer as the current d x k model.
    U = np.frombuffer(coef_shared)
    U = U.reshape(d,k)
    grad = np.dot(sample,np.dot(sample.T,U))
    # NOTE(review): this read-increment of the shared counter is racy by
    # design (hogwild) -- concurrent workers may observe the same step count.
    rate_shared[0] = rate_shared[0]+1
    U = U + (learning_rate/rate_shared[0])*grad
    # U = U + (learning_rate/np.sqrt(rate_shared[0]))*grad
    # Write the raw (un-orthonormalised) update back, element by element;
    # index j + i*k matches the row-major reshape(d, k) layout above.
    for i in range(d):
        for j in range(k):
            coef_shared[j+i*k] = U[i][j]
    # Orthonormalise only the local copy, purely for error measurement.
    U= np.linalg.qr(U)[0]
    if rate_shared[0]%T_freq ==0:
        error = truth-np.trace(np.dot(np.dot(U.T,covariance),U))
        return [error,time()]
    # else:
    #     return None
def hogwild(samples,k,num_threads):
    """Run oja_async over all samples with ``num_threads`` lock-free workers.

    Relies on the module-level shared state (coef_shared / rate_shared) that
    oja_async reads and writes; the pool workers inherit it on fork.

    Returns ``[errors, times]``: the error measurements that oja_async emits
    every T_freq updates, sorted chronologically by wall-clock time elapsed
    since the pool was started.
    """
    n = len(samples)
    st = time()
    # Use the pool as a context manager so worker processes are torn down
    # deterministically instead of being leaked (original never closed it).
    with Pool(num_threads) as p:
        error_n_times = p.map(oja_async, samples)
    # oja_async only returns a value on every T_freq-th update; drop the Nones.
    measured = [e_n_t for e_n_t in error_n_times if e_n_t is not None]
    errors = [ent[0] for ent in measured]
    times = [ent[1] - st for ent in measured]
    # Re-pair the two curves in chronological order.
    errors = [x for _, x in sorted(zip(times, errors))]
    times = sorted(times)
    # BUG FIX: floor division -- under Python 3, n / T_freq is a float and a
    # float slice index raises TypeError.
    n_t_freq = n // T_freq
    return [errors[:n_t_freq], times[:n_t_freq]]
def evaluate(model):
    """Unexplained-variance score of ``model`` on the global ``data`` dict.

    Computes the empirical covariance of ``data["train"]`` (normalised by the
    global sample count ``n``) and returns the variance not captured by the
    subspace spanned by ``model``.  The value is returned twice to preserve
    the original (train, test) tuple shape.
    """
    train = data["train"]
    cov_train = np.dot(train, train.T) / n
    total_var = np.trace(cov_train)
    captured = np.trace(np.dot(np.dot(model.T, cov_train), model))
    gap = total_var - captured
    return gap, gap
def ojaNormal(samples,k):
    """Sequential (single-process) Oja baseline.

    Iterates once over ``samples`` with step size learning_rate / t, and every
    T_freq steps records the error of the orthonormalised iterate against the
    global ``covariance`` / ``truth``.  Returns ``[errors, elapsed_times]``.
    """
    errors = []
    elapsed_times = []
    start_time = time()
    U = np.random.randn(d, k)
    for step, sample in enumerate(samples, start=1):
        col = sample.reshape(d, 1)
        U = U + (np.dot(col, np.dot(col.T, U))) * learning_rate / step
        if step % T_freq == 0:
            # Orthonormalise a copy only for measurement; the iterate itself
            # is never projected.
            Q = np.linalg.qr(U)[0]
            errors.append(truth - np.trace(np.dot(np.dot(Q.T, covariance), Q)))
            elapsed_times.append(time() - start_time)
    U_final = np.linalg.qr(U)[0]  # computed but unused, kept from the original
    return [errors, elapsed_times]
def plotEverything(errors_oja, times_oja,errors_hogwild_one, times_hogwild_one,errors_hogwild_two, times_hogwild_two,errors_hogwild_four, times_hogwild_four):
    """Render the comparison plots and block on plt.show().

    Figure 0: error vs wall-clock time; figure 1: error vs iteration index.
    One curve per method (sequential oja, hogwild with 1/2/4 processes).
    """
    labels = ("oja", "hogwild, 1 process", "hogwild 2 processes",
              "hogwild, 4 processes")
    series = [
        (times_oja, errors_oja),
        (times_hogwild_one, errors_hogwild_one),
        (times_hogwild_two, errors_hogwild_two),
        (times_hogwild_four, errors_hogwild_four),
    ]
    plt.figure(0)
    plt.xlabel('Time (secs)')
    plt.ylabel('Error')
    for xs, ys in series:
        plt.plot(xs, ys)
    plt.legend(labels)
    plt.title("k = " + str(k))
    plt.figure(1)
    plt.xlabel('Iterations')
    plt.ylabel('Error')
    for _, ys in series:
        plt.plot(range(1, len(ys) + 1), ys)
    plt.legend(labels)
    plt.title("k = " + str(k))
    plt.show()
# ---- experiment driver -----------------------------------------------------
# Build the synthetic data once so every method is scored against the same
# covariance / truth values.
[samples,covariance,truth] = getSyntheticData(n,d,k)
total_samples = []
for i in range(epochs):
    total_samples.extend(samples)
# Accumulators for averaging the error/time curves over `Iterations` runs.
errors_oja_sum = [0]*n
times_oja_sum = [0]*n
errors_hogwild_sum_one = [0]*n
times_hogwild_sum_one = [0]*n
errors_hogwild_sum_two = [0]*n
times_hogwild_sum_two = [0]*n
errors_hogwild_sum_four= [0]*n
times_hogwild_sum_four = [0]*n
for t in range(Iterations):
    # Sequential baseline.
    [errors_oja, times_oja] = ojaNormal(total_samples,k)
    errors_oja_sum = [e_sum + e for (e_sum,e) in zip(errors_oja_sum,errors_oja)]
    # NOTE: the comprehension variable `t` shadows the loop index only inside
    # the comprehension scope (Python 3), so the outer loop is unaffected.
    times_oja_sum = [t_sum + t for (t_sum,t) in zip(times_oja_sum,times_oja)]
    # Re-initialise the lock-free shared model and step counter before each
    # hogwild run; Pool workers inherit these module-level globals on fork.
    coef_shared = Array(c_double,
                        (np.random.randn(d,k).flat),
                        lock=False)
    rate_shared = Array(c_double,
                        [0],
                        lock=False)
    [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples,k,1)
    coef_shared = Array(c_double,
                        (np.random.randn(d,k).flat),
                        lock=False)
    rate_shared = Array(c_double,
                        [0],
                        lock=False)
    [errors_hogwild_two, times_hogwild_two] = hogwild(total_samples,k,2)
    coef_shared = Array(c_double,
                        (np.random.randn(d,k).flat),
                        lock=False)
    rate_shared = Array(c_double,
                        [0],
                        lock=False)
    [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples,k,4)
    # Accumulate each hogwild configuration's curves.
    errors_hogwild_sum_one = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_one,errors_hogwild_one)]
    times_hogwild_sum_one = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_one,times_hogwild_one)]
    errors_hogwild_sum_two = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_two,errors_hogwild_two)]
    times_hogwild_sum_two = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_two,times_hogwild_two)]
    errors_hogwild_sum_four = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_four,errors_hogwild_four)]
    times_hogwild_sum_four = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_four,times_hogwild_four)]
# Average the accumulated curves and plot every configuration.
errors_oja_average = [e/Iterations for e in errors_oja_sum]
times_oja_average = [t/Iterations for t in times_oja_sum]
times_hogwild_average_one = [t/Iterations for t in times_hogwild_sum_one]
errors_hogwild_average_one = [e/Iterations for e in errors_hogwild_sum_one]
times_hogwild_average_two = [t/Iterations for t in times_hogwild_sum_two]
errors_hogwild_average_two = [e/Iterations for e in errors_hogwild_sum_two]
times_hogwild_average_four = [t/Iterations for t in times_hogwild_sum_four]
errors_hogwild_average_four = [e/Iterations for e in errors_hogwild_sum_four]
plotEverything(errors_oja_average, times_oja_average,errors_hogwild_average_one, times_hogwild_average_one,errors_hogwild_average_two, times_hogwild_average_two,errors_hogwild_average_four, times_hogwild_average_four)
|
normal
|
{
"blob_id": "bf04bf41f657a6ada4777fe5de98d6a68beda9d3",
"index": 9769,
"step-1": "<mask token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\n<mask token>\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n 
plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\nd = 100\nn = 100000\nk = 10\nlearning_rate = 0.4\nT_freq = 100\nnum_threads = 1\nepochs = 1\nIterations = 10\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\ndef hogwild(samples, k, num_threads):\n n = len(samples)\n d = len(samples[0])\n st = time()\n p = Pool(num_threads)\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t != None]\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [(et - st) for et in end_times]\n errors = [x for _, x in sorted(zip(times, errors))]\n times = sorted(times)\n n_t_freq = n / T_freq\n return [errors[:n_t_freq], times[:n_t_freq]]\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = 
np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n[samples, covariance, truth] = getSyntheticData(n, d, k)\ntotal_samples = []\nfor i in range(epochs):\n total_samples.extend(samples)\nerrors_oja_sum = [0] * n\ntimes_oja_sum = [0] * n\nerrors_hogwild_sum_one = [0] * n\ntimes_hogwild_sum_one = [0] * n\nerrors_hogwild_sum_two = [0] * n\ntimes_hogwild_sum_two = [0] * n\nerrors_hogwild_sum_four = [0] * n\ntimes_hogwild_sum_four = [0] * n\nfor t in range(Iterations):\n [errors_oja, times_oja] = ojaNormal(total_samples, k)\n errors_oja_sum = [(e_sum + e) for e_sum, e in zip(errors_oja_sum,\n errors_oja)]\n times_oja_sum 
= [(t_sum + t) for t_sum, t in zip(times_oja_sum, times_oja)]\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples, k, 1)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_two, times_hogwild_two] = hogwild(total_samples, k, 2)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples, k, 4)\n errors_hogwild_sum_one = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_one, errors_hogwild_one)]\n times_hogwild_sum_one = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_one, times_hogwild_one)]\n errors_hogwild_sum_two = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_two, errors_hogwild_two)]\n times_hogwild_sum_two = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_two, times_hogwild_two)]\n errors_hogwild_sum_four = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_four, errors_hogwild_four)]\n times_hogwild_sum_four = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_four, times_hogwild_four)]\nerrors_oja_average = [(e / Iterations) for e in errors_oja_sum]\ntimes_oja_average = [(t / Iterations) for t in times_oja_sum]\ntimes_hogwild_average_one = [(t / Iterations) for t in times_hogwild_sum_one]\nerrors_hogwild_average_one = [(e / Iterations) for e in errors_hogwild_sum_one]\ntimes_hogwild_average_two = [(t / Iterations) for t in times_hogwild_sum_two]\nerrors_hogwild_average_two = [(e / Iterations) for e in errors_hogwild_sum_two]\ntimes_hogwild_average_four = [(t / Iterations) for t in times_hogwild_sum_four]\nerrors_hogwild_average_four = [(e / Iterations) for e in\n errors_hogwild_sum_four]\nplotEverything(errors_oja_average, times_oja_average,\n errors_hogwild_average_one, 
times_hogwild_average_one,\n errors_hogwild_average_two, times_hogwild_average_two,\n errors_hogwild_average_four, times_hogwild_average_four)\n",
"step-4": "import scipy.sparse\nfrom multiprocessing.sharedctypes import Array\nfrom ctypes import c_double\nimport numpy as np\nfrom multiprocessing import Pool\nimport matplotlib.pyplot as plt\nfrom time import time\nimport scipy.io as sio\nimport sys\nd = 100\nn = 100000\nk = 10\nlearning_rate = 0.4\nT_freq = 100\nnum_threads = 1\nepochs = 1\nIterations = 10\n\n\ndef getSyntheticData(n, d, k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [(alpha ** i) for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k])\n samples = np.random.multivariate_normal(mean, covariance, n)\n return [samples, covariance, truth]\n\n\ndef oja_async(sample):\n sample = sample.reshape(d, 1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d, k)\n grad = np.dot(sample, np.dot(sample.T, U))\n rate_shared[0] = rate_shared[0] + 1\n U = U + learning_rate / rate_shared[0] * grad\n for i in range(d):\n for j in range(k):\n coef_shared[j + i * k] = U[i][j]\n U = np.linalg.qr(U)[0]\n if rate_shared[0] % T_freq == 0:\n error = truth - np.trace(np.dot(np.dot(U.T, covariance), U))\n return [error, time()]\n\n\ndef hogwild(samples, k, num_threads):\n n = len(samples)\n d = len(samples[0])\n st = time()\n p = Pool(num_threads)\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t != None]\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [(et - st) for et in end_times]\n errors = [x for _, x in sorted(zip(times, errors))]\n times = sorted(times)\n n_t_freq = n / T_freq\n return [errors[:n_t_freq], times[:n_t_freq]]\n\n\ndef evaluate(model):\n data_train = data['train']\n covariance_train = np.dot(data_train, data_train.T) / n\n truth_train = np.trace(covariance_train)\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,\n covariance_train), model))\n return error_train, error_train\n\n\ndef ojaNormal(samples, k):\n errors = []\n 
elapsed_times = []\n start_time = time()\n U = np.random.randn(d, k)\n t = 0\n for x in samples:\n t = t + 1\n x = x.reshape(d, 1)\n U = U + np.dot(x, np.dot(x.T, U)) * learning_rate / t\n if t % T_freq == 0:\n U_proj = np.linalg.qr(U)[0]\n error = truth - np.trace(np.dot(np.dot(U_proj.T, covariance),\n U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n U_final = np.linalg.qr(U)[0]\n return [errors, elapsed_times]\n\n\ndef plotEverything(errors_oja, times_oja, errors_hogwild_one,\n times_hogwild_one, errors_hogwild_two, times_hogwild_two,\n errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja, errors_oja)\n plt.plot(times_hogwild_one, errors_hogwild_one)\n plt.plot(times_hogwild_two, errors_hogwild_two)\n plt.plot(times_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n iterations_oja = range(1, len(errors_oja) + 1)\n iterations_hogwild_one = range(1, len(errors_hogwild_one) + 1)\n iterations_hogwild_two = range(1, len(errors_hogwild_two) + 1)\n iterations_hogwild_four = range(1, len(errors_hogwild_four) + 1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n plt.plot(iterations_oja, errors_oja)\n plt.plot(iterations_hogwild_one, errors_hogwild_one)\n plt.plot(iterations_hogwild_two, errors_hogwild_two)\n plt.plot(iterations_hogwild_four, errors_hogwild_four)\n plt.legend(('oja', 'hogwild, 1 process', 'hogwild 2 processes',\n 'hogwild, 4 processes'))\n plt.title('k = ' + str(k))\n plt.show()\n\n\n[samples, covariance, truth] = getSyntheticData(n, d, k)\ntotal_samples = []\nfor i in range(epochs):\n total_samples.extend(samples)\nerrors_oja_sum = [0] * n\ntimes_oja_sum = [0] * n\nerrors_hogwild_sum_one = [0] * n\ntimes_hogwild_sum_one = [0] * n\nerrors_hogwild_sum_two = [0] * n\ntimes_hogwild_sum_two = [0] * n\nerrors_hogwild_sum_four = [0] * 
n\ntimes_hogwild_sum_four = [0] * n\nfor t in range(Iterations):\n [errors_oja, times_oja] = ojaNormal(total_samples, k)\n errors_oja_sum = [(e_sum + e) for e_sum, e in zip(errors_oja_sum,\n errors_oja)]\n times_oja_sum = [(t_sum + t) for t_sum, t in zip(times_oja_sum, times_oja)]\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples, k, 1)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_two, times_hogwild_two] = hogwild(total_samples, k, 2)\n coef_shared = Array(c_double, np.random.randn(d, k).flat, lock=False)\n rate_shared = Array(c_double, [0], lock=False)\n [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples, k, 4)\n errors_hogwild_sum_one = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_one, errors_hogwild_one)]\n times_hogwild_sum_one = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_one, times_hogwild_one)]\n errors_hogwild_sum_two = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_two, errors_hogwild_two)]\n times_hogwild_sum_two = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_two, times_hogwild_two)]\n errors_hogwild_sum_four = [(e_sum + e) for e_sum, e in zip(\n errors_hogwild_sum_four, errors_hogwild_four)]\n times_hogwild_sum_four = [(t_sum + t) for t_sum, t in zip(\n times_hogwild_sum_four, times_hogwild_four)]\nerrors_oja_average = [(e / Iterations) for e in errors_oja_sum]\ntimes_oja_average = [(t / Iterations) for t in times_oja_sum]\ntimes_hogwild_average_one = [(t / Iterations) for t in times_hogwild_sum_one]\nerrors_hogwild_average_one = [(e / Iterations) for e in errors_hogwild_sum_one]\ntimes_hogwild_average_two = [(t / Iterations) for t in times_hogwild_sum_two]\nerrors_hogwild_average_two = [(e / Iterations) for e in errors_hogwild_sum_two]\ntimes_hogwild_average_four = [(t / 
Iterations) for t in times_hogwild_sum_four]\nerrors_hogwild_average_four = [(e / Iterations) for e in\n errors_hogwild_sum_four]\nplotEverything(errors_oja_average, times_oja_average,\n errors_hogwild_average_one, times_hogwild_average_one,\n errors_hogwild_average_two, times_hogwild_average_two,\n errors_hogwild_average_four, times_hogwild_average_four)\n",
"step-5": "import scipy.sparse\nfrom multiprocessing.sharedctypes import Array\nfrom ctypes import c_double\nimport numpy as np\nfrom multiprocessing import Pool\nimport matplotlib.pyplot as plt\nfrom time import time\nimport scipy.io as sio\nimport sys\n# np.random.seed(1)\n\n\n\nd = 100\nn = 100000\nk=10\nlearning_rate = 0.4\nT_freq = 100\nnum_threads = 1\nepochs = 1\nIterations = 10\n\ndef getSyntheticData(n,d,k):\n mean = np.array([0] * d)\n alpha = 0.8\n cov_diag = [alpha**i for i in range(d)]\n covariance = np.diag(cov_diag)\n truth = np.sum(cov_diag[:k]) \n samples = np.random.multivariate_normal(mean,covariance,n)\n return [samples,covariance,truth]\n\n\ndef oja_async(sample):\n # print rate_shared[0]\n sample = sample.reshape(d,1)\n U = np.frombuffer(coef_shared)\n U = U.reshape(d,k)\n grad = np.dot(sample,np.dot(sample.T,U))\n rate_shared[0] = rate_shared[0]+1\n U = U + (learning_rate/rate_shared[0])*grad\n # U = U + (learning_rate/np.sqrt(rate_shared[0]))*grad\n\n for i in range(d):\n for j in range(k):\n coef_shared[j+i*k] = U[i][j]\n\n U= np.linalg.qr(U)[0]\n if rate_shared[0]%T_freq ==0:\n error = truth-np.trace(np.dot(np.dot(U.T,covariance),U))\n return [error,time()]\n # else:\n # return None\n\ndef hogwild(samples,k,num_threads):\n n = len(samples)\n d = len(samples[0])\n\n st = time()\n # print num_threads\n p = Pool(num_threads) \n\n error_n_times = p.map(oja_async, samples)\n error_n_times_refined = [e_n_t for e_n_t in error_n_times if e_n_t!= None]\n # print error_n_times_refined;\n errors = [ent[0] for ent in error_n_times_refined]\n end_times = [ent[1] for ent in error_n_times_refined]\n times = [et - st for et in end_times]\n errors = [x for _,x in sorted(zip(times,errors))]\n times = sorted(times)\n\n n_t_freq = n/T_freq\n return [errors[:n_t_freq],times[:n_t_freq]]\n \n\n\ndef evaluate(model):\n data_train = data[\"train\"]\n# data_test = data[\"test\"]\n covariance_train = np.dot(data_train,data_train.T)/n\n# covariance_test = 
np.dot(data_test,data_test.T)/n\n truth_train = np.trace(covariance_train)\n# truth_test = np.trace(covariance_test)\n# error_train = np.linalg.norm(data_train - np.dot(np.dot(model,model.T),data_train),\"fro\")/n\n# error_test = np.linalg.norm(data_test - np.dot(np.dot(model,model.T),data_test),\"fro\")/n\n error_train = truth_train - np.trace(np.dot(np.dot(model.T,covariance_train),model))\n# error_test = truth_test - np.trace(np.dot(np.dot(model.T,covariance_test),model))\n# return error_train, error_test\n return error_train, error_train\n\ndef ojaNormal(samples,k):\n errors = []\n elapsed_times = []\n start_time = time()\n U = np.random.randn(d,k)\n # U = np.linalg.qr(U)[0]\n\n t = 0\n for x in samples:\n t=t+1\n x = x.reshape(d,1)\n U = U + (np.dot(x,np.dot(x.T,U)))*learning_rate/t\n if t%T_freq == 0:\n U_proj= np.linalg.qr(U)[0]\n # U = U_proj\n error = truth- np.trace(np.dot(np.dot(U_proj.T,covariance),U_proj))\n errors.append(error)\n elapsed_times.append(time() - start_time)\n\n U_final = np.linalg.qr(U)[0]\n return [errors,elapsed_times] \n\n\n\ndef plotEverything(errors_oja, times_oja,errors_hogwild_one, times_hogwild_one,errors_hogwild_two, times_hogwild_two,errors_hogwild_four, times_hogwild_four):\n plt.figure(0)\n plt.xlabel('Time (secs)')\n plt.ylabel('Error')\n plt.plot(times_oja,errors_oja)\n plt.plot(times_hogwild_one,errors_hogwild_one)\n plt.plot(times_hogwild_two,errors_hogwild_two)\n plt.plot(times_hogwild_four,errors_hogwild_four)\n plt.legend((\"oja\",\"hogwild, 1 process\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n # plt.legend((\"oja\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n plt.title(\"k = \"+str(k))\n\n iterations_oja = range(1,len(errors_oja)+1)\n iterations_hogwild_one = range(1,len(errors_hogwild_one)+1)\n iterations_hogwild_two = range(1,len(errors_hogwild_two)+1)\n iterations_hogwild_four = range(1,len(errors_hogwild_four)+1)\n plt.figure(1)\n plt.xlabel('Iterations')\n plt.ylabel('Error')\n 
plt.plot(iterations_oja,errors_oja)\n plt.plot(iterations_hogwild_one,errors_hogwild_one)\n plt.plot(iterations_hogwild_two,errors_hogwild_two)\n plt.plot(iterations_hogwild_four,errors_hogwild_four)\n plt.legend((\"oja\",\"hogwild, 1 process\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n # plt.legend((\"oja\",\"hogwild 2 processes\",\"hogwild, 4 processes\"))\n plt.title(\"k = \"+str(k))\n plt.show()\n\n\n[samples,covariance,truth] = getSyntheticData(n,d,k)\ntotal_samples = []\n\nfor i in range(epochs):\n total_samples.extend(samples)\n\nerrors_oja_sum = [0]*n\ntimes_oja_sum = [0]*n\n\nerrors_hogwild_sum_one = [0]*n\ntimes_hogwild_sum_one = [0]*n\n\n\nerrors_hogwild_sum_two = [0]*n\ntimes_hogwild_sum_two = [0]*n\n\nerrors_hogwild_sum_four= [0]*n\ntimes_hogwild_sum_four = [0]*n\n\n\nfor t in range(Iterations):\n [errors_oja, times_oja] = ojaNormal(total_samples,k)\n\n errors_oja_sum = [e_sum + e for (e_sum,e) in zip(errors_oja_sum,errors_oja)]\n times_oja_sum = [t_sum + t for (t_sum,t) in zip(times_oja_sum,times_oja)]\n\n coef_shared = Array(c_double, \n (np.random.randn(d,k).flat),\n lock=False) \n rate_shared = Array(c_double, \n [0],\n lock=False) \n [errors_hogwild_one, times_hogwild_one] = hogwild(total_samples,k,1)\n\n coef_shared = Array(c_double, \n (np.random.randn(d,k).flat),\n lock=False) \n rate_shared = Array(c_double, \n [0],\n lock=False) \n [errors_hogwild_two, times_hogwild_two] = hogwild(total_samples,k,2)\n\n coef_shared = Array(c_double, \n (np.random.randn(d,k).flat),\n lock=False) \n rate_shared = Array(c_double, \n [0],\n lock=False) \n [errors_hogwild_four, times_hogwild_four] = hogwild(total_samples,k,4)\n\n\n errors_hogwild_sum_one = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_one,errors_hogwild_one)]\n times_hogwild_sum_one = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_one,times_hogwild_one)]\n\n\n errors_hogwild_sum_two = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_two,errors_hogwild_two)]\n 
times_hogwild_sum_two = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_two,times_hogwild_two)]\n\n errors_hogwild_sum_four = [e_sum + e for (e_sum,e) in zip(errors_hogwild_sum_four,errors_hogwild_four)]\n times_hogwild_sum_four = [t_sum + t for (t_sum,t) in zip(times_hogwild_sum_four,times_hogwild_four)]\n\nerrors_oja_average = [e/Iterations for e in errors_oja_sum]\ntimes_oja_average = [t/Iterations for t in times_oja_sum]\n\ntimes_hogwild_average_one = [t/Iterations for t in times_hogwild_sum_one]\nerrors_hogwild_average_one = [e/Iterations for e in errors_hogwild_sum_one]\n\ntimes_hogwild_average_two = [t/Iterations for t in times_hogwild_sum_two]\nerrors_hogwild_average_two = [e/Iterations for e in errors_hogwild_sum_two]\n\ntimes_hogwild_average_four = [t/Iterations for t in times_hogwild_sum_four]\nerrors_hogwild_average_four = [e/Iterations for e in errors_hogwild_sum_four]\nplotEverything(errors_oja_average, times_oja_average,errors_hogwild_average_one, times_hogwild_average_one,errors_hogwild_average_two, times_hogwild_average_two,errors_hogwild_average_four, times_hogwild_average_four)\n\n",
"step-ids": [
2,
5,
8,
9,
10
]
}
|
[
2,
5,
8,
9,
10
] |
from django.contrib import admin
from django.urls import path
from . import views
# URL route table for this Django app: each entry maps a path to a view in
# ./views.py; the `name` is used for reverse lookup in templates/redirects.
urlpatterns = [
    path('', views.skincare, name="skin"),                              # app root -> views.skincare
    path('productSearch/', views.productSearch, name="productSearch"),  # -> views.productSearch
    path('detail/', views.detail, name="detail"),                       # -> views.detail
]
|
normal
|
{
"blob_id": "c31c59d172b2b23ca4676be0690603f33b56f557",
"index": 4867,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', views.skincare, name='skin'), path('productSearch/',\n views.productSearch, name='productSearch'), path('detail/', views.\n detail, name='detail')]\n",
"step-3": "from django.contrib import admin\nfrom django.urls import path\nfrom . import views\nurlpatterns = [path('', views.skincare, name='skin'), path('productSearch/',\n views.productSearch, name='productSearch'), path('detail/', views.\n detail, name='detail')]\n",
"step-4": "from django.contrib import admin\nfrom django.urls import path\nfrom . import views\n\nurlpatterns = [\n path('', views.skincare, name=\"skin\"),\n path('productSearch/', views.productSearch, name=\"productSearch\"),\n path('detail/', views.detail, name=\"detail\"),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Module-private, append-only store of registered names (insertion order).
_registry = []


def registry(name):
    """Record *name* in the module-level registry."""
    _registry.append(name)


def registry_names():
    """Return an iterator over every registered name, in insertion order."""
    return iter(_registry)
|
normal
|
{
"blob_id": "51642dbb210600f9ca4e035fb884fbdda030fd04",
"index": 1491,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef registry_names():\n return iter(_registry)\n",
"step-3": "<mask token>\n\n\ndef registry(name):\n _registry.append(name)\n\n\ndef registry_names():\n return iter(_registry)\n",
"step-4": "_registry = []\n\n\ndef registry(name):\n _registry.append(name)\n\n\ndef registry_names():\n return iter(_registry)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def longest(s1, s2):
s = s1 + s2
st = ''.join(sorted(set(s)))
return st
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def longest(s1, s2):
s = s1 + s2
st = ''.join(sorted(set(s)))
return st
longest('xyaabbbccccdefww', 'xxxxyyyyabklmopq')
<|reserved_special_token_1|>
def longest(s1, s2):
# your code
s=s1+s2
st="".join(sorted(set(s)))
return st
longest("xyaabbbccccdefww","xxxxyyyyabklmopq")
|
flexible
|
{
"blob_id": "7d54d5fd855c7c03d2d4739e8ad4f9ab8772ca2b",
"index": 3977,
"step-1": "<mask token>\n",
"step-2": "def longest(s1, s2):\n s = s1 + s2\n st = ''.join(sorted(set(s)))\n return st\n\n\n<mask token>\n",
"step-3": "def longest(s1, s2):\n s = s1 + s2\n st = ''.join(sorted(set(s)))\n return st\n\n\nlongest('xyaabbbccccdefww', 'xxxxyyyyabklmopq')\n",
"step-4": "def longest(s1, s2):\n # your code\n s=s1+s2\n st=\"\".join(sorted(set(s))) \n return st\n \n \nlongest(\"xyaabbbccccdefww\",\"xxxxyyyyabklmopq\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import unittest
from battleline.model.Formation import Formation, FormationInvalidError
class TestFormation(unittest.TestCase):
def test_formation_with_less_than_three_cards_is_considered_invalid(self):
self.assertRaisesRegexp(
FormationInvalidError, "Formation must have 3 cards", Formation, [(1, "R"), (2, "Y")])
def test_formation_with_more_than_three_cards_is_considered_invalid(self):
self.assertRaisesRegexp(FormationInvalidError, "Formation must have 3 cards", Formation, [
(1, "R"), (2, "Y"), (3, "R"), (5, "G")])
def test_can_get_formation_numbers_in_sorted_fashion(self):
formation = Formation([(1, "R"), (3, "Y"), (2, "R")])
self.assertEquals((1, 2, 3), formation.get_numbers())
formation = Formation([(10, "R"), (9, "Y"), (8, "R")])
self.assertEquals((8, 9, 10), formation.get_numbers())
def test_can_get_formation_colors_in_sorted_fashion(self):
formation = Formation([(1, "R"), (3, "Y"), (2, "R")])
self.assertEquals(("R", "Y", "R"), formation.get_colors())
formation = Formation([(10, "G"), (9, "Y"), (8, "R")])
self.assertEquals(("G", "Y", "R"), formation.get_colors())
def test_can_get_max_number(self):
formation = Formation([(1, "R"), (3, "Y"), (2, "R")])
self.assertEquals(3, formation.get_max_number())
formation = Formation([(10, "G"), (9, "Y"), (8, "R")])
self.assertEquals(10, formation.get_max_number())
def test_formation_equality_with_self(self):
self.assertTrue(Formation([(1, "R"), (2, "R"), (3, "R")]).is_equivalent_in_strength(
Formation([(1, "R"), (2, "R"), (3, "R")])))
def test_formation_equality_with_wedge_and_host(self):
self.assertFalse(Formation([(1, "R"), (2, "R"), (3, "R")]).is_equivalent_in_strength(
Formation([(1, "B"), (2, "B"), (4, "G")])))
self.assertFalse(Formation([(5, "R"), (1, "R"), (3, "Y")]).is_equivalent_in_strength(
Formation([(2, "B"), (3, "B"), (4, "B")])))
def test_formation_equality_with_two_wedges(self):
self.assertTrue(Formation([(1, "R"), (2, "R"), (3, "R")]).is_equivalent_in_strength(
Formation([(1, "G"), (2, "G"), (3, "G")])))
def test_formation_equality_with_wedge_and_battalion(self):
self.assertFalse(Formation([(4, "R"), (2, "R"), (3, "R")]).is_equivalent_in_strength(
Formation([(5, "G"), (1, "G"), (3, "G")])))
def test_formation_equality_with_wedge_and_skirmish(self):
self.assertFalse(Formation([(1, "R"), (2, "R"), (3, "R")]).is_equivalent_in_strength(
Formation([(1, "G"), (2, "G"), (3, "B")])))
def test_formation_equality_with_two_phalanxes(self):
self.assertTrue(Formation([(1, "R"), (1, "G"), (1, "Y")]).is_equivalent_in_strength(
Formation([(1, "P"), (1, "B"), (1, "O")])))
self.assertFalse(Formation([(1, "R"), (1, "G"), (1, "Y")]).is_equivalent_in_strength(
Formation([(2, "P"), (2, "B"), (2, "O")])))
def test_formation_equality_with_two_battalions(self):
self.assertTrue(Formation([(3, "R"), (2, "R"), (5, "R")]).is_equivalent_in_strength(
Formation([(5, "B"), (2, "B"), (3, "B")])))
self.assertFalse(Formation([(6, "R"), (2, "R"), (3, "R")]).is_equivalent_in_strength(
Formation([(5, "B"), (2, "B"), (3, "B")])))
def test_formation_equality_with_two_skirmishes(self):
self.assertTrue(Formation([(1, "R"), (2, "R"), (3, "Y")]).is_equivalent_in_strength(
Formation([(1, "B"), (2, "B"), (3, "G")])))
self.assertFalse(Formation([(1, "R"), (2, "R"), (3, "Y")]).is_equivalent_in_strength(
Formation([(4, "B"), (2, "B"), (3, "G")])))
def test_formation_equality_with_two_hosts(self):
self.assertTrue(Formation([(1, "R"), (4, "Y"), (3, "R")]).is_equivalent_in_strength(
Formation([(1, "G"), (4, "G"), (3, "B")])))
self.assertFalse(Formation([(1, "R"), (2, "Y"), (3, "R")]).is_equivalent_in_strength(
Formation([(4, "G"), (2, "G"), (3, "B")])))
def test_greater_than_check_two_wedges(self):
self.assertTrue(Formation([(4, "R"), (2, "R"), (3, "R")]).is_greater_strength_than(
Formation([(1, "R"), (2, "R"), (3, "R")])))
self.assertFalse(Formation([(1, "R"), (2, "R"), (3, "R")]).is_greater_strength_than(
Formation([(1, "R"), (2, "R"), (3, "R")])))
def test_greater_than_check_wedge_and_phalanx(self):
self.assertTrue(Formation([(1, "R"), (2, "R"), (3, "R")]).is_greater_strength_than(
Formation([(2, "R"), (2, "G"), (2, "B")])))
def test_greater_than_check_two_phalanxes(self):
self.assertTrue(Formation([(2, "Y"), (2, "R"), (2, "B")]).is_greater_strength_than(
Formation([(1, "Y"), (1, "R"), (1, "B")])))
self.assertFalse(Formation([(2, "Y"), (2, "R"), (2, "B")]).is_greater_strength_than(
Formation([(2, "P"), (2, "G"), (2, "O")])))
def test_greater_than_check_phalanx_and_battalion(self):
self.assertTrue(Formation([(3, "Y"), (3, "R"), (3, "B")]).is_greater_strength_than(
Formation([(1, "G"), (3, "G"), (6, "G")])))
self.assertFalse(Formation([(1, "G"), (3, "G"), (6, "G")]).is_greater_strength_than(
Formation([(3, "Y"), (3, "R"), (3, "B")])))
def test_greater_than_check_two_battalions(self):
self.assertTrue(Formation([(1, "G"), (3, "G"), (8, "G")]).is_greater_strength_than(
Formation([(4, "G"), (5, "G"), (2, "G")])))
self.assertFalse(Formation([(1, "G"), (3, "G"), (8, "G")]).is_greater_strength_than(
Formation([(4, "G"), (6, "G"), (2, "G")])))
def test_greater_than_check_battalion_and_skirmish(self):
self.assertTrue(Formation([(3, "G"), (6, "G"), (2, "G")]).is_greater_strength_than(
Formation([(4, "G"), (3, "G"), (5, "B")])))
def test_greater_than_check_two_skirmishes(self):
self.assertTrue(Formation([(4, "G"), (2, "G"), (3, "Y")]).is_greater_strength_than(
Formation([(3, "G"), (1, "G"), (2, "Y")])))
self.assertFalse(Formation([(4, "G"), (2, "G"), (3, "Y")]).is_greater_strength_than(
Formation([(4, "Y"), (2, "B"), (3, "B")])))
def test_greater_than_check_skirmish_and_host(self):
self.assertTrue(Formation([(1, "G"), (3, "B"), (2, "G")]).is_greater_strength_than(
Formation([(4, "G"), (9, "G"), (5, "B")])))
def test_greater_than_check_two_hosts(self):
self.assertTrue(Formation([(4, "G"), (8, "G"), (3, "Y")]).is_greater_strength_than(
Formation([(1, "G"), (1, "R"), (2, "Y")])))
self.assertFalse(Formation([(4, "G"), (8, "G"), (3, "Y")]).is_greater_strength_than(
Formation([(4, "P"), (8, "P"), (3, "O")])))
|
normal
|
{
"blob_id": "0ce69b7ce99b9c01892c240d5b268a9510af4503",
"index": 1648,
"step-1": "<mask token>\n\n\nclass TestFormation(unittest.TestCase):\n <mask token>\n\n def test_formation_with_more_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(FormationInvalidError,\n 'Formation must have 3 cards', Formation, [(1, 'R'), (2, 'Y'),\n (3, 'R'), (5, 'G')])\n\n def test_can_get_formation_numbers_in_sorted_fashion(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals((1, 2, 3), formation.get_numbers())\n formation = Formation([(10, 'R'), (9, 'Y'), (8, 'R')])\n self.assertEquals((8, 9, 10), formation.get_numbers())\n <mask token>\n <mask token>\n\n def test_formation_equality_with_self(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'R'), (2, 'R'), (3,\n 'R')])))\n\n def test_formation_equality_with_wedge_and_host(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (4,\n 'G')])))\n self.assertFalse(Formation([(5, 'R'), (1, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'B'), (3, 'B'), (4,\n 'B')])))\n\n def test_formation_equality_with_two_wedges(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (2, 'G'), (3,\n 'G')])))\n <mask token>\n\n def test_formation_equality_with_wedge_and_skirmish(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_phalanxes(self):\n self.assertTrue(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'P'), (1, 'B'), (1,\n 'O')])))\n self.assertFalse(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'P'), (2, 'B'), (2,\n 'O')])))\n\n def test_formation_equality_with_two_battalions(self):\n self.assertTrue(Formation([(3, 'R'), (2, 'R'), (5, 'R')]).\n 
is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n self.assertFalse(Formation([(6, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_skirmishes(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (3,\n 'G')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(4, 'B'), (2, 'B'), (3,\n 'G')])))\n\n def test_formation_equality_with_two_hosts(self):\n self.assertTrue(Formation([(1, 'R'), (4, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (4, 'G'), (3,\n 'B')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(4, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_greater_than_check_two_wedges(self):\n self.assertTrue(Formation([(4, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 'R'), (2, 'R'), (3, 'R')]))\n )\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 'R'), (2, 'R'), (3, 'R')]))\n )\n\n def test_greater_than_check_wedge_and_phalanx(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(2, 'R'), (2, 'G'), (2, 'B')]))\n )\n\n def test_greater_than_check_two_phalanxes(self):\n self.assertTrue(Formation([(2, 'Y'), (2, 'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(1, 'Y'), (1, 'R'), (1, 'B')]))\n )\n self.assertFalse(Formation([(2, 'Y'), (2, 'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(2, 'P'), (2, 'G'), (2, 'O')]))\n )\n <mask token>\n\n def test_greater_than_check_two_battalions(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (5, 'G'), (2, 'G')]))\n )\n self.assertFalse(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n 
is_greater_strength_than(Formation([(4, 'G'), (6, 'G'), (2, 'G')]))\n )\n <mask token>\n\n def test_greater_than_check_two_skirmishes(self):\n self.assertTrue(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(3, 'G'), (1, 'G'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'Y'), (2, 'B'), (3, 'B')]))\n )\n\n def test_greater_than_check_skirmish_and_host(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'B'), (2, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (9, 'G'), (5, 'B')]))\n )\n\n def test_greater_than_check_two_hosts(self):\n self.assertTrue(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(1, 'G'), (1, 'R'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'P'), (8, 'P'), (3, 'O')]))\n )\n",
"step-2": "<mask token>\n\n\nclass TestFormation(unittest.TestCase):\n <mask token>\n\n def test_formation_with_more_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(FormationInvalidError,\n 'Formation must have 3 cards', Formation, [(1, 'R'), (2, 'Y'),\n (3, 'R'), (5, 'G')])\n\n def test_can_get_formation_numbers_in_sorted_fashion(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals((1, 2, 3), formation.get_numbers())\n formation = Formation([(10, 'R'), (9, 'Y'), (8, 'R')])\n self.assertEquals((8, 9, 10), formation.get_numbers())\n\n def test_can_get_formation_colors_in_sorted_fashion(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals(('R', 'Y', 'R'), formation.get_colors())\n formation = Formation([(10, 'G'), (9, 'Y'), (8, 'R')])\n self.assertEquals(('G', 'Y', 'R'), formation.get_colors())\n\n def test_can_get_max_number(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals(3, formation.get_max_number())\n formation = Formation([(10, 'G'), (9, 'Y'), (8, 'R')])\n self.assertEquals(10, formation.get_max_number())\n\n def test_formation_equality_with_self(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'R'), (2, 'R'), (3,\n 'R')])))\n\n def test_formation_equality_with_wedge_and_host(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (4,\n 'G')])))\n self.assertFalse(Formation([(5, 'R'), (1, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'B'), (3, 'B'), (4,\n 'B')])))\n\n def test_formation_equality_with_two_wedges(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (2, 'G'), (3,\n 'G')])))\n <mask token>\n\n def test_formation_equality_with_wedge_and_skirmish(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n 
is_equivalent_in_strength(Formation([(1, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_phalanxes(self):\n self.assertTrue(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'P'), (1, 'B'), (1,\n 'O')])))\n self.assertFalse(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'P'), (2, 'B'), (2,\n 'O')])))\n\n def test_formation_equality_with_two_battalions(self):\n self.assertTrue(Formation([(3, 'R'), (2, 'R'), (5, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n self.assertFalse(Formation([(6, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_skirmishes(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (3,\n 'G')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(4, 'B'), (2, 'B'), (3,\n 'G')])))\n\n def test_formation_equality_with_two_hosts(self):\n self.assertTrue(Formation([(1, 'R'), (4, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (4, 'G'), (3,\n 'B')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(4, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_greater_than_check_two_wedges(self):\n self.assertTrue(Formation([(4, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 'R'), (2, 'R'), (3, 'R')]))\n )\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 'R'), (2, 'R'), (3, 'R')]))\n )\n\n def test_greater_than_check_wedge_and_phalanx(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(2, 'R'), (2, 'G'), (2, 'B')]))\n )\n\n def test_greater_than_check_two_phalanxes(self):\n self.assertTrue(Formation([(2, 'Y'), (2, 
'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(1, 'Y'), (1, 'R'), (1, 'B')]))\n )\n self.assertFalse(Formation([(2, 'Y'), (2, 'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(2, 'P'), (2, 'G'), (2, 'O')]))\n )\n <mask token>\n\n def test_greater_than_check_two_battalions(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (5, 'G'), (2, 'G')]))\n )\n self.assertFalse(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (6, 'G'), (2, 'G')]))\n )\n\n def test_greater_than_check_battalion_and_skirmish(self):\n self.assertTrue(Formation([(3, 'G'), (6, 'G'), (2, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (3, 'G'), (5, 'B')]))\n )\n\n def test_greater_than_check_two_skirmishes(self):\n self.assertTrue(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(3, 'G'), (1, 'G'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'Y'), (2, 'B'), (3, 'B')]))\n )\n\n def test_greater_than_check_skirmish_and_host(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'B'), (2, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (9, 'G'), (5, 'B')]))\n )\n\n def test_greater_than_check_two_hosts(self):\n self.assertTrue(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(1, 'G'), (1, 'R'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'P'), (8, 'P'), (3, 'O')]))\n )\n",
"step-3": "<mask token>\n\n\nclass TestFormation(unittest.TestCase):\n\n def test_formation_with_less_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(FormationInvalidError,\n 'Formation must have 3 cards', Formation, [(1, 'R'), (2, 'Y')])\n\n def test_formation_with_more_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(FormationInvalidError,\n 'Formation must have 3 cards', Formation, [(1, 'R'), (2, 'Y'),\n (3, 'R'), (5, 'G')])\n\n def test_can_get_formation_numbers_in_sorted_fashion(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals((1, 2, 3), formation.get_numbers())\n formation = Formation([(10, 'R'), (9, 'Y'), (8, 'R')])\n self.assertEquals((8, 9, 10), formation.get_numbers())\n\n def test_can_get_formation_colors_in_sorted_fashion(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals(('R', 'Y', 'R'), formation.get_colors())\n formation = Formation([(10, 'G'), (9, 'Y'), (8, 'R')])\n self.assertEquals(('G', 'Y', 'R'), formation.get_colors())\n\n def test_can_get_max_number(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals(3, formation.get_max_number())\n formation = Formation([(10, 'G'), (9, 'Y'), (8, 'R')])\n self.assertEquals(10, formation.get_max_number())\n\n def test_formation_equality_with_self(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'R'), (2, 'R'), (3,\n 'R')])))\n\n def test_formation_equality_with_wedge_and_host(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (4,\n 'G')])))\n self.assertFalse(Formation([(5, 'R'), (1, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'B'), (3, 'B'), (4,\n 'B')])))\n\n def test_formation_equality_with_two_wedges(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 
'G'), (2, 'G'), (3,\n 'G')])))\n <mask token>\n\n def test_formation_equality_with_wedge_and_skirmish(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_phalanxes(self):\n self.assertTrue(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'P'), (1, 'B'), (1,\n 'O')])))\n self.assertFalse(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'P'), (2, 'B'), (2,\n 'O')])))\n\n def test_formation_equality_with_two_battalions(self):\n self.assertTrue(Formation([(3, 'R'), (2, 'R'), (5, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n self.assertFalse(Formation([(6, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_skirmishes(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (3,\n 'G')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(4, 'B'), (2, 'B'), (3,\n 'G')])))\n\n def test_formation_equality_with_two_hosts(self):\n self.assertTrue(Formation([(1, 'R'), (4, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (4, 'G'), (3,\n 'B')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(4, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_greater_than_check_two_wedges(self):\n self.assertTrue(Formation([(4, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 'R'), (2, 'R'), (3, 'R')]))\n )\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 'R'), (2, 'R'), (3, 'R')]))\n )\n\n def test_greater_than_check_wedge_and_phalanx(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n 
is_greater_strength_than(Formation([(2, 'R'), (2, 'G'), (2, 'B')]))\n )\n\n def test_greater_than_check_two_phalanxes(self):\n self.assertTrue(Formation([(2, 'Y'), (2, 'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(1, 'Y'), (1, 'R'), (1, 'B')]))\n )\n self.assertFalse(Formation([(2, 'Y'), (2, 'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(2, 'P'), (2, 'G'), (2, 'O')]))\n )\n <mask token>\n\n def test_greater_than_check_two_battalions(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (5, 'G'), (2, 'G')]))\n )\n self.assertFalse(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (6, 'G'), (2, 'G')]))\n )\n\n def test_greater_than_check_battalion_and_skirmish(self):\n self.assertTrue(Formation([(3, 'G'), (6, 'G'), (2, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (3, 'G'), (5, 'B')]))\n )\n\n def test_greater_than_check_two_skirmishes(self):\n self.assertTrue(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(3, 'G'), (1, 'G'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'Y'), (2, 'B'), (3, 'B')]))\n )\n\n def test_greater_than_check_skirmish_and_host(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'B'), (2, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (9, 'G'), (5, 'B')]))\n )\n\n def test_greater_than_check_two_hosts(self):\n self.assertTrue(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(1, 'G'), (1, 'R'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'P'), (8, 'P'), (3, 'O')]))\n )\n",
"step-4": "import unittest\nfrom battleline.model.Formation import Formation, FormationInvalidError\n\n\nclass TestFormation(unittest.TestCase):\n\n def test_formation_with_less_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(FormationInvalidError,\n 'Formation must have 3 cards', Formation, [(1, 'R'), (2, 'Y')])\n\n def test_formation_with_more_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(FormationInvalidError,\n 'Formation must have 3 cards', Formation, [(1, 'R'), (2, 'Y'),\n (3, 'R'), (5, 'G')])\n\n def test_can_get_formation_numbers_in_sorted_fashion(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals((1, 2, 3), formation.get_numbers())\n formation = Formation([(10, 'R'), (9, 'Y'), (8, 'R')])\n self.assertEquals((8, 9, 10), formation.get_numbers())\n\n def test_can_get_formation_colors_in_sorted_fashion(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals(('R', 'Y', 'R'), formation.get_colors())\n formation = Formation([(10, 'G'), (9, 'Y'), (8, 'R')])\n self.assertEquals(('G', 'Y', 'R'), formation.get_colors())\n\n def test_can_get_max_number(self):\n formation = Formation([(1, 'R'), (3, 'Y'), (2, 'R')])\n self.assertEquals(3, formation.get_max_number())\n formation = Formation([(10, 'G'), (9, 'Y'), (8, 'R')])\n self.assertEquals(10, formation.get_max_number())\n\n def test_formation_equality_with_self(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'R'), (2, 'R'), (3,\n 'R')])))\n\n def test_formation_equality_with_wedge_and_host(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (4,\n 'G')])))\n self.assertFalse(Formation([(5, 'R'), (1, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'B'), (3, 'B'), (4,\n 'B')])))\n\n def test_formation_equality_with_two_wedges(self):\n 
self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (2, 'G'), (3,\n 'G')])))\n\n def test_formation_equality_with_wedge_and_battalion(self):\n self.assertFalse(Formation([(4, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'G'), (1, 'G'), (3,\n 'G')])))\n\n def test_formation_equality_with_wedge_and_skirmish(self):\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_phalanxes(self):\n self.assertTrue(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'P'), (1, 'B'), (1,\n 'O')])))\n self.assertFalse(Formation([(1, 'R'), (1, 'G'), (1, 'Y')]).\n is_equivalent_in_strength(Formation([(2, 'P'), (2, 'B'), (2,\n 'O')])))\n\n def test_formation_equality_with_two_battalions(self):\n self.assertTrue(Formation([(3, 'R'), (2, 'R'), (5, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n self.assertFalse(Formation([(6, 'R'), (2, 'R'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(5, 'B'), (2, 'B'), (3,\n 'B')])))\n\n def test_formation_equality_with_two_skirmishes(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(1, 'B'), (2, 'B'), (3,\n 'G')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'Y')]).\n is_equivalent_in_strength(Formation([(4, 'B'), (2, 'B'), (3,\n 'G')])))\n\n def test_formation_equality_with_two_hosts(self):\n self.assertTrue(Formation([(1, 'R'), (4, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(1, 'G'), (4, 'G'), (3,\n 'B')])))\n self.assertFalse(Formation([(1, 'R'), (2, 'Y'), (3, 'R')]).\n is_equivalent_in_strength(Formation([(4, 'G'), (2, 'G'), (3,\n 'B')])))\n\n def test_greater_than_check_two_wedges(self):\n self.assertTrue(Formation([(4, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 
'R'), (2, 'R'), (3, 'R')]))\n )\n self.assertFalse(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(1, 'R'), (2, 'R'), (3, 'R')]))\n )\n\n def test_greater_than_check_wedge_and_phalanx(self):\n self.assertTrue(Formation([(1, 'R'), (2, 'R'), (3, 'R')]).\n is_greater_strength_than(Formation([(2, 'R'), (2, 'G'), (2, 'B')]))\n )\n\n def test_greater_than_check_two_phalanxes(self):\n self.assertTrue(Formation([(2, 'Y'), (2, 'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(1, 'Y'), (1, 'R'), (1, 'B')]))\n )\n self.assertFalse(Formation([(2, 'Y'), (2, 'R'), (2, 'B')]).\n is_greater_strength_than(Formation([(2, 'P'), (2, 'G'), (2, 'O')]))\n )\n\n def test_greater_than_check_phalanx_and_battalion(self):\n self.assertTrue(Formation([(3, 'Y'), (3, 'R'), (3, 'B')]).\n is_greater_strength_than(Formation([(1, 'G'), (3, 'G'), (6, 'G')]))\n )\n self.assertFalse(Formation([(1, 'G'), (3, 'G'), (6, 'G')]).\n is_greater_strength_than(Formation([(3, 'Y'), (3, 'R'), (3, 'B')]))\n )\n\n def test_greater_than_check_two_battalions(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (5, 'G'), (2, 'G')]))\n )\n self.assertFalse(Formation([(1, 'G'), (3, 'G'), (8, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (6, 'G'), (2, 'G')]))\n )\n\n def test_greater_than_check_battalion_and_skirmish(self):\n self.assertTrue(Formation([(3, 'G'), (6, 'G'), (2, 'G')]).\n is_greater_strength_than(Formation([(4, 'G'), (3, 'G'), (5, 'B')]))\n )\n\n def test_greater_than_check_two_skirmishes(self):\n self.assertTrue(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(3, 'G'), (1, 'G'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (2, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'Y'), (2, 'B'), (3, 'B')]))\n )\n\n def test_greater_than_check_skirmish_and_host(self):\n self.assertTrue(Formation([(1, 'G'), (3, 'B'), (2, 'G')]).\n 
is_greater_strength_than(Formation([(4, 'G'), (9, 'G'), (5, 'B')]))\n )\n\n def test_greater_than_check_two_hosts(self):\n self.assertTrue(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(1, 'G'), (1, 'R'), (2, 'Y')]))\n )\n self.assertFalse(Formation([(4, 'G'), (8, 'G'), (3, 'Y')]).\n is_greater_strength_than(Formation([(4, 'P'), (8, 'P'), (3, 'O')]))\n )\n",
"step-5": "import unittest\nfrom battleline.model.Formation import Formation, FormationInvalidError\n\n\nclass TestFormation(unittest.TestCase):\n\n def test_formation_with_less_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(\n FormationInvalidError, \"Formation must have 3 cards\", Formation, [(1, \"R\"), (2, \"Y\")])\n\n def test_formation_with_more_than_three_cards_is_considered_invalid(self):\n self.assertRaisesRegexp(FormationInvalidError, \"Formation must have 3 cards\", Formation, [\n (1, \"R\"), (2, \"Y\"), (3, \"R\"), (5, \"G\")])\n\n def test_can_get_formation_numbers_in_sorted_fashion(self):\n formation = Formation([(1, \"R\"), (3, \"Y\"), (2, \"R\")])\n self.assertEquals((1, 2, 3), formation.get_numbers())\n\n formation = Formation([(10, \"R\"), (9, \"Y\"), (8, \"R\")])\n self.assertEquals((8, 9, 10), formation.get_numbers())\n\n def test_can_get_formation_colors_in_sorted_fashion(self):\n formation = Formation([(1, \"R\"), (3, \"Y\"), (2, \"R\")])\n self.assertEquals((\"R\", \"Y\", \"R\"), formation.get_colors())\n\n formation = Formation([(10, \"G\"), (9, \"Y\"), (8, \"R\")])\n self.assertEquals((\"G\", \"Y\", \"R\"), formation.get_colors())\n\n def test_can_get_max_number(self):\n formation = Formation([(1, \"R\"), (3, \"Y\"), (2, \"R\")])\n self.assertEquals(3, formation.get_max_number())\n\n formation = Formation([(10, \"G\"), (9, \"Y\"), (8, \"R\")])\n self.assertEquals(10, formation.get_max_number())\n\n def test_formation_equality_with_self(self):\n self.assertTrue(Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")])))\n\n def test_formation_equality_with_wedge_and_host(self):\n self.assertFalse(Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(1, \"B\"), (2, \"B\"), (4, \"G\")])))\n self.assertFalse(Formation([(5, \"R\"), (1, \"R\"), (3, \"Y\")]).is_equivalent_in_strength(\n Formation([(2, \"B\"), (3, \"B\"), 
(4, \"B\")])))\n\n def test_formation_equality_with_two_wedges(self):\n self.assertTrue(Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(1, \"G\"), (2, \"G\"), (3, \"G\")])))\n\n def test_formation_equality_with_wedge_and_battalion(self):\n self.assertFalse(Formation([(4, \"R\"), (2, \"R\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(5, \"G\"), (1, \"G\"), (3, \"G\")])))\n\n def test_formation_equality_with_wedge_and_skirmish(self):\n self.assertFalse(Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(1, \"G\"), (2, \"G\"), (3, \"B\")])))\n\n def test_formation_equality_with_two_phalanxes(self):\n self.assertTrue(Formation([(1, \"R\"), (1, \"G\"), (1, \"Y\")]).is_equivalent_in_strength(\n Formation([(1, \"P\"), (1, \"B\"), (1, \"O\")])))\n self.assertFalse(Formation([(1, \"R\"), (1, \"G\"), (1, \"Y\")]).is_equivalent_in_strength(\n Formation([(2, \"P\"), (2, \"B\"), (2, \"O\")])))\n\n def test_formation_equality_with_two_battalions(self):\n self.assertTrue(Formation([(3, \"R\"), (2, \"R\"), (5, \"R\")]).is_equivalent_in_strength(\n Formation([(5, \"B\"), (2, \"B\"), (3, \"B\")])))\n self.assertFalse(Formation([(6, \"R\"), (2, \"R\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(5, \"B\"), (2, \"B\"), (3, \"B\")])))\n\n def test_formation_equality_with_two_skirmishes(self):\n self.assertTrue(Formation([(1, \"R\"), (2, \"R\"), (3, \"Y\")]).is_equivalent_in_strength(\n Formation([(1, \"B\"), (2, \"B\"), (3, \"G\")])))\n self.assertFalse(Formation([(1, \"R\"), (2, \"R\"), (3, \"Y\")]).is_equivalent_in_strength(\n Formation([(4, \"B\"), (2, \"B\"), (3, \"G\")])))\n\n def test_formation_equality_with_two_hosts(self):\n self.assertTrue(Formation([(1, \"R\"), (4, \"Y\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(1, \"G\"), (4, \"G\"), (3, \"B\")])))\n self.assertFalse(Formation([(1, \"R\"), (2, \"Y\"), (3, \"R\")]).is_equivalent_in_strength(\n Formation([(4, 
\"G\"), (2, \"G\"), (3, \"B\")])))\n\n def test_greater_than_check_two_wedges(self):\n self.assertTrue(Formation([(4, \"R\"), (2, \"R\"), (3, \"R\")]).is_greater_strength_than(\n Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")])))\n self.assertFalse(Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")]).is_greater_strength_than(\n Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")])))\n\n def test_greater_than_check_wedge_and_phalanx(self):\n self.assertTrue(Formation([(1, \"R\"), (2, \"R\"), (3, \"R\")]).is_greater_strength_than(\n Formation([(2, \"R\"), (2, \"G\"), (2, \"B\")])))\n\n def test_greater_than_check_two_phalanxes(self):\n self.assertTrue(Formation([(2, \"Y\"), (2, \"R\"), (2, \"B\")]).is_greater_strength_than(\n Formation([(1, \"Y\"), (1, \"R\"), (1, \"B\")])))\n self.assertFalse(Formation([(2, \"Y\"), (2, \"R\"), (2, \"B\")]).is_greater_strength_than(\n Formation([(2, \"P\"), (2, \"G\"), (2, \"O\")])))\n\n def test_greater_than_check_phalanx_and_battalion(self):\n self.assertTrue(Formation([(3, \"Y\"), (3, \"R\"), (3, \"B\")]).is_greater_strength_than(\n Formation([(1, \"G\"), (3, \"G\"), (6, \"G\")])))\n self.assertFalse(Formation([(1, \"G\"), (3, \"G\"), (6, \"G\")]).is_greater_strength_than(\n Formation([(3, \"Y\"), (3, \"R\"), (3, \"B\")])))\n\n def test_greater_than_check_two_battalions(self):\n self.assertTrue(Formation([(1, \"G\"), (3, \"G\"), (8, \"G\")]).is_greater_strength_than(\n Formation([(4, \"G\"), (5, \"G\"), (2, \"G\")])))\n self.assertFalse(Formation([(1, \"G\"), (3, \"G\"), (8, \"G\")]).is_greater_strength_than(\n Formation([(4, \"G\"), (6, \"G\"), (2, \"G\")])))\n\n def test_greater_than_check_battalion_and_skirmish(self):\n self.assertTrue(Formation([(3, \"G\"), (6, \"G\"), (2, \"G\")]).is_greater_strength_than(\n Formation([(4, \"G\"), (3, \"G\"), (5, \"B\")])))\n\n def test_greater_than_check_two_skirmishes(self):\n self.assertTrue(Formation([(4, \"G\"), (2, \"G\"), (3, \"Y\")]).is_greater_strength_than(\n Formation([(3, \"G\"), (1, \"G\"), 
(2, \"Y\")])))\n self.assertFalse(Formation([(4, \"G\"), (2, \"G\"), (3, \"Y\")]).is_greater_strength_than(\n Formation([(4, \"Y\"), (2, \"B\"), (3, \"B\")])))\n\n def test_greater_than_check_skirmish_and_host(self):\n self.assertTrue(Formation([(1, \"G\"), (3, \"B\"), (2, \"G\")]).is_greater_strength_than(\n Formation([(4, \"G\"), (9, \"G\"), (5, \"B\")])))\n\n def test_greater_than_check_two_hosts(self):\n self.assertTrue(Formation([(4, \"G\"), (8, \"G\"), (3, \"Y\")]).is_greater_strength_than(\n Formation([(1, \"G\"), (1, \"R\"), (2, \"Y\")])))\n self.assertFalse(Formation([(4, \"G\"), (8, \"G\"), (3, \"Y\")]).is_greater_strength_than(\n Formation([(4, \"P\"), (8, \"P\"), (3, \"O\")])))\n",
"step-ids": [
18,
21,
22,
25,
26
]
}
|
[
18,
21,
22,
25,
26
] |
from tqdm import trange
import numpy as np
class GPTD_fixedGrid:
    """Gaussian Process Temporal Difference (GPTD) value estimation on a
    fixed dictionary of support states.

    The dictionary ``D`` (one state per column) is supplied up front and is
    never extended; trajectories observed via :meth:`update` only refine the
    posterior coefficients ``alpha_`` and ``C_`` defined over that grid.
    """

    def __init__(self, env, sigma0, gamma, kernel, D, V_mu=None):
        """
        Parameters
        ----------
        env : object
            Environment exposing ``reset()`` and ``step(action)``.
        sigma0 : float
            Observation-noise standard deviation.
        gamma : float
            Discount factor.
        kernel : object
            Wrapper whose ``kernel(X, Y)`` attribute returns the pairwise
            kernel matrix between the columns of ``X`` and ``Y``.
        D : np.ndarray
            Fixed dictionary of support states, one state per column.
        V_mu : callable, optional
            Prior mean function mapping states (d, n) to values (n, 1);
            defaults to the zero function.
        """
        self.env = env
        self.gamma = gamma
        self.sigma0 = sigma0
        self.kernel = kernel.kernel
        # Fix: the original signature used a mutable default (V_mu=[]);
        # use None as the sentinel. `not V_mu` is True for both None and [].
        if not V_mu:
            V_mu = lambda s: np.zeros((s.shape[1], 1))
        self.V_mu = V_mu
        self.V_D = self.V_mu(D)  # prior mean evaluated on the dictionary
        self.D = D
        self.A = np.zeros((self.D.shape[1], 1), dtype=np.float64, order='C')
        self.A[-1, 0] = 1
        K = self.kernel(self.D, self.D)
        self.K_inv = np.linalg.inv(K)  # inverse Gram matrix of the fixed grid
        self.alpha_ = np.zeros((self.D.shape[1], 1), dtype=np.float64, order='C')
        self.C_ = np.zeros((self.D.shape[1], self.D.shape[1]), dtype=np.float64, order='C')
        self.diff_alpha_CV_D = np.empty((self.D.shape[1], 1), dtype=np.float64, order='C')

    def k_(self, x):
        """Return the kernel vector between the dictionary and one state ``x``."""
        if len(x.shape) == 1:
            x = x[:, np.newaxis]
        assert len(x.shape) == 2, "Check state dimensions"
        return self.kernel(self.D, np.repeat(x, self.D.shape[1], axis=1))

    def update(self, state_sequence, reward_sequence):
        """
        Update GP after observing states (state_sequence) and rewards
        (reward_sequence).

        ``state_sequence`` has one more column than ``reward_sequence`` has
        entries: reward ``i`` is observed on the transition from state ``i``
        to state ``i+1``.
        """
        for i in range(reward_sequence.shape[0]):
            trajt_1 = state_sequence[:, i][:, np.newaxis]
            trajt = state_sequence[:, i + 1][:, np.newaxis]
            k_t_1 = self.kernel(self.D, trajt_1)
            k_t = self.kernel(self.D, trajt)
            # Projection weights of the new state onto the fixed grid.
            at = np.dot(self.K_inv, k_t)
            # Temporal-difference kernel vector k(s_t) - gamma * k(s_{t+1}).
            delk_t_1 = k_t_1 - self.gamma * k_t
            ct = np.dot(self.C_, delk_t_1) - (self.A - self.gamma * at)
            st = self.sigma0 ** 2 - np.dot(ct.T, delk_t_1)
            diff_r = np.dot(delk_t_1.T, self.alpha_)[0, 0] - reward_sequence[i]
            self.alpha_ = self.alpha_ + ct / st * diff_r
            self.C_ = self.C_ + np.dot(ct, ct.T) / st
            self.A = at
            assert (not np.isnan(self.alpha_).any()), "Check alpha for NaN values"
        # Cache alpha - C * V_mu(D); consumed by get_value_function().
        self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)

    def build_posterior(self, policy, num_episodes, max_episode_length, test_every=np.inf, states_V_target=()):
        """
        policy is a function that take state as input and returns an action.

        Runs ``num_episodes`` episodes in ``self.env``, updating the
        posterior after each one.  If ``states_V_target`` is a pair
        ``(states, V_target)``, the mean absolute value-function error on
        those states is recorded every ``test_every`` episodes.

        Returns the array of recorded test errors (possibly empty).
        """
        statistics = trange(num_episodes)
        test_error = np.array([])
        for e in statistics:
            is_terminal = False
            num_steps = 0
            state = self.env.reset()
            action = policy(state)
            # Preallocate for the worst case; trimmed after the episode.
            state_sequence = np.empty((state.shape[0], max_episode_length + 1), dtype=np.float64, order='C')
            state_sequence[:, 0] = state[:, 0]
            reward_sequence = np.empty(max_episode_length, dtype=np.float64, order='C')
            while (num_steps < max_episode_length) and (not is_terminal):
                num_steps += 1
                state, reward, is_terminal = self.env.step(action)
                action = policy(state)
                state_sequence[:, num_steps] = state[:, 0]
                reward_sequence[num_steps - 1] = reward
            state_sequence = state_sequence[:, 0:(num_steps + 1)]
            reward_sequence = reward_sequence[0:num_steps]
            if self.D.shape[1] == 0:
                # Degenerate case: empty dictionary -- seed it with the
                # episode's first state and reset the posterior.
                traj = state_sequence[:, 0][:, np.newaxis]
                self.D = traj
                self.V_D = self.V_mu(state_sequence[:, 0][:, np.newaxis])
                self.K_inv = 1 / self.kernel(traj, traj)
                self.A = np.array([[1]])
                self.alpha_ = np.array([[0]])
                self.C_ = np.array([[0]])
                self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)
            self.update(state_sequence, reward_sequence)
            statistics.set_postfix(epi_length=num_steps, dict_size=self.D.shape[1], cumm_cost=np.sum(reward_sequence))
            if e % test_every == 0 and len(states_V_target) == 2:
                V = self.get_value_function(states_V_target[0])
                test_error = np.concatenate((test_error, np.array([np.mean(np.abs(V - states_V_target[1]))])))
        return test_error

    def get_value_function(self, states):
        """Return posterior value estimates for ``states`` (one per column)."""
        if self.D.shape[1] == 0:
            return self.V_mu(states)
        return self.V_mu(states) + np.dot(self.kernel(self.D, states).T, self.diff_alpha_CV_D)
|
normal
|
{
"blob_id": "92eaceb46974ba3a5944300139d5929d44673181",
"index": 1223,
"step-1": "<mask token>\n\n\nclass GPTD_fixedGrid:\n\n def __init__(self, env, sigma0, gamma, kernel, D, V_mu=[]):\n self.env = env\n self.gamma = gamma\n self.sigma0 = sigma0\n self.kernel = kernel.kernel\n if not V_mu:\n V_mu = lambda s: np.zeros((s.shape[1], 1))\n self.V_mu = V_mu\n self.V_D = self.V_mu(D)\n self.D = D\n self.A = np.zeros((self.D.shape[1], 1), dtype=np.float64, order='C')\n self.A[-1, 0] = 1\n K = self.kernel(self.D, self.D)\n self.K_inv = np.linalg.inv(K)\n self.alpha_ = np.zeros((self.D.shape[1], 1), dtype=np.float64,\n order='C')\n self.C_ = np.zeros((self.D.shape[1], self.D.shape[1]), dtype=np.\n float64, order='C')\n self.diff_alpha_CV_D = np.empty((self.D.shape[1], 1), dtype=np.\n float64, order='C')\n\n def k_(self, x):\n if len(x.shape) == 1:\n x = x[:, np.newaxis]\n assert len(x.shape) == 2, 'Check state dimensions'\n return self.kernel(self.D, np.repeat(x, self.D.shape[1], axis=1))\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass GPTD_fixedGrid:\n\n def __init__(self, env, sigma0, gamma, kernel, D, V_mu=[]):\n self.env = env\n self.gamma = gamma\n self.sigma0 = sigma0\n self.kernel = kernel.kernel\n if not V_mu:\n V_mu = lambda s: np.zeros((s.shape[1], 1))\n self.V_mu = V_mu\n self.V_D = self.V_mu(D)\n self.D = D\n self.A = np.zeros((self.D.shape[1], 1), dtype=np.float64, order='C')\n self.A[-1, 0] = 1\n K = self.kernel(self.D, self.D)\n self.K_inv = np.linalg.inv(K)\n self.alpha_ = np.zeros((self.D.shape[1], 1), dtype=np.float64,\n order='C')\n self.C_ = np.zeros((self.D.shape[1], self.D.shape[1]), dtype=np.\n float64, order='C')\n self.diff_alpha_CV_D = np.empty((self.D.shape[1], 1), dtype=np.\n float64, order='C')\n\n def k_(self, x):\n if len(x.shape) == 1:\n x = x[:, np.newaxis]\n assert len(x.shape) == 2, 'Check state dimensions'\n return self.kernel(self.D, np.repeat(x, self.D.shape[1], axis=1))\n\n def update(self, state_sequence, reward_sequence):\n \"\"\"\n Update GP after observing states (state_sequence) and rewards (reward_sequence)\n \"\"\"\n for i in range(reward_sequence.shape[0]):\n trajt_1 = state_sequence[:, i][:, np.newaxis]\n trajt = state_sequence[:, i + 1][:, np.newaxis]\n k_t_1 = self.kernel(self.D, trajt_1)\n k_t = self.kernel(self.D, trajt)\n ktt = self.kernel(trajt, trajt)\n at = np.dot(self.K_inv, k_t)\n delk_t_1 = k_t_1 - self.gamma * k_t\n ct = np.dot(self.C_, delk_t_1) - (self.A - self.gamma * at)\n st = self.sigma0 ** 2 - np.dot(ct.T, delk_t_1)\n diff_r = np.dot(delk_t_1.T, self.alpha_)[0, 0] - reward_sequence[i]\n self.alpha_ = self.alpha_ + ct / st * diff_r\n self.C_ = self.C_ + np.dot(ct, ct.T) / st\n self.A = at\n assert not np.isnan(self.alpha_).any(\n ), 'Check alpha for NaN values'\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n <mask token>\n\n def get_value_function(self, states):\n if self.D.shape[1] == 0:\n return self.V_mu(states)\n else:\n return self.V_mu(states) + np.dot(self.kernel(self.D, 
states).T,\n self.diff_alpha_CV_D)\n",
"step-3": "<mask token>\n\n\nclass GPTD_fixedGrid:\n\n def __init__(self, env, sigma0, gamma, kernel, D, V_mu=[]):\n self.env = env\n self.gamma = gamma\n self.sigma0 = sigma0\n self.kernel = kernel.kernel\n if not V_mu:\n V_mu = lambda s: np.zeros((s.shape[1], 1))\n self.V_mu = V_mu\n self.V_D = self.V_mu(D)\n self.D = D\n self.A = np.zeros((self.D.shape[1], 1), dtype=np.float64, order='C')\n self.A[-1, 0] = 1\n K = self.kernel(self.D, self.D)\n self.K_inv = np.linalg.inv(K)\n self.alpha_ = np.zeros((self.D.shape[1], 1), dtype=np.float64,\n order='C')\n self.C_ = np.zeros((self.D.shape[1], self.D.shape[1]), dtype=np.\n float64, order='C')\n self.diff_alpha_CV_D = np.empty((self.D.shape[1], 1), dtype=np.\n float64, order='C')\n\n def k_(self, x):\n if len(x.shape) == 1:\n x = x[:, np.newaxis]\n assert len(x.shape) == 2, 'Check state dimensions'\n return self.kernel(self.D, np.repeat(x, self.D.shape[1], axis=1))\n\n def update(self, state_sequence, reward_sequence):\n \"\"\"\n Update GP after observing states (state_sequence) and rewards (reward_sequence)\n \"\"\"\n for i in range(reward_sequence.shape[0]):\n trajt_1 = state_sequence[:, i][:, np.newaxis]\n trajt = state_sequence[:, i + 1][:, np.newaxis]\n k_t_1 = self.kernel(self.D, trajt_1)\n k_t = self.kernel(self.D, trajt)\n ktt = self.kernel(trajt, trajt)\n at = np.dot(self.K_inv, k_t)\n delk_t_1 = k_t_1 - self.gamma * k_t\n ct = np.dot(self.C_, delk_t_1) - (self.A - self.gamma * at)\n st = self.sigma0 ** 2 - np.dot(ct.T, delk_t_1)\n diff_r = np.dot(delk_t_1.T, self.alpha_)[0, 0] - reward_sequence[i]\n self.alpha_ = self.alpha_ + ct / st * diff_r\n self.C_ = self.C_ + np.dot(ct, ct.T) / st\n self.A = at\n assert not np.isnan(self.alpha_).any(\n ), 'Check alpha for NaN values'\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n def build_posterior(self, policy, num_episodes, max_episode_length,\n test_every=np.inf, states_V_target=()):\n \"\"\"\n policy is a function that take state as input and 
returns an action\n \"\"\"\n statistics = trange(num_episodes)\n test_error = np.array([])\n for e in statistics:\n is_terminal = False\n num_steps = 0\n state = self.env.reset()\n action = policy(state)\n state_sequence = np.empty((state.shape[0], max_episode_length +\n 1), dtype=np.float64, order='C')\n state_sequence[:, 0] = state[:, 0]\n reward_sequence = np.empty(max_episode_length, dtype=np.float64,\n order='C')\n while num_steps < max_episode_length and not is_terminal:\n num_steps += 1\n state, reward, is_terminal = self.env.step(action)\n action = policy(state)\n state_sequence[:, num_steps] = state[:, 0]\n reward_sequence[num_steps - 1] = reward\n state_sequence = state_sequence[:, 0:num_steps + 1]\n reward_sequence = reward_sequence[0:num_steps]\n if self.D.shape[1] == 0:\n traj = state_sequence[:, 0][:, np.newaxis]\n self.D = traj\n self.V_D = self.V_mu(state_sequence[:, 0][:, np.newaxis])\n self.K_inv = 1 / self.kernel(traj, traj)\n self.A = np.array([[1]])\n self.alpha_ = np.array([[0]])\n self.C_ = np.array([[0]])\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n self.update(state_sequence, reward_sequence)\n statistics.set_postfix(epi_length=num_steps, dict_size=self.D.\n shape[1], cumm_cost=np.sum(reward_sequence))\n if e % test_every == 0 and len(states_V_target) == 2:\n V = self.get_value_function(states_V_target[0])\n test_error = np.concatenate((test_error, np.array([np.mean(\n np.abs(V - states_V_target[1]))])))\n return test_error\n\n def get_value_function(self, states):\n if self.D.shape[1] == 0:\n return self.V_mu(states)\n else:\n return self.V_mu(states) + np.dot(self.kernel(self.D, states).T,\n self.diff_alpha_CV_D)\n",
"step-4": "from tqdm import trange\nimport numpy as np\n\n\nclass GPTD_fixedGrid:\n\n def __init__(self, env, sigma0, gamma, kernel, D, V_mu=[]):\n self.env = env\n self.gamma = gamma\n self.sigma0 = sigma0\n self.kernel = kernel.kernel\n if not V_mu:\n V_mu = lambda s: np.zeros((s.shape[1], 1))\n self.V_mu = V_mu\n self.V_D = self.V_mu(D)\n self.D = D\n self.A = np.zeros((self.D.shape[1], 1), dtype=np.float64, order='C')\n self.A[-1, 0] = 1\n K = self.kernel(self.D, self.D)\n self.K_inv = np.linalg.inv(K)\n self.alpha_ = np.zeros((self.D.shape[1], 1), dtype=np.float64,\n order='C')\n self.C_ = np.zeros((self.D.shape[1], self.D.shape[1]), dtype=np.\n float64, order='C')\n self.diff_alpha_CV_D = np.empty((self.D.shape[1], 1), dtype=np.\n float64, order='C')\n\n def k_(self, x):\n if len(x.shape) == 1:\n x = x[:, np.newaxis]\n assert len(x.shape) == 2, 'Check state dimensions'\n return self.kernel(self.D, np.repeat(x, self.D.shape[1], axis=1))\n\n def update(self, state_sequence, reward_sequence):\n \"\"\"\n Update GP after observing states (state_sequence) and rewards (reward_sequence)\n \"\"\"\n for i in range(reward_sequence.shape[0]):\n trajt_1 = state_sequence[:, i][:, np.newaxis]\n trajt = state_sequence[:, i + 1][:, np.newaxis]\n k_t_1 = self.kernel(self.D, trajt_1)\n k_t = self.kernel(self.D, trajt)\n ktt = self.kernel(trajt, trajt)\n at = np.dot(self.K_inv, k_t)\n delk_t_1 = k_t_1 - self.gamma * k_t\n ct = np.dot(self.C_, delk_t_1) - (self.A - self.gamma * at)\n st = self.sigma0 ** 2 - np.dot(ct.T, delk_t_1)\n diff_r = np.dot(delk_t_1.T, self.alpha_)[0, 0] - reward_sequence[i]\n self.alpha_ = self.alpha_ + ct / st * diff_r\n self.C_ = self.C_ + np.dot(ct, ct.T) / st\n self.A = at\n assert not np.isnan(self.alpha_).any(\n ), 'Check alpha for NaN values'\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n def build_posterior(self, policy, num_episodes, max_episode_length,\n test_every=np.inf, states_V_target=()):\n \"\"\"\n policy is a 
function that take state as input and returns an action\n \"\"\"\n statistics = trange(num_episodes)\n test_error = np.array([])\n for e in statistics:\n is_terminal = False\n num_steps = 0\n state = self.env.reset()\n action = policy(state)\n state_sequence = np.empty((state.shape[0], max_episode_length +\n 1), dtype=np.float64, order='C')\n state_sequence[:, 0] = state[:, 0]\n reward_sequence = np.empty(max_episode_length, dtype=np.float64,\n order='C')\n while num_steps < max_episode_length and not is_terminal:\n num_steps += 1\n state, reward, is_terminal = self.env.step(action)\n action = policy(state)\n state_sequence[:, num_steps] = state[:, 0]\n reward_sequence[num_steps - 1] = reward\n state_sequence = state_sequence[:, 0:num_steps + 1]\n reward_sequence = reward_sequence[0:num_steps]\n if self.D.shape[1] == 0:\n traj = state_sequence[:, 0][:, np.newaxis]\n self.D = traj\n self.V_D = self.V_mu(state_sequence[:, 0][:, np.newaxis])\n self.K_inv = 1 / self.kernel(traj, traj)\n self.A = np.array([[1]])\n self.alpha_ = np.array([[0]])\n self.C_ = np.array([[0]])\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n self.update(state_sequence, reward_sequence)\n statistics.set_postfix(epi_length=num_steps, dict_size=self.D.\n shape[1], cumm_cost=np.sum(reward_sequence))\n if e % test_every == 0 and len(states_V_target) == 2:\n V = self.get_value_function(states_V_target[0])\n test_error = np.concatenate((test_error, np.array([np.mean(\n np.abs(V - states_V_target[1]))])))\n return test_error\n\n def get_value_function(self, states):\n if self.D.shape[1] == 0:\n return self.V_mu(states)\n else:\n return self.V_mu(states) + np.dot(self.kernel(self.D, states).T,\n self.diff_alpha_CV_D)\n",
"step-5": "from tqdm import trange\nimport numpy as np\n\nclass GPTD_fixedGrid:\n def __init__(self, env, sigma0, gamma, kernel, D, V_mu=[]):\n \n self.env = env\n self.gamma = gamma\n self.sigma0 = sigma0\n self.kernel = kernel.kernel\n if (not V_mu):\n V_mu = lambda s: np.zeros((s.shape[1],1))\n self.V_mu = V_mu\n self.V_D = self.V_mu(D)\n self.D = D\n # self.D = np.concatenate((self.D, self.V_D.T), axis=0) # Use V_mu in computing distances!\n self.A = np.zeros((self.D.shape[1],1), dtype=np.float64, order='C')\n self.A[-1,0] = 1\n K = self.kernel(self.D, self.D)\n self.K_inv = np.linalg.inv(K)\n self.alpha_ = np.zeros((self.D.shape[1],1), dtype=np.float64, order='C')\n self.C_ = np.zeros((self.D.shape[1],self.D.shape[1]), dtype=np.float64, order='C')\n self.diff_alpha_CV_D = np.empty((self.D.shape[1],1), dtype=np.float64, order='C')\n \n def k_(self,x):\n\n if (len(x.shape)==1):\n x = x[:,np.newaxis]\n assert len(x.shape)==2, \"Check state dimensions\"\n\n return self.kernel(self.D, np.repeat(x, self.D.shape[1], axis=1))\n\n def update(self, state_sequence, reward_sequence):\n \"\"\"\n Update GP after observing states (state_sequence) and rewards (reward_sequence)\n \"\"\"\n\n for i in range(reward_sequence.shape[0]):\n\n trajt_1 = state_sequence[:,i][:,np.newaxis] # No use of V_mu in computing distances!\n trajt = state_sequence[:,i+1][:,np.newaxis]\n # trajt_1 = np.concatenate((trajt_1, self.V_mu(trajt_1)), axis=0) # Use V_mu as well\n # trajt = np.concatenate((trajt, self.V_mu(trajt)), axis=0)\n k_t_1 = self.kernel(self.D, trajt_1)\n k_t = self.kernel(self.D, trajt)\n ktt = self.kernel(trajt, trajt)\n at = np.dot(self.K_inv, k_t)\n delk_t_1 = k_t_1 - self.gamma*k_t\n\n ct = np.dot(self.C_, delk_t_1) - (self.A - self.gamma*at)\n st = self.sigma0**2 - np.dot(ct.T, delk_t_1)\n\n diff_r = np.dot(delk_t_1.T, self.alpha_)[0,0] - reward_sequence[i]\n self.alpha_ = self.alpha_ + ct/st*diff_r\n\n self.C_ = self.C_ + np.dot(ct, ct.T)/st\n\n self.A = at\n\n assert (not 
np.isnan(self.alpha_).any()), \"Check alpha for NaN values\"\n\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n def build_posterior(self, policy, num_episodes, max_episode_length, test_every=np.inf, states_V_target=()):\n \"\"\"\n policy is a function that take state as input and returns an action\n \"\"\"\n\n statistics = trange(num_episodes)\n test_error = np.array([])\n\n for e in statistics:\n is_terminal = False\n num_steps = 0\n state = self.env.reset()\n action = policy(state)\n \n state_sequence = np.empty((state.shape[0], max_episode_length+1), dtype=np.float64, order='C')\n state_sequence[:, 0] = state[:,0]\n reward_sequence = np.empty(max_episode_length, dtype=np.float64, order='C')\n \n while ((num_steps < max_episode_length) and (not is_terminal)):\n num_steps+=1\n state, reward, is_terminal = self.env.step(action)\n action = policy(state)\n\n state_sequence[:, num_steps] = state[:,0]\n reward_sequence[num_steps-1] = reward\n\n state_sequence = state_sequence[:, 0:(num_steps+1)]\n reward_sequence = reward_sequence[0:num_steps]\n\n if (self.D.shape[1]==0):\n\n traj = state_sequence[:,0][:,np.newaxis]\n self.D = traj\n self.V_D = self.V_mu(state_sequence[:,0][:,np.newaxis])\n self.K_inv = 1/self.kernel(traj, traj)\n self.A = np.array([[1]])\n self.alpha_ = np.array([[0]])\n self.C_= np.array([[0]])\n self.diff_alpha_CV_D = self.alpha_ - np.dot(self.C_, self.V_D)\n\n self.update(state_sequence, reward_sequence)\n statistics.set_postfix(epi_length=num_steps, dict_size=self.D.shape[1], cumm_cost=np.sum(reward_sequence))\n if (e%test_every==0 and len(states_V_target)==2):\n V = self.get_value_function(states_V_target[0])\n test_error = np.concatenate((test_error, np.array([np.mean(np.abs(V - states_V_target[1]))])))\n\n return test_error\n \n def get_value_function(self, states):\n\n if (self.D.shape[1]==0):\n return self.V_mu(states) \n\n else:\n return self.V_mu(states) + np.dot(self.kernel(self.D, states).T, self.diff_alpha_CV_D)",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
from sklearn.datasets import fetch_20newsgroups
from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import Normalizer
from sklearn import metrics
from sklearn.cluster import KMeans, MiniBatchKMeans
from random import randint
import sys
import pandas as pd
import pickle
import nltk
import os
import numpy as np
import string
replace_punctuation = string.maketrans(string.punctuation, ' '*len(string.punctuation))
dir_doc = sys.argv[1] + 'docs.txt'
dir_titles = sys.argv[1] + 'title_StackOverflow.txt'
with open(dir_doc) as f:
docs = f.read().splitlines()
with open(dir_titles) as f:
titles = f.read().splitlines()
with open('stopwords.txt') as f:
stopwords = f.read().splitlines()
print "Eliminating stopwords from docs and titles"
for i in range(len(docs)):
docs[i] = docs[i].translate(replace_punctuation)
docs[i] = ' '.join([''.join([c for c in word if not c.isdigit()]) for word in docs[i].split()])
docs[i] = ' '.join([word.lower() for word in docs[i].split() if word.lower() not in stopwords])
for i in range(len(titles)):
titles[i] = titles[i].translate(replace_punctuation)
titles[i] = ' '.join([''.join([c for c in word if not c.isdigit()]) for word in titles[i].split()])
titles[i] = ' '.join([word.lower() for word in titles[i].split() if word.lower() not in stopwords])
total = docs + titles
print "Extracting features from the training dataset using a sparse vectorizer"
vectorizer = TfidfVectorizer(max_df=0.5, max_features=10000, min_df=2, stop_words='english', use_idf=True)
vectorizer.fit(titles)
X = vectorizer.transform(titles)
print "n_samples: %d, n_features: %d" % X.shape
print "Performing dimensionality reduction using LSA"
# Vectorizer results are normalized, which makes KMeans behave as
# spherical k-means for better results. Since LSA/SVD results are
# not normalized, we have to redo the normalization.
r1 = 1#randint(0,10000)
r2 = 1#randint(0,10000)
true_k = 53
svd = TruncatedSVD(n_components=20, random_state=r1)
normalizer = Normalizer(copy=False)
lsa = make_pipeline(svd, normalizer)
X = lsa.fit_transform(X)
explained_variance = svd.explained_variance_ratio_.sum()
print "Explained variance of the SVD step: {}%".format(int(explained_variance * 100))
km = KMeans(n_clusters=true_k, init='k-means++', n_jobs=-1, max_iter=1000, n_init=100, verbose=False, random_state=r2)
print "Clustering sparse data with %s" % km
km.fit(X)
ids = range(len(titles))
clusters = km.labels_.tolist()
stack = { 'title': titles, 'indexes': ids, 'cluster': clusters }
frame = pd.DataFrame(stack, index = [clusters] , columns = ['title', 'indexes', 'cluster'])
#sort cluster centers by proximity to centroid
original_space_centroids = svd.inverse_transform(km.cluster_centers_)
order_centroids = original_space_centroids.argsort()[:, ::-1]
terms = vectorizer.get_feature_names()
for i in range(true_k):
print "Cluster %d words:" % i
for ind in order_centroids[i, :5]: #replace 6 with n words per cluster
print "\t\t%s" % terms[ind]
print "Cluster %d titles:" % i
for ind in range(5):
print "\t\t[ %d" % frame.ix[i]['indexes'].values.tolist()[ind], "] %s" % frame.ix[i]['title'].values.tolist()[ind]
# Check clusters' distribution
a = frame['cluster'].value_counts() #number of titles per cluster
print a
id_cluster = np.array(frame['cluster'])
dir_check = sys.argv[1] + 'check_index.csv'
with open(dir_check) as f:
check = f.read().splitlines()
check = check[1:]
output = np.zeros((len(check)))
for i in range(len(check)):
word = check[i].split(',')
id1 = int(word[1])
id2 = int(word[2])
output[i] = (id_cluster[id1] == id_cluster[id2])
f = open(sys.argv[2], 'w')
f.write("ID,Ans\n")
for i in range(len(check)):
f.write(str(i) + "," + str(int(output[i])) + "\n")
|
normal
|
{
"blob_id": "53cbc3ca3a34a8aafa97d6964337cfabb1bebac5",
"index": 8957,
"step-1": "from sklearn.datasets import fetch_20newsgroups\nfrom sklearn.decomposition import TruncatedSVD\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom sklearn.feature_extraction.text import HashingVectorizer\nfrom sklearn.feature_extraction.text import TfidfTransformer\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.preprocessing import Normalizer\nfrom sklearn import metrics\nfrom sklearn.cluster import KMeans, MiniBatchKMeans\nfrom random import randint\n\n\n\nimport sys\nimport pandas as pd\nimport pickle\nimport nltk\nimport os\nimport numpy as np\nimport string\nreplace_punctuation = string.maketrans(string.punctuation, ' '*len(string.punctuation))\n\ndir_doc = sys.argv[1] + 'docs.txt'\ndir_titles = sys.argv[1] + 'title_StackOverflow.txt'\nwith open(dir_doc) as f:\n docs = f.read().splitlines()\nwith open(dir_titles) as f:\n titles = f.read().splitlines()\nwith open('stopwords.txt') as f:\n stopwords = f.read().splitlines()\n\n\n\nprint \"Eliminating stopwords from docs and titles\"\nfor i in range(len(docs)):\n docs[i] = docs[i].translate(replace_punctuation)\n docs[i] = ' '.join([''.join([c for c in word if not c.isdigit()]) for word in docs[i].split()])\n docs[i] = ' '.join([word.lower() for word in docs[i].split() if word.lower() not in stopwords])\n \nfor i in range(len(titles)):\n titles[i] = titles[i].translate(replace_punctuation)\n titles[i] = ' '.join([''.join([c for c in word if not c.isdigit()]) for word in titles[i].split()])\n titles[i] = ' '.join([word.lower() for word in titles[i].split() if word.lower() not in stopwords])\n \n\ntotal = docs + titles\n\n\nprint \"Extracting features from the training dataset using a sparse vectorizer\"\nvectorizer = TfidfVectorizer(max_df=0.5, max_features=10000, min_df=2, stop_words='english', use_idf=True)\nvectorizer.fit(titles)\nX = vectorizer.transform(titles)\nprint \"n_samples: %d, n_features: %d\" % X.shape\n\nprint \"Performing dimensionality reduction using LSA\"\n# 
Vectorizer results are normalized, which makes KMeans behave as\n# spherical k-means for better results. Since LSA/SVD results are\n# not normalized, we have to redo the normalization.\nr1 = 1#randint(0,10000)\nr2 = 1#randint(0,10000)\ntrue_k = 53\nsvd = TruncatedSVD(n_components=20, random_state=r1)\nnormalizer = Normalizer(copy=False)\nlsa = make_pipeline(svd, normalizer)\n\nX = lsa.fit_transform(X) \n\nexplained_variance = svd.explained_variance_ratio_.sum()\nprint \"Explained variance of the SVD step: {}%\".format(int(explained_variance * 100))\n\n\nkm = KMeans(n_clusters=true_k, init='k-means++', n_jobs=-1, max_iter=1000, n_init=100, verbose=False, random_state=r2)\nprint \"Clustering sparse data with %s\" % km\nkm.fit(X)\n\nids = range(len(titles))\nclusters = km.labels_.tolist()\nstack = { 'title': titles, 'indexes': ids, 'cluster': clusters }\n\nframe = pd.DataFrame(stack, index = [clusters] , columns = ['title', 'indexes', 'cluster'])\n\n#sort cluster centers by proximity to centroid\noriginal_space_centroids = svd.inverse_transform(km.cluster_centers_)\norder_centroids = original_space_centroids.argsort()[:, ::-1]\nterms = vectorizer.get_feature_names()\n\nfor i in range(true_k):\n print \"Cluster %d words:\" % i\n for ind in order_centroids[i, :5]: #replace 6 with n words per cluster\n print \"\\t\\t%s\" % terms[ind]\n \n print \"Cluster %d titles:\" % i\n for ind in range(5):\n print \"\\t\\t[ %d\" % frame.ix[i]['indexes'].values.tolist()[ind], \"] %s\" % frame.ix[i]['title'].values.tolist()[ind]\n \n# Check clusters' distribution\na = frame['cluster'].value_counts() #number of titles per cluster\nprint a\n\n\n\n\n\n\n\nid_cluster = np.array(frame['cluster'])\ndir_check = sys.argv[1] + 'check_index.csv'\nwith open(dir_check) as f:\n check = f.read().splitlines()\ncheck = check[1:]\noutput = np.zeros((len(check)))\nfor i in range(len(check)):\n word = check[i].split(',')\n id1 = int(word[1])\n id2 = int(word[2])\n output[i] = (id_cluster[id1] == 
id_cluster[id2])\n\nf = open(sys.argv[2], 'w')\nf.write(\"ID,Ans\\n\")\nfor i in range(len(check)):\n f.write(str(i) + \",\" + str(int(output[i])) + \"\\n\")\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# Generated by Django 3.1.5 on 2021-02-24 18:34
from django.db import migrations, models
import stdimage.models
class Migration(migrations.Migration):
    """Add the ``historico`` content model and give ``arquivopdf`` a title
    and description (schema-only migration, auto-generated by Django)."""

    dependencies = [('Site', '0004_arquivopdf')]

    operations = [
        # "historico" page content: audit timestamps, hero image, three
        # titled text sections and three labelled counters.
        migrations.CreateModel(
            name='historico',
            fields=[
                ('id', models.AutoField(
                    auto_created=True, primary_key=True,
                    serialize=False, verbose_name='ID')),
                ('criados', models.DateField(
                    auto_now_add=True, verbose_name='Criação')),
                ('modificado', models.DateField(
                    auto_now=True, verbose_name='Atualização')),
                ('ativo', models.BooleanField(
                    default=True, verbose_name='Ativo?')),
                ('titulo', models.CharField(
                    max_length=100, verbose_name='Título')),
                ('imagem', stdimage.models.StdImageField(
                    upload_to='img_historico', verbose_name='Imagem')),
                ('subtitulo01', models.CharField(
                    max_length=100, verbose_name='Subtítulo01')),
                ('descricao01', models.TextField(
                    max_length=200, verbose_name='Subtítulo01 Descrição')),
                ('subtitulo02', models.CharField(
                    max_length=100, verbose_name='Subtítulo02')),
                ('descricao02', models.TextField(
                    max_length=200, verbose_name='Subtítulo02 Descrição')),
                ('contador01', models.CharField(
                    max_length=50, verbose_name='contador01')),
                ('valor01', models.TextField(
                    max_length=6, verbose_name='valor contador01')),
                ('contador02', models.CharField(
                    max_length=50, verbose_name='contador02')),
                ('valor02', models.TextField(
                    max_length=6, verbose_name='valor contador02')),
                ('contador03', models.CharField(
                    max_length=50, verbose_name='contador03')),
                ('valor03', models.TextField(
                    max_length=6, verbose_name='valor contador03')),
                ('subtitulo03', models.CharField(
                    max_length=100, verbose_name='Subtítulo03')),
                ('descricao03', models.TextField(
                    max_length=200, verbose_name='Subtítulo03 Descrição')),
            ],
            options={
                'verbose_name': 'Notícia',
                'verbose_name_plural': 'Noticias',
            },
        ),
        # New required columns on the existing "arquivopdf" model; the
        # one-off default (1) backfills existing rows and is then dropped
        # (preserve_default=False).
        migrations.AddField(
            model_name='arquivopdf',
            name='descricao',
            field=models.TextField(
                default=1, max_length=200, verbose_name='Descrição'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='arquivopdf',
            name='titulo',
            field=models.CharField(
                default=1, max_length=100, verbose_name='Título'),
            preserve_default=False,
        ),
    ]
|
normal
|
{
"blob_id": "321147f2e2d8caf6d9224e2a8969f51ded48baf7",
"index": 8130,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Site', '0004_arquivopdf')]\n operations = [migrations.CreateModel(name='historico', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('criados', models.DateField(\n auto_now_add=True, verbose_name='Criação')), ('modificado', models.\n DateField(auto_now=True, verbose_name='Atualização')), ('ativo',\n models.BooleanField(default=True, verbose_name='Ativo?')), (\n 'titulo', models.CharField(max_length=100, verbose_name='Título')),\n ('imagem', stdimage.models.StdImageField(upload_to='img_historico',\n verbose_name='Imagem')), ('subtitulo01', models.CharField(\n max_length=100, verbose_name='Subtítulo01')), ('descricao01',\n models.TextField(max_length=200, verbose_name=\n 'Subtítulo01 Descrição')), ('subtitulo02', models.CharField(\n max_length=100, verbose_name='Subtítulo02')), ('descricao02',\n models.TextField(max_length=200, verbose_name=\n 'Subtítulo02 Descrição')), ('contador01', models.CharField(\n max_length=50, verbose_name='contador01')), ('valor01', models.\n TextField(max_length=6, verbose_name='valor contador01')), (\n 'contador02', models.CharField(max_length=50, verbose_name=\n 'contador02')), ('valor02', models.TextField(max_length=6,\n verbose_name='valor contador02')), ('contador03', models.CharField(\n max_length=50, verbose_name='contador03')), ('valor03', models.\n TextField(max_length=6, verbose_name='valor contador03')), (\n 'subtitulo03', models.CharField(max_length=100, verbose_name=\n 'Subtítulo03')), ('descricao03', models.TextField(max_length=200,\n verbose_name='Subtítulo03 Descrição'))], options={'verbose_name':\n 'Notícia', 'verbose_name_plural': 'Noticias'}), migrations.AddField\n (model_name='arquivopdf', name='descricao', field=models.TextField(\n default=1, max_length=200, verbose_name='Descrição'),\n preserve_default=False), migrations.AddField(model_name=\n 'arquivopdf', 
name='titulo', field=models.CharField(default=1,\n max_length=100, verbose_name='Título'), preserve_default=False)]\n",
"step-4": "from django.db import migrations, models\nimport stdimage.models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Site', '0004_arquivopdf')]\n operations = [migrations.CreateModel(name='historico', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('criados', models.DateField(\n auto_now_add=True, verbose_name='Criação')), ('modificado', models.\n DateField(auto_now=True, verbose_name='Atualização')), ('ativo',\n models.BooleanField(default=True, verbose_name='Ativo?')), (\n 'titulo', models.CharField(max_length=100, verbose_name='Título')),\n ('imagem', stdimage.models.StdImageField(upload_to='img_historico',\n verbose_name='Imagem')), ('subtitulo01', models.CharField(\n max_length=100, verbose_name='Subtítulo01')), ('descricao01',\n models.TextField(max_length=200, verbose_name=\n 'Subtítulo01 Descrição')), ('subtitulo02', models.CharField(\n max_length=100, verbose_name='Subtítulo02')), ('descricao02',\n models.TextField(max_length=200, verbose_name=\n 'Subtítulo02 Descrição')), ('contador01', models.CharField(\n max_length=50, verbose_name='contador01')), ('valor01', models.\n TextField(max_length=6, verbose_name='valor contador01')), (\n 'contador02', models.CharField(max_length=50, verbose_name=\n 'contador02')), ('valor02', models.TextField(max_length=6,\n verbose_name='valor contador02')), ('contador03', models.CharField(\n max_length=50, verbose_name='contador03')), ('valor03', models.\n TextField(max_length=6, verbose_name='valor contador03')), (\n 'subtitulo03', models.CharField(max_length=100, verbose_name=\n 'Subtítulo03')), ('descricao03', models.TextField(max_length=200,\n verbose_name='Subtítulo03 Descrição'))], options={'verbose_name':\n 'Notícia', 'verbose_name_plural': 'Noticias'}), migrations.AddField\n (model_name='arquivopdf', name='descricao', field=models.TextField(\n default=1, max_length=200, verbose_name='Descrição'),\n preserve_default=False), 
migrations.AddField(model_name=\n 'arquivopdf', name='titulo', field=models.CharField(default=1,\n max_length=100, verbose_name='Título'), preserve_default=False)]\n",
"step-5": "# Generated by Django 3.1.5 on 2021-02-24 18:34\n\nfrom django.db import migrations, models\nimport stdimage.models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Site', '0004_arquivopdf'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='historico',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('criados', models.DateField(auto_now_add=True, verbose_name='Criação')),\n ('modificado', models.DateField(auto_now=True, verbose_name='Atualização')),\n ('ativo', models.BooleanField(default=True, verbose_name='Ativo?')),\n ('titulo', models.CharField(max_length=100, verbose_name='Título')),\n ('imagem', stdimage.models.StdImageField(upload_to='img_historico', verbose_name='Imagem')),\n ('subtitulo01', models.CharField(max_length=100, verbose_name='Subtítulo01')),\n ('descricao01', models.TextField(max_length=200, verbose_name='Subtítulo01 Descrição')),\n ('subtitulo02', models.CharField(max_length=100, verbose_name='Subtítulo02')),\n ('descricao02', models.TextField(max_length=200, verbose_name='Subtítulo02 Descrição')),\n ('contador01', models.CharField(max_length=50, verbose_name='contador01')),\n ('valor01', models.TextField(max_length=6, verbose_name='valor contador01')),\n ('contador02', models.CharField(max_length=50, verbose_name='contador02')),\n ('valor02', models.TextField(max_length=6, verbose_name='valor contador02')),\n ('contador03', models.CharField(max_length=50, verbose_name='contador03')),\n ('valor03', models.TextField(max_length=6, verbose_name='valor contador03')),\n ('subtitulo03', models.CharField(max_length=100, verbose_name='Subtítulo03')),\n ('descricao03', models.TextField(max_length=200, verbose_name='Subtítulo03 Descrição')),\n ],\n options={\n 'verbose_name': 'Notícia',\n 'verbose_name_plural': 'Noticias',\n },\n ),\n migrations.AddField(\n model_name='arquivopdf',\n name='descricao',\n field=models.TextField(default=1, 
max_length=200, verbose_name='Descrição'),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='arquivopdf',\n name='titulo',\n field=models.CharField(default=1, max_length=100, verbose_name='Título'),\n preserve_default=False,\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class CreateArchive(QtWidgets.QDialog):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def create_components(self):
self.option_widget = QtWidgets.QWidget()
self.name_lbl = QtWidgets.QLabel('Nazwa')
self.name_edit = QtWidgets.QLineEdit('untitled')
self.name_edit.setMaxLength(30)
self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(
'\\w{30}'), self.name_edit))
self.archive_type_cb = QtWidgets.QComboBox()
self.archive_type_cb.addItem('.zip')
self.archive_type_cb.addItem('.tar')
self.path_lbl = QtWidgets.QLabel(self.path)
self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,
QtWidgets.QSizePolicy.Preferred)
self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)
self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.
set_path)
self.file_list = ListView('Pliki do zapakowania')
self.file_list.add_element(self.index)
self.file_list.add_to_model(self.create_item(self.index))
self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',
clicked=self.add_catalog)
self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=
self.add_file)
self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',
clicked=self.file_list.remove_selected)
self.progress_bar = QtWidgets.QProgressBar()
self.progress_bar.setMinimum(0)
self.progress_lbl = QtWidgets.QLabel()
self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.
pack_files)
def set_path(self):
path = QtWidgets.QFileDialog.getExistingDirectory(self,
'Wybierz katalog', QtCore.QDir.homePath())
if path:
self.path = path
self.path_lbl.setText(self.path)
def create_layout(self):
option_layout = QtWidgets.QGridLayout()
v_option_layout = QtWidgets.QVBoxLayout()
main_layout = QtWidgets.QGridLayout()
v_main_layout = QtWidgets.QVBoxLayout()
h_name_layout = QtWidgets.QHBoxLayout()
h_name_layout.addWidget(self.name_lbl)
h_name_layout.addWidget(self.name_edit)
h_name_layout.addWidget(self.archive_type_cb)
v_option_layout.addLayout(h_name_layout)
h_path_layout = QtWidgets.QHBoxLayout()
h_path_layout.addWidget(self.path_lbl)
h_path_layout.addWidget(self.set_path_btn)
v_option_layout.addLayout(h_path_layout)
v_option_layout.addWidget(self.file_list)
h_remove_layout = QtWidgets.QHBoxLayout()
h_remove_layout.addWidget(self.add_folder_btn)
h_remove_layout.addWidget(self.add_file_btn)
h_remove_layout.addWidget(self.remove_selected_btn)
v_option_layout.addLayout(h_remove_layout)
option_layout.addLayout(v_option_layout, 0, 0, 1, 1)
self.option_widget.setLayout(option_layout)
v_main_layout.addWidget(self.option_widget)
v_main_layout.addWidget(self.progress_bar)
v_main_layout.addWidget(self.pack_btn)
main_layout.addLayout(v_main_layout, 0, 0, 1, 1)
self.setLayout(main_layout)
def pack_files(self):
if not self.name_edit.text():
return
if not self.file_list.get_quantity():
return
self.option_widget.setEnabled(False)
self.progress_bar.setMaximum(0)
name = self.name_edit.text() + self.archive_type_cb.itemData(self.
archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)
path = self.path_lbl.text()
list_index = self.file_list.get_index_list()
path_list = [self.file_model.filePath(index) for index in list_index]
if self.archive_type_cb.currentText() == '.zip':
self.pack_thread.set(pack_zip, name, path, path_list)
elif self.archive_type_cb.currentText() == '.tar':
self.pack_thread.set(pack_tar, name, path, path_list)
self.pack_thread.start()
<|reserved_special_token_0|>
def add_file(self):
file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,
'Wybierz plik', QtCore.QDir.homePath())
if file:
index = self.file_model.index(file)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def ended(self):
self.parent().trayIcon.showMessage('Zakonczono',
'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.
name), QtWidgets.QSystemTrayIcon.Information, 2000)
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
<|reserved_special_token_0|>
def progress(self, info):
print('info', info)
self.setWindowTitle(info)
def closeEvent(self, QCloseEvent):
if not self.pack_thread.ended:
QCloseEvent.ignore()
self.parent().catalog_list.setRootIndex(self.parent().catalog_list.
rootIndex())
self.parent().catalog_list.scrollTo(self.parent().catalog_list.
currentIndex())
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CreateArchive(QtWidgets.QDialog):
<|reserved_special_token_0|>
def create_item(self, index):
path = os.path.abspath(self.file_model.filePath(index))
item = QtGui.QStandardItem(os.path.basename(path))
item.setIcon(self.file_model.fileIcon(index))
item.setCheckable(True)
item.setEditable(False)
return item
def create_components(self):
self.option_widget = QtWidgets.QWidget()
self.name_lbl = QtWidgets.QLabel('Nazwa')
self.name_edit = QtWidgets.QLineEdit('untitled')
self.name_edit.setMaxLength(30)
self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(
'\\w{30}'), self.name_edit))
self.archive_type_cb = QtWidgets.QComboBox()
self.archive_type_cb.addItem('.zip')
self.archive_type_cb.addItem('.tar')
self.path_lbl = QtWidgets.QLabel(self.path)
self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,
QtWidgets.QSizePolicy.Preferred)
self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)
self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.
set_path)
self.file_list = ListView('Pliki do zapakowania')
self.file_list.add_element(self.index)
self.file_list.add_to_model(self.create_item(self.index))
self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',
clicked=self.add_catalog)
self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=
self.add_file)
self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',
clicked=self.file_list.remove_selected)
self.progress_bar = QtWidgets.QProgressBar()
self.progress_bar.setMinimum(0)
self.progress_lbl = QtWidgets.QLabel()
self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.
pack_files)
def set_path(self):
path = QtWidgets.QFileDialog.getExistingDirectory(self,
'Wybierz katalog', QtCore.QDir.homePath())
if path:
self.path = path
self.path_lbl.setText(self.path)
def create_layout(self):
option_layout = QtWidgets.QGridLayout()
v_option_layout = QtWidgets.QVBoxLayout()
main_layout = QtWidgets.QGridLayout()
v_main_layout = QtWidgets.QVBoxLayout()
h_name_layout = QtWidgets.QHBoxLayout()
h_name_layout.addWidget(self.name_lbl)
h_name_layout.addWidget(self.name_edit)
h_name_layout.addWidget(self.archive_type_cb)
v_option_layout.addLayout(h_name_layout)
h_path_layout = QtWidgets.QHBoxLayout()
h_path_layout.addWidget(self.path_lbl)
h_path_layout.addWidget(self.set_path_btn)
v_option_layout.addLayout(h_path_layout)
v_option_layout.addWidget(self.file_list)
h_remove_layout = QtWidgets.QHBoxLayout()
h_remove_layout.addWidget(self.add_folder_btn)
h_remove_layout.addWidget(self.add_file_btn)
h_remove_layout.addWidget(self.remove_selected_btn)
v_option_layout.addLayout(h_remove_layout)
option_layout.addLayout(v_option_layout, 0, 0, 1, 1)
self.option_widget.setLayout(option_layout)
v_main_layout.addWidget(self.option_widget)
v_main_layout.addWidget(self.progress_bar)
v_main_layout.addWidget(self.pack_btn)
main_layout.addLayout(v_main_layout, 0, 0, 1, 1)
self.setLayout(main_layout)
def pack_files(self):
if not self.name_edit.text():
return
if not self.file_list.get_quantity():
return
self.option_widget.setEnabled(False)
self.progress_bar.setMaximum(0)
name = self.name_edit.text() + self.archive_type_cb.itemData(self.
archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)
path = self.path_lbl.text()
list_index = self.file_list.get_index_list()
path_list = [self.file_model.filePath(index) for index in list_index]
if self.archive_type_cb.currentText() == '.zip':
self.pack_thread.set(pack_zip, name, path, path_list)
elif self.archive_type_cb.currentText() == '.tar':
self.pack_thread.set(pack_tar, name, path, path_list)
self.pack_thread.start()
<|reserved_special_token_0|>
def add_file(self):
file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,
'Wybierz plik', QtCore.QDir.homePath())
if file:
index = self.file_model.index(file)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def ended(self):
self.parent().trayIcon.showMessage('Zakonczono',
'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.
name), QtWidgets.QSystemTrayIcon.Information, 2000)
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
<|reserved_special_token_0|>
def progress(self, info):
print('info', info)
self.setWindowTitle(info)
def closeEvent(self, QCloseEvent):
if not self.pack_thread.ended:
QCloseEvent.ignore()
self.parent().catalog_list.setRootIndex(self.parent().catalog_list.
rootIndex())
self.parent().catalog_list.scrollTo(self.parent().catalog_list.
currentIndex())
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CreateArchive(QtWidgets.QDialog):
def __init__(self, model, index, path, parent=None):
super().__init__(parent)
self.setWindowTitle('Utworz archiwum')
self.setWindowModality(QtCore.Qt.WindowModal)
self.resize(350, 400)
self.path = path
self.file_model = model
self.index = index
self.create_components()
self.create_layout()
self.pack_thread = PackThread()
self.pack_thread.status_signal.connect(self.ended)
self.pack_thread.progress_signal.connect(self.progress)
self.pack_thread.access_signal.connect(self.access)
def create_item(self, index):
path = os.path.abspath(self.file_model.filePath(index))
item = QtGui.QStandardItem(os.path.basename(path))
item.setIcon(self.file_model.fileIcon(index))
item.setCheckable(True)
item.setEditable(False)
return item
def create_components(self):
self.option_widget = QtWidgets.QWidget()
self.name_lbl = QtWidgets.QLabel('Nazwa')
self.name_edit = QtWidgets.QLineEdit('untitled')
self.name_edit.setMaxLength(30)
self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(
'\\w{30}'), self.name_edit))
self.archive_type_cb = QtWidgets.QComboBox()
self.archive_type_cb.addItem('.zip')
self.archive_type_cb.addItem('.tar')
self.path_lbl = QtWidgets.QLabel(self.path)
self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,
QtWidgets.QSizePolicy.Preferred)
self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)
self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.
set_path)
self.file_list = ListView('Pliki do zapakowania')
self.file_list.add_element(self.index)
self.file_list.add_to_model(self.create_item(self.index))
self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',
clicked=self.add_catalog)
self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=
self.add_file)
self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',
clicked=self.file_list.remove_selected)
self.progress_bar = QtWidgets.QProgressBar()
self.progress_bar.setMinimum(0)
self.progress_lbl = QtWidgets.QLabel()
self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.
pack_files)
def set_path(self):
path = QtWidgets.QFileDialog.getExistingDirectory(self,
'Wybierz katalog', QtCore.QDir.homePath())
if path:
self.path = path
self.path_lbl.setText(self.path)
def create_layout(self):
option_layout = QtWidgets.QGridLayout()
v_option_layout = QtWidgets.QVBoxLayout()
main_layout = QtWidgets.QGridLayout()
v_main_layout = QtWidgets.QVBoxLayout()
h_name_layout = QtWidgets.QHBoxLayout()
h_name_layout.addWidget(self.name_lbl)
h_name_layout.addWidget(self.name_edit)
h_name_layout.addWidget(self.archive_type_cb)
v_option_layout.addLayout(h_name_layout)
h_path_layout = QtWidgets.QHBoxLayout()
h_path_layout.addWidget(self.path_lbl)
h_path_layout.addWidget(self.set_path_btn)
v_option_layout.addLayout(h_path_layout)
v_option_layout.addWidget(self.file_list)
h_remove_layout = QtWidgets.QHBoxLayout()
h_remove_layout.addWidget(self.add_folder_btn)
h_remove_layout.addWidget(self.add_file_btn)
h_remove_layout.addWidget(self.remove_selected_btn)
v_option_layout.addLayout(h_remove_layout)
option_layout.addLayout(v_option_layout, 0, 0, 1, 1)
self.option_widget.setLayout(option_layout)
v_main_layout.addWidget(self.option_widget)
v_main_layout.addWidget(self.progress_bar)
v_main_layout.addWidget(self.pack_btn)
main_layout.addLayout(v_main_layout, 0, 0, 1, 1)
self.setLayout(main_layout)
def pack_files(self):
if not self.name_edit.text():
return
if not self.file_list.get_quantity():
return
self.option_widget.setEnabled(False)
self.progress_bar.setMaximum(0)
name = self.name_edit.text() + self.archive_type_cb.itemData(self.
archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)
path = self.path_lbl.text()
list_index = self.file_list.get_index_list()
path_list = [self.file_model.filePath(index) for index in list_index]
if self.archive_type_cb.currentText() == '.zip':
self.pack_thread.set(pack_zip, name, path, path_list)
elif self.archive_type_cb.currentText() == '.tar':
self.pack_thread.set(pack_tar, name, path, path_list)
self.pack_thread.start()
def add_catalog(self):
catalog = QtWidgets.QFileDialog.getExistingDirectory(self,
'Wybierz katalog', QtCore.QDir.homePath())
if catalog and not QtCore.QFileInfo(catalog).isSymLink():
index = self.file_model.index(catalog)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def add_file(self):
file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,
'Wybierz plik', QtCore.QDir.homePath())
if file:
index = self.file_model.index(file)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def ended(self):
self.parent().trayIcon.showMessage('Zakonczono',
'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.
name), QtWidgets.QSystemTrayIcon.Information, 2000)
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
def access(self):
self.setWindowTitle('Brak dostepu')
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
def progress(self, info):
print('info', info)
self.setWindowTitle(info)
def closeEvent(self, QCloseEvent):
if not self.pack_thread.ended:
QCloseEvent.ignore()
self.parent().catalog_list.setRootIndex(self.parent().catalog_list.
rootIndex())
self.parent().catalog_list.scrollTo(self.parent().catalog_list.
currentIndex())
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()
)
def pack_tar(thread, name, target_path, path_list):
tar_path = os.path.join(os.path.abspath(target_path), name)
try:
with tarfile.open(tar_path, 'w') as tar_file:
for file_path in path_list:
if not os.path.isdir(file_path):
thread.progress_signal.emit(file_path)
tar_file.add(file_path, arcname=os.path.basename(file_path)
)
else:
catalog_path = os.path.dirname(os.path.abspath(file_path))
for root_folder, subfolders, files in os.walk(file_path):
for file in files:
thread.in_progress_signal.emit(os.path.join(
root_folder, file))
tar_file.add(os.path.join(root_folder, file),
arcname=os.path.join(root_folder[len(
catalog_path) + 1:], file))
except IOError:
thread.access_signal.emit()
def pack_zip(thread, name, target_path, path_list):
zip_path = os.path.join(os.path.abspath(target_path), name)
try:
with zipfile.ZipFile(zip_path, 'w') as zip_file:
for path_file in path_list:
if not os.path.isdir(path_file):
thread.progress_signal.emit(path_file)
zip_file.write(path_file, arcname=os.path.basename(
path_file))
else:
path_folder = os.path.dirname(os.path.abspath(path_file))
for root_folder, subfolders, files in os.walk(path_file):
for file in files:
thread.emit(os.path.join(root_folder, file))
zip_file.write(os.path.join(root_folder, file),
arcname=os.path.join(root_folder[len(
path_folder) + 1:], file))
except IOError:
thread.access_signal.emit()
<|reserved_special_token_1|>
__author__ = 'piotrek'
<|reserved_special_token_0|>
class CreateArchive(QtWidgets.QDialog):
def __init__(self, model, index, path, parent=None):
super().__init__(parent)
self.setWindowTitle('Utworz archiwum')
self.setWindowModality(QtCore.Qt.WindowModal)
self.resize(350, 400)
self.path = path
self.file_model = model
self.index = index
self.create_components()
self.create_layout()
self.pack_thread = PackThread()
self.pack_thread.status_signal.connect(self.ended)
self.pack_thread.progress_signal.connect(self.progress)
self.pack_thread.access_signal.connect(self.access)
def create_item(self, index):
path = os.path.abspath(self.file_model.filePath(index))
item = QtGui.QStandardItem(os.path.basename(path))
item.setIcon(self.file_model.fileIcon(index))
item.setCheckable(True)
item.setEditable(False)
return item
def create_components(self):
self.option_widget = QtWidgets.QWidget()
self.name_lbl = QtWidgets.QLabel('Nazwa')
self.name_edit = QtWidgets.QLineEdit('untitled')
self.name_edit.setMaxLength(30)
self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(
'\\w{30}'), self.name_edit))
self.archive_type_cb = QtWidgets.QComboBox()
self.archive_type_cb.addItem('.zip')
self.archive_type_cb.addItem('.tar')
self.path_lbl = QtWidgets.QLabel(self.path)
self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,
QtWidgets.QSizePolicy.Preferred)
self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)
self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.
set_path)
self.file_list = ListView('Pliki do zapakowania')
self.file_list.add_element(self.index)
self.file_list.add_to_model(self.create_item(self.index))
self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',
clicked=self.add_catalog)
self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=
self.add_file)
self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',
clicked=self.file_list.remove_selected)
self.progress_bar = QtWidgets.QProgressBar()
self.progress_bar.setMinimum(0)
self.progress_lbl = QtWidgets.QLabel()
self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.
pack_files)
def set_path(self):
path = QtWidgets.QFileDialog.getExistingDirectory(self,
'Wybierz katalog', QtCore.QDir.homePath())
if path:
self.path = path
self.path_lbl.setText(self.path)
def create_layout(self):
option_layout = QtWidgets.QGridLayout()
v_option_layout = QtWidgets.QVBoxLayout()
main_layout = QtWidgets.QGridLayout()
v_main_layout = QtWidgets.QVBoxLayout()
h_name_layout = QtWidgets.QHBoxLayout()
h_name_layout.addWidget(self.name_lbl)
h_name_layout.addWidget(self.name_edit)
h_name_layout.addWidget(self.archive_type_cb)
v_option_layout.addLayout(h_name_layout)
h_path_layout = QtWidgets.QHBoxLayout()
h_path_layout.addWidget(self.path_lbl)
h_path_layout.addWidget(self.set_path_btn)
v_option_layout.addLayout(h_path_layout)
v_option_layout.addWidget(self.file_list)
h_remove_layout = QtWidgets.QHBoxLayout()
h_remove_layout.addWidget(self.add_folder_btn)
h_remove_layout.addWidget(self.add_file_btn)
h_remove_layout.addWidget(self.remove_selected_btn)
v_option_layout.addLayout(h_remove_layout)
option_layout.addLayout(v_option_layout, 0, 0, 1, 1)
self.option_widget.setLayout(option_layout)
v_main_layout.addWidget(self.option_widget)
v_main_layout.addWidget(self.progress_bar)
v_main_layout.addWidget(self.pack_btn)
main_layout.addLayout(v_main_layout, 0, 0, 1, 1)
self.setLayout(main_layout)
def pack_files(self):
if not self.name_edit.text():
return
if not self.file_list.get_quantity():
return
self.option_widget.setEnabled(False)
self.progress_bar.setMaximum(0)
name = self.name_edit.text() + self.archive_type_cb.itemData(self.
archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)
path = self.path_lbl.text()
list_index = self.file_list.get_index_list()
path_list = [self.file_model.filePath(index) for index in list_index]
if self.archive_type_cb.currentText() == '.zip':
self.pack_thread.set(pack_zip, name, path, path_list)
elif self.archive_type_cb.currentText() == '.tar':
self.pack_thread.set(pack_tar, name, path, path_list)
self.pack_thread.start()
def add_catalog(self):
catalog = QtWidgets.QFileDialog.getExistingDirectory(self,
'Wybierz katalog', QtCore.QDir.homePath())
if catalog and not QtCore.QFileInfo(catalog).isSymLink():
index = self.file_model.index(catalog)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def add_file(self):
file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,
'Wybierz plik', QtCore.QDir.homePath())
if file:
index = self.file_model.index(file)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def ended(self):
self.parent().trayIcon.showMessage('Zakonczono',
'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.
name), QtWidgets.QSystemTrayIcon.Information, 2000)
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
def access(self):
self.setWindowTitle('Brak dostepu')
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
def progress(self, info):
print('info', info)
self.setWindowTitle(info)
def closeEvent(self, QCloseEvent):
if not self.pack_thread.ended:
QCloseEvent.ignore()
self.parent().catalog_list.setRootIndex(self.parent().catalog_list.
rootIndex())
self.parent().catalog_list.scrollTo(self.parent().catalog_list.
currentIndex())
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()
)
def pack_tar(thread, name, target_path, path_list):
    """Create the tar archive *name* inside *target_path* from *path_list*.

    Emits ``thread.progress_signal`` with each file path as it is added, and
    ``thread.access_signal`` when the archive cannot be written (IOError).

    :param thread: worker exposing ``progress_signal`` / ``access_signal``
    :param name: archive file name, including the ``.tar`` suffix
    :param target_path: directory in which the archive is created
    :param path_list: paths of files and directories to pack
    """
    tar_path = os.path.join(os.path.abspath(target_path), name)
    try:
        with tarfile.open(tar_path, 'w') as tar_file:
            for file_path in path_list:
                if not os.path.isdir(file_path):
                    thread.progress_signal.emit(file_path)
                    tar_file.add(file_path, arcname=os.path.basename(file_path))
                else:
                    # Store entries relative to the directory's parent so the
                    # archive recreates the selected folder itself.
                    catalog_path = os.path.dirname(os.path.abspath(file_path))
                    for root_folder, subfolders, files in os.walk(file_path):
                        for file in files:
                            full_path = os.path.join(root_folder, file)
                            # BUG FIX: was thread.in_progress_signal.emit(...);
                            # the thread's connected signal (see
                            # CreateArchive.__init__) is progress_signal.
                            thread.progress_signal.emit(full_path)
                            tar_file.add(full_path,
                                         arcname=os.path.join(
                                             root_folder[len(catalog_path) + 1:],
                                             file))
    except IOError:
        thread.access_signal.emit()
def pack_zip(thread, name, target_path, path_list):
    """Create the zip archive *name* inside *target_path* from *path_list*.

    Emits ``thread.progress_signal`` with each file path as it is added, and
    ``thread.access_signal`` when the archive cannot be written (IOError).

    :param thread: worker exposing ``progress_signal`` / ``access_signal``
    :param name: archive file name, including the ``.zip`` suffix
    :param target_path: directory in which the archive is created
    :param path_list: paths of files and directories to pack
    """
    zip_path = os.path.join(os.path.abspath(target_path), name)
    try:
        with zipfile.ZipFile(zip_path, 'w') as zip_file:
            for path_file in path_list:
                if not os.path.isdir(path_file):
                    thread.progress_signal.emit(path_file)
                    zip_file.write(path_file,
                                   arcname=os.path.basename(path_file))
                else:
                    # Store entries relative to the directory's parent so the
                    # archive recreates the selected folder itself.
                    path_folder = os.path.dirname(os.path.abspath(path_file))
                    for root_folder, subfolders, files in os.walk(path_file):
                        for file in files:
                            full_path = os.path.join(root_folder, file)
                            # BUG FIX: was thread.emit(...) — the thread object
                            # has no emit(); progress_signal.emit is what every
                            # other call site uses.
                            thread.progress_signal.emit(full_path)
                            zip_file.write(full_path,
                                           arcname=os.path.join(
                                               root_folder[len(path_folder) + 1:],
                                               file))
    except IOError:
        thread.access_signal.emit()
<|reserved_special_token_1|>
__author__ = 'piotrek'
import os
import zipfile
import tarfile
from PyQt5 import QtWidgets
from PyQt5 import QtGui
from PyQt5 import QtCore
from Widgets.list_view import ListView
from Threads.PackThread import PackThread
class CreateArchive(QtWidgets.QDialog):
    def __init__(self, model, index, path, parent=None):
        """Modal dialog that packs selected files into a .zip/.tar archive.

        :param model: file-system model used to resolve file paths and icons
        :param index: model index of the initially selected file/catalog
        :param path: initial target directory for the archive
        :param parent: optional parent widget
        """
        super().__init__(parent)
        self.setWindowTitle('Utworz archiwum')
        self.setWindowModality(QtCore.Qt.WindowModal)
        self.resize(350, 400)
        self.path = path          # current target directory
        self.file_model = model   # source file-system model
        self.index = index        # first entry of the pack list
        self.create_components()
        self.create_layout()
        # Background worker: status -> ended, progress -> per-file update,
        # access -> archive could not be written.
        self.pack_thread = PackThread()
        self.pack_thread.status_signal.connect(self.ended)
        self.pack_thread.progress_signal.connect(self.progress)
        self.pack_thread.access_signal.connect(self.access)
def create_item(self, index):
path = os.path.abspath(self.file_model.filePath(index))
item = QtGui.QStandardItem(os.path.basename(path))
item.setIcon(self.file_model.fileIcon(index))
item.setCheckable(True)
item.setEditable(False)
return item
    def create_components(self):
        """Create all child widgets of the dialog (arranged later in create_layout)."""
        self.option_widget = QtWidgets.QWidget()
        # Archive name input, limited to 30 word characters.
        self.name_lbl = QtWidgets.QLabel('Nazwa')
        self.name_edit = QtWidgets.QLineEdit('untitled')
        self.name_edit.setMaxLength(30)
        # NOTE(review): '\w{30}' is a non-raw string; prefer r'\w{30}' for clarity.
        self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp('\w{30}'), self.name_edit))
        # Archive format selector; item text doubles as the file extension.
        self.archive_type_cb = QtWidgets.QComboBox()
        self.archive_type_cb.addItem('.zip')
        self.archive_type_cb.addItem('.tar')
        # Read-only, framed label showing the target directory.
        self.path_lbl = QtWidgets.QLabel(self.path)
        self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
        self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.set_path)
        # List of files/catalogs to pack, pre-seeded with the selected index.
        self.file_list = ListView('Pliki do zapakowania')
        self.file_list.add_element(self.index)
        self.file_list.add_to_model(self.create_item(self.index))
        self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog', clicked=self.add_catalog)
        self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=self.add_file)
        self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone', clicked=self.file_list.remove_selected)
        # Progress bar is switched to busy mode (maximum 0) while packing.
        self.progress_bar = QtWidgets.QProgressBar()
        self.progress_bar.setMinimum(0)
        self.progress_lbl = QtWidgets.QLabel()
        self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.pack_files)
def set_path(self):
path = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())
if path:
self.path = path
self.path_lbl.setText(self.path)
def create_layout(self):
option_layout = QtWidgets.QGridLayout()
v_option_layout = QtWidgets.QVBoxLayout()
main_layout = QtWidgets.QGridLayout()
v_main_layout = QtWidgets.QVBoxLayout()
h_name_layout = QtWidgets.QHBoxLayout()
h_name_layout.addWidget(self.name_lbl)
h_name_layout.addWidget(self.name_edit)
h_name_layout.addWidget(self.archive_type_cb)
v_option_layout.addLayout(h_name_layout)
h_path_layout = QtWidgets.QHBoxLayout()
h_path_layout.addWidget(self.path_lbl)
h_path_layout.addWidget(self.set_path_btn)
v_option_layout.addLayout(h_path_layout)
v_option_layout.addWidget(self.file_list)
h_remove_layout = QtWidgets.QHBoxLayout()
h_remove_layout.addWidget(self.add_folder_btn)
h_remove_layout.addWidget(self.add_file_btn)
h_remove_layout.addWidget(self.remove_selected_btn)
v_option_layout.addLayout(h_remove_layout)
option_layout.addLayout(v_option_layout, 0, 0, 1, 1)
self.option_widget.setLayout(option_layout)
v_main_layout.addWidget(self.option_widget)
v_main_layout.addWidget(self.progress_bar)
v_main_layout.addWidget(self.pack_btn)
main_layout.addLayout(v_main_layout, 0, 0, 1, 1)
self.setLayout(main_layout)
def pack_files(self):
if not self.name_edit.text():
return
if not self.file_list.get_quantity():
return
self.option_widget.setEnabled(False)
self.progress_bar.setMaximum(0)
name = self.name_edit.text() + self.archive_type_cb.itemData(self.archive_type_cb.currentIndex(),
QtCore.Qt.DisplayRole)
path = self.path_lbl.text()
list_index = self.file_list.get_index_list()
path_list = [self.file_model.filePath(index) for index in list_index]
if self.archive_type_cb.currentText() == '.zip':
self.pack_thread.set(pack_zip, name, path, path_list)
elif self.archive_type_cb.currentText() == '.tar':
self.pack_thread.set(pack_tar, name, path, path_list)
self.pack_thread.start()
def add_catalog(self):
catalog = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())
if catalog and not QtCore.QFileInfo(catalog).isSymLink():
index = self.file_model.index(catalog)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def add_file(self):
file, _filter = QtWidgets.QFileDialog.getOpenFileName(self, 'Wybierz plik', QtCore.QDir.homePath())
if file:
index = self.file_model.index(file)
if index not in self.file_list:
self.file_list.add_element(index)
self.file_list.add_to_model(self.create_item(index))
def ended(self):
self.parent().trayIcon.showMessage('Zakonczono',
'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.name),
QtWidgets.QSystemTrayIcon.Information, 2000)
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
def access(self):
self.setWindowTitle('Brak dostepu')
self.pack_btn.setText('Zamknij')
self.progress_bar.setMaximum(1)
self.progress_bar.setValue(1)
self.pack_thread.terminate()
self.pack_btn.clicked.connect(self.close)
def progress(self, info):
print('info', info) # remove
self.setWindowTitle(info)
    def closeEvent(self, QCloseEvent):
        """Veto closing while packing is still running, then refresh parent views.

        NOTE(review): the refresh of the parent's catalog list below runs even
        when the close is vetoed — confirm that is intended.
        """
        if not self.pack_thread.ended:
            QCloseEvent.ignore()
        self.parent().catalog_list.setRootIndex(self.parent().catalog_list.rootIndex())
        self.parent().catalog_list.scrollTo(self.parent().catalog_list.currentIndex())
        self.parent().model_list.refresh(self.parent().catalog_list.rootIndex())
def pack_tar(thread, name, target_path, path_list):
    """Pack every entry of *path_list* into a tar archive *name* in *target_path*.

    Plain files are stored under their basename; directories are stored
    recursively, relative to their parent so the archive keeps the top-level
    folder name.  Emits ``thread.progress_signal`` with each file path before
    adding it, and ``thread.access_signal`` if the archive cannot be written.
    """
    tar_path = os.path.join(os.path.abspath(target_path), name)
    try:
        with tarfile.open(tar_path, 'w') as tar_file:
            for file_path in path_list:
                if not os.path.isdir(file_path):
                    thread.progress_signal.emit(file_path)
                    tar_file.add(file_path, arcname=os.path.basename(file_path))
                else:
                    catalog_path = os.path.dirname(os.path.abspath(file_path))
                    for root_folder, subfolders, files in os.walk(file_path):
                        for file in files:
                            full_path = os.path.join(root_folder, file)
                            # BUGFIX: was thread.in_progress_signal — the rest of
                            # this module (and the dialog) uses progress_signal.
                            thread.progress_signal.emit(full_path)
                            tar_file.add(
                                full_path,
                                arcname=os.path.join(
                                    root_folder[len(catalog_path) + 1:], file))
    except IOError:
        thread.access_signal.emit()
def pack_zip(thread, name, target_path, path_list):
    """Pack every entry of *path_list* into a zip archive *name* in *target_path*.

    Plain files are stored under their basename; directories are stored
    recursively, relative to their parent so the archive keeps the top-level
    folder name.  Emits ``thread.progress_signal`` with each file path before
    adding it, and ``thread.access_signal`` if the archive cannot be written.
    """
    zip_path = os.path.join(os.path.abspath(target_path), name)
    try:
        with zipfile.ZipFile(zip_path, 'w') as zip_file:
            for path_file in path_list:
                if not os.path.isdir(path_file):
                    thread.progress_signal.emit(path_file)
                    zip_file.write(path_file,
                                   arcname=os.path.basename(path_file))
                else:
                    path_folder = os.path.dirname(os.path.abspath(path_file))
                    for root_folder, subfolders, files in os.walk(path_file):
                        for file in files:
                            full_path = os.path.join(root_folder, file)
                            # BUGFIX: was thread.emit(...) — the thread object
                            # has no bare emit; use progress_signal like the
                            # file branch above (would raise AttributeError
                            # whenever a directory was packed).
                            thread.progress_signal.emit(full_path)
                            zip_file.write(
                                full_path,
                                arcname=os.path.join(
                                    root_folder[len(path_folder) + 1:], file))
    except IOError:
        thread.access_signal.emit()
|
flexible
|
{
"blob_id": "7a41826f65f2f55b4c678df2ac06027df6ca50d4",
"index": 3623,
"step-1": "<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n <mask token>\n <mask token>\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n 
h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n <mask token>\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 
'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n <mask token>\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n <mask token>\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n 
option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n <mask token>\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', 
QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n <mask token>\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n\n def __init__(self, model, index, path, parent=None):\n super().__init__(parent)\n self.setWindowTitle('Utworz archiwum')\n self.setWindowModality(QtCore.Qt.WindowModal)\n self.resize(350, 400)\n self.path = path\n self.file_model = model\n self.index = index\n self.create_components()\n self.create_layout()\n self.pack_thread = PackThread()\n self.pack_thread.status_signal.connect(self.ended)\n self.pack_thread.progress_signal.connect(self.progress)\n self.pack_thread.access_signal.connect(self.access)\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn = 
QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = 
self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n\n def add_catalog(self):\n catalog = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if catalog and not QtCore.QFileInfo(catalog).isSymLink():\n index = self.file_model.index(catalog)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def access(self):\n self.setWindowTitle('Brak dostepu')\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n 
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\ndef pack_tar(thread, name, target_path, path_list):\n tar_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with tarfile.open(tar_path, 'w') as tar_file:\n for file_path in path_list:\n if not os.path.isdir(file_path):\n thread.progress_signal.emit(file_path)\n tar_file.add(file_path, arcname=os.path.basename(file_path)\n )\n else:\n catalog_path = os.path.dirname(os.path.abspath(file_path))\n for root_folder, subfolders, files in os.walk(file_path):\n for file in files:\n thread.in_progress_signal.emit(os.path.join(\n root_folder, file))\n tar_file.add(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n catalog_path) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n\n\ndef pack_zip(thread, name, target_path, path_list):\n zip_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with zipfile.ZipFile(zip_path, 'w') as zip_file:\n for path_file in path_list:\n if not os.path.isdir(path_file):\n thread.progress_signal.emit(path_file)\n zip_file.write(path_file, arcname=os.path.basename(\n path_file))\n else:\n path_folder = os.path.dirname(os.path.abspath(path_file))\n for root_folder, subfolders, files in os.walk(path_file):\n for file in files:\n thread.emit(os.path.join(root_folder, file))\n zip_file.write(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n path_folder) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n",
"step-4": "__author__ = 'piotrek'\n<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n\n def __init__(self, model, index, path, parent=None):\n super().__init__(parent)\n self.setWindowTitle('Utworz archiwum')\n self.setWindowModality(QtCore.Qt.WindowModal)\n self.resize(350, 400)\n self.path = path\n self.file_model = model\n self.index = index\n self.create_components()\n self.create_layout()\n self.pack_thread = PackThread()\n self.pack_thread.status_signal.connect(self.ended)\n self.pack_thread.progress_signal.connect(self.progress)\n self.pack_thread.access_signal.connect(self.access)\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn 
= QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = 
self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n\n def add_catalog(self):\n catalog = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if catalog and not QtCore.QFileInfo(catalog).isSymLink():\n index = self.file_model.index(catalog)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def access(self):\n self.setWindowTitle('Brak dostepu')\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n 
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\ndef pack_tar(thread, name, target_path, path_list):\n tar_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with tarfile.open(tar_path, 'w') as tar_file:\n for file_path in path_list:\n if not os.path.isdir(file_path):\n thread.progress_signal.emit(file_path)\n tar_file.add(file_path, arcname=os.path.basename(file_path)\n )\n else:\n catalog_path = os.path.dirname(os.path.abspath(file_path))\n for root_folder, subfolders, files in os.walk(file_path):\n for file in files:\n thread.in_progress_signal.emit(os.path.join(\n root_folder, file))\n tar_file.add(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n catalog_path) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n\n\ndef pack_zip(thread, name, target_path, path_list):\n zip_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with zipfile.ZipFile(zip_path, 'w') as zip_file:\n for path_file in path_list:\n if not os.path.isdir(path_file):\n thread.progress_signal.emit(path_file)\n zip_file.write(path_file, arcname=os.path.basename(\n path_file))\n else:\n path_folder = os.path.dirname(os.path.abspath(path_file))\n for root_folder, subfolders, files in os.walk(path_file):\n for file in files:\n thread.emit(os.path.join(root_folder, file))\n zip_file.write(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n path_folder) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n",
"step-5": "__author__ = 'piotrek'\n\nimport os\nimport zipfile\nimport tarfile\n\nfrom PyQt5 import QtWidgets\nfrom PyQt5 import QtGui\nfrom PyQt5 import QtCore\n\nfrom Widgets.list_view import ListView\nfrom Threads.PackThread import PackThread\n\n\nclass CreateArchive(QtWidgets.QDialog):\n\n def __init__(self, model, index, path, parent=None):\n super().__init__(parent)\n self.setWindowTitle('Utworz archiwum')\n self.setWindowModality(QtCore.Qt.WindowModal)\n self.resize(350, 400)\n\n self.path = path\n self.file_model = model\n self.index = index\n\n self.create_components()\n self.create_layout()\n\n self.pack_thread = PackThread()\n self.pack_thread.status_signal.connect(self.ended)\n self.pack_thread.progress_signal.connect(self.progress)\n self.pack_thread.access_signal.connect(self.access)\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp('\\w{30}'), self.name_edit))\n\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.set_path)\n\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n 
self.file_list.add_to_model(self.create_item(self.index))\n\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog', clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=self.add_file)\n self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone', clicked=self.file_list.remove_selected)\n\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n\n self.progress_lbl = QtWidgets.QLabel()\n\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())\n\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n\n v_option_layout.addLayout(h_name_layout)\n\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n\n v_option_layout.addLayout(h_path_layout)\n\n v_option_layout.addWidget(self.file_list)\n\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n\n v_option_layout.addLayout(h_remove_layout)\n\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n\n self.option_widget.setLayout(option_layout)\n\n v_main_layout.addWidget(self.option_widget)\n\n v_main_layout.addWidget(self.progress_bar)\n\n v_main_layout.addWidget(self.pack_btn)\n\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not 
self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.archive_type_cb.currentIndex(),\n QtCore.Qt.DisplayRole)\n path = self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n\n path_list = [self.file_model.filePath(index) for index in list_index]\n\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n\n self.pack_thread.start()\n\n def add_catalog(self):\n catalog = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())\n\n if catalog and not QtCore.QFileInfo(catalog).isSymLink():\n index = self.file_model.index(catalog)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self, 'Wybierz plik', QtCore.QDir.homePath())\n\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.name),\n QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def access(self):\n self.setWindowTitle('Brak dostepu')\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def progress(self, info):\n print('info', info) # 
remove\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.currentIndex())\n self.parent().model_list.refresh(self.parent().catalog_list.rootIndex())\n\n\ndef pack_tar(thread, name, target_path, path_list):\n tar_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with tarfile.open(tar_path, 'w') as tar_file:\n for file_path in path_list:\n if not os.path.isdir(file_path):\n thread.progress_signal.emit(file_path)\n tar_file.add(file_path, arcname=os.path.basename(file_path))\n else:\n catalog_path = os.path.dirname(os.path.abspath(file_path))\n for root_folder, subfolders, files in os.walk(file_path):\n for file in files:\n thread.in_progress_signal.emit(os.path.join(root_folder, file))\n tar_file.add(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(catalog_path) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n\n\ndef pack_zip(thread, name, target_path, path_list):\n zip_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with zipfile.ZipFile(zip_path, 'w') as zip_file:\n for path_file in path_list:\n if not os.path.isdir(path_file):\n thread.progress_signal.emit(path_file)\n zip_file.write(path_file, arcname=os.path.basename(path_file))\n else:\n path_folder = os.path.dirname(os.path.abspath(path_file))\n for root_folder, subfolders, files in os.walk(path_file):\n for file in files:\n thread.emit(os.path.join(root_folder, file))\n zip_file.write(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(path_folder) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n",
"step-ids": [
9,
10,
15,
16,
18
]
}
|
[
9,
10,
15,
16,
18
] |
<|reserved_special_token_0|>
def get_youtube_handler():
"""Return the API Youtube object."""
options = {}
home = os.path.expanduser('~')
default_credentials = os.path.join(home, '.youtube-upload-credentials.json'
)
client_secrets = os.path.join(home, '.client_secrets.json')
credentials = default_credentials
debug('Using client secrets: {0}'.format(client_secrets))
debug('Using credentials file: {0}'.format(credentials))
get_code_callback = auth.browser.get_code
return auth.get_resource(client_secrets, credentials, get_code_callback
=get_code_callback)
<|reserved_special_token_0|>
def add_video_to_existing_playlist(youtube, playlist_id, video_id):
"""Add video to playlist (by identifier) and return the playlist ID."""
lib.debug('Adding video to playlist: {0}'.format(playlist_id))
return youtube.playlistItems().insert(part='snippet', body={'snippet':
{'playlistId': playlist_id, 'resourceId': {'kind': 'youtube#video',
'videoId': video_id}}}).execute()
def add_video_to_playlist(youtube, args, privacy='public'):
"""Add video to playlist (by title) and return the full response."""
video_id = args['video_id']
playlist_id = args['playlist_id']
print(video_id)
if playlist_id:
return add_video_to_existing_playlist(youtube, playlist_id, video_id)
else:
lib.debug('Error adding video to playlist')
def main(args):
args = args
youtube = get_youtube_handler()
try:
if youtube:
add_video_to_playlist(youtube, args)
except HttpError as e:
print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
print('Tag "%s" was added to video id "%s".' % (args.add_tag, args.
video_id))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_youtube_handler():
"""Return the API Youtube object."""
options = {}
home = os.path.expanduser('~')
default_credentials = os.path.join(home, '.youtube-upload-credentials.json'
)
client_secrets = os.path.join(home, '.client_secrets.json')
credentials = default_credentials
debug('Using client secrets: {0}'.format(client_secrets))
debug('Using credentials file: {0}'.format(credentials))
get_code_callback = auth.browser.get_code
return auth.get_resource(client_secrets, credentials, get_code_callback
=get_code_callback)
<|reserved_special_token_0|>
def add_video_to_existing_playlist(youtube, playlist_id, video_id):
"""Add video to playlist (by identifier) and return the playlist ID."""
lib.debug('Adding video to playlist: {0}'.format(playlist_id))
return youtube.playlistItems().insert(part='snippet', body={'snippet':
{'playlistId': playlist_id, 'resourceId': {'kind': 'youtube#video',
'videoId': video_id}}}).execute()
def add_video_to_playlist(youtube, args, privacy='public'):
"""Add video to playlist (by title) and return the full response."""
video_id = args['video_id']
playlist_id = args['playlist_id']
print(video_id)
if playlist_id:
return add_video_to_existing_playlist(youtube, playlist_id, video_id)
else:
lib.debug('Error adding video to playlist')
def main(args):
args = args
youtube = get_youtube_handler()
try:
if youtube:
add_video_to_playlist(youtube, args)
except HttpError as e:
print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
print('Tag "%s" was added to video id "%s".' % (args.add_tag, args.
video_id))
def run():
titles = [title.strip('\n') for title in open('update_playlist.txt', 'r')]
playlist_id = 'PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e'
for title in titles:
aa_id = title.split('|||')[0]
new_test = {'video_id': aa_id, 'playlist_id': playlist_id}
main(new_test)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
debug = lib.debug
def get_youtube_handler():
"""Return the API Youtube object."""
options = {}
home = os.path.expanduser('~')
default_credentials = os.path.join(home, '.youtube-upload-credentials.json'
)
client_secrets = os.path.join(home, '.client_secrets.json')
credentials = default_credentials
debug('Using client secrets: {0}'.format(client_secrets))
debug('Using credentials file: {0}'.format(credentials))
get_code_callback = auth.browser.get_code
return auth.get_resource(client_secrets, credentials, get_code_callback
=get_code_callback)
<|reserved_special_token_0|>
def add_video_to_existing_playlist(youtube, playlist_id, video_id):
"""Add video to playlist (by identifier) and return the playlist ID."""
lib.debug('Adding video to playlist: {0}'.format(playlist_id))
return youtube.playlistItems().insert(part='snippet', body={'snippet':
{'playlistId': playlist_id, 'resourceId': {'kind': 'youtube#video',
'videoId': video_id}}}).execute()
def add_video_to_playlist(youtube, args, privacy='public'):
"""Add video to playlist (by title) and return the full response."""
video_id = args['video_id']
playlist_id = args['playlist_id']
print(video_id)
if playlist_id:
return add_video_to_existing_playlist(youtube, playlist_id, video_id)
else:
lib.debug('Error adding video to playlist')
def main(args):
args = args
youtube = get_youtube_handler()
try:
if youtube:
add_video_to_playlist(youtube, args)
except HttpError as e:
print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
print('Tag "%s" was added to video id "%s".' % (args.add_tag, args.
video_id))
def run():
titles = [title.strip('\n') for title in open('update_playlist.txt', 'r')]
playlist_id = 'PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e'
for title in titles:
aa_id = title.split('|||')[0]
new_test = {'video_id': aa_id, 'playlist_id': playlist_id}
main(new_test)
<|reserved_special_token_1|>
import os
import sys
import googleapiclient.errors
import oauth2client
from googleapiclient.errors import HttpError
from . import auth
from . import lib
debug = lib.debug
def get_youtube_handler():
    """Return an authenticated YouTube API resource object.

    Reads the OAuth client secrets from ``~/.client_secrets.json`` and
    caches/loads user credentials in ``~/.youtube-upload-credentials.json``.
    Authorization codes are obtained through the browser flow
    (``auth.browser.get_code``).
    """
    home = os.path.expanduser('~')
    # Credentials cache file written after the first successful OAuth flow.
    credentials = os.path.join(home, '.youtube-upload-credentials.json')
    client_secrets = os.path.join(home, '.client_secrets.json')
    debug('Using client secrets: {0}'.format(client_secrets))
    debug('Using credentials file: {0}'.format(credentials))
    # Browser-based flow: opens a URL and asks the user for the code.
    get_code_callback = auth.browser.get_code
    return auth.get_resource(client_secrets, credentials,
                             get_code_callback=get_code_callback)
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
def add_video_to_existing_playlist(youtube, playlist_id, video_id):
    """Add video to playlist (by identifier) and return the playlist ID."""
    lib.debug('Adding video to playlist: {0}'.format(playlist_id))
    # Build the playlistItems.insert request body separately for clarity.
    request_body = {
        'snippet': {
            'playlistId': playlist_id,
            'resourceId': {
                'kind': 'youtube#video',
                'videoId': video_id,
            },
        },
    }
    request = youtube.playlistItems().insert(part='snippet', body=request_body)
    return request.execute()
def add_video_to_playlist(youtube, args, privacy='public'):
    """Add video to playlist (by title) and return the full response."""
    video_id = args['video_id']
    playlist_id = args['playlist_id']
    print(video_id)
    # Guard clause: without a playlist id there is nothing to insert into.
    if not playlist_id:
        lib.debug('Error adding video to playlist')
        return None
    return add_video_to_existing_playlist(youtube, playlist_id, video_id)
def main(args):
    """Add the video described by *args* to its target playlist.

    *args* is a mapping with ``video_id`` and ``playlist_id`` keys
    (see :func:`run`).  HTTP failures from the API are reported to
    stdout instead of propagating.
    """
    youtube = get_youtube_handler()
    try:
        if youtube:
            add_video_to_playlist(youtube, args)
    except HttpError as e:
        print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
    else:
        # Bug fix: *args* is a dict, so the original attribute access
        # (args.add_tag / args.video_id) raised AttributeError; it also
        # printed the success message even after an HTTP error.
        print('Tag "%s" was added to video id "%s".'
              % (args.get('add_tag'), args['video_id']))
def run():
    """Read video ids from ``update_playlist.txt`` and add each to the playlist.

    Each line of the file is ``<video_id>|||<rest>``; only the id part
    before the ``|||`` separator is used.
    """
    # Bug fix: the original opened the file inside a comprehension and
    # never closed it; use a context manager to release the handle.
    with open('update_playlist.txt', 'r') as fh:
        titles = [title.strip('\n') for title in fh]
    playlist_id = 'PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e'
    for title in titles:
        aa_id = title.split('|||')[0]
        new_test = {'video_id': aa_id, 'playlist_id': playlist_id}
        main(new_test)
<|reserved_special_token_1|>
#!/usr/bin/env python
import os
import sys
#from io import open
import googleapiclient.errors
import oauth2client
from googleapiclient.errors import HttpError
from . import auth
from . import lib
debug = lib.debug
# modified start
def get_youtube_handler():
"""Return the API Youtube object."""
options = {}
home = os.path.expanduser("~")
default_credentials = os.path.join(home, ".youtube-upload-credentials.json")
#client_secrets = options.client_secrets or os.path.join(home, ".client_secrets.json")
#credentials = options.credentials_file or default_credentials
client_secrets = os.path.join(home, ".client_secrets.json")
credentials = default_credentials
debug("Using client secrets: {0}".format(client_secrets))
debug("Using credentials file: {0}".format(credentials))
#get_code_callback = (auth.browser.get_code
#if options.auth_browser else auth.console.get_code)
get_code_callback = auth.browser.get_code
return auth.get_resource(client_secrets, credentials,
get_code_callback=get_code_callback)
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
def add_video_to_existing_playlist(youtube, playlist_id, video_id):
"""Add video to playlist (by identifier) and return the playlist ID."""
lib.debug("Adding video to playlist: {0}".format(playlist_id))
return youtube.playlistItems().insert(part="snippet", body={
"snippet": {
"playlistId": playlist_id,
"resourceId": {
"kind": "youtube#video",
"videoId": video_id,
}
}
}).execute()
def add_video_to_playlist(youtube, args, privacy="public"):
"""Add video to playlist (by title) and return the full response."""
video_id = args['video_id']
playlist_id = args['playlist_id']
print(video_id)
#print(type(args))
if playlist_id:
return add_video_to_existing_playlist(youtube, playlist_id, video_id)
else:
lib.debug("Error adding video to playlist")
def main(args):
#print(args)
args = args
#print(args)
youtube = get_youtube_handler()
try:
if youtube:
add_video_to_playlist(youtube, args)
except HttpError as e:
print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
print('Tag "%s" was added to video id "%s".' % (args.add_tag, args.video_id))
def run():
titles = [title.strip('\n') for title in open('update_playlist.txt', 'r')]
playlist_id = "PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e"
for title in titles:
#print(title.split('|||')[0])
aa_id = title.split('|||')[0]
new_test = {'video_id':aa_id,
'playlist_id':playlist_id
}
main(
new_test
)
# modified end
|
flexible
|
{
"blob_id": "65d08fe1a3f6e5cc2458209706307513d808bdb2",
"index": 3824,
"step-1": "<mask token>\n\n\ndef get_youtube_handler():\n \"\"\"Return the API Youtube object.\"\"\"\n options = {}\n home = os.path.expanduser('~')\n default_credentials = os.path.join(home, '.youtube-upload-credentials.json'\n )\n client_secrets = os.path.join(home, '.client_secrets.json')\n credentials = default_credentials\n debug('Using client secrets: {0}'.format(client_secrets))\n debug('Using credentials file: {0}'.format(credentials))\n get_code_callback = auth.browser.get_code\n return auth.get_resource(client_secrets, credentials, get_code_callback\n =get_code_callback)\n\n\n<mask token>\n\n\ndef add_video_to_existing_playlist(youtube, playlist_id, video_id):\n \"\"\"Add video to playlist (by identifier) and return the playlist ID.\"\"\"\n lib.debug('Adding video to playlist: {0}'.format(playlist_id))\n return youtube.playlistItems().insert(part='snippet', body={'snippet':\n {'playlistId': playlist_id, 'resourceId': {'kind': 'youtube#video',\n 'videoId': video_id}}}).execute()\n\n\ndef add_video_to_playlist(youtube, args, privacy='public'):\n \"\"\"Add video to playlist (by title) and return the full response.\"\"\"\n video_id = args['video_id']\n playlist_id = args['playlist_id']\n print(video_id)\n if playlist_id:\n return add_video_to_existing_playlist(youtube, playlist_id, video_id)\n else:\n lib.debug('Error adding video to playlist')\n\n\ndef main(args):\n args = args\n youtube = get_youtube_handler()\n try:\n if youtube:\n add_video_to_playlist(youtube, args)\n except HttpError as e:\n print('An HTTP error %d occurred:\\n%s' % (e.resp.status, e.content))\n print('Tag \"%s\" was added to video id \"%s\".' % (args.add_tag, args.\n video_id))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_youtube_handler():\n \"\"\"Return the API Youtube object.\"\"\"\n options = {}\n home = os.path.expanduser('~')\n default_credentials = os.path.join(home, '.youtube-upload-credentials.json'\n )\n client_secrets = os.path.join(home, '.client_secrets.json')\n credentials = default_credentials\n debug('Using client secrets: {0}'.format(client_secrets))\n debug('Using credentials file: {0}'.format(credentials))\n get_code_callback = auth.browser.get_code\n return auth.get_resource(client_secrets, credentials, get_code_callback\n =get_code_callback)\n\n\n<mask token>\n\n\ndef add_video_to_existing_playlist(youtube, playlist_id, video_id):\n \"\"\"Add video to playlist (by identifier) and return the playlist ID.\"\"\"\n lib.debug('Adding video to playlist: {0}'.format(playlist_id))\n return youtube.playlistItems().insert(part='snippet', body={'snippet':\n {'playlistId': playlist_id, 'resourceId': {'kind': 'youtube#video',\n 'videoId': video_id}}}).execute()\n\n\ndef add_video_to_playlist(youtube, args, privacy='public'):\n \"\"\"Add video to playlist (by title) and return the full response.\"\"\"\n video_id = args['video_id']\n playlist_id = args['playlist_id']\n print(video_id)\n if playlist_id:\n return add_video_to_existing_playlist(youtube, playlist_id, video_id)\n else:\n lib.debug('Error adding video to playlist')\n\n\ndef main(args):\n args = args\n youtube = get_youtube_handler()\n try:\n if youtube:\n add_video_to_playlist(youtube, args)\n except HttpError as e:\n print('An HTTP error %d occurred:\\n%s' % (e.resp.status, e.content))\n print('Tag \"%s\" was added to video id \"%s\".' % (args.add_tag, args.\n video_id))\n\n\ndef run():\n titles = [title.strip('\\n') for title in open('update_playlist.txt', 'r')]\n playlist_id = 'PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e'\n for title in titles:\n aa_id = title.split('|||')[0]\n new_test = {'video_id': aa_id, 'playlist_id': playlist_id}\n main(new_test)\n",
"step-3": "<mask token>\ndebug = lib.debug\n\n\ndef get_youtube_handler():\n \"\"\"Return the API Youtube object.\"\"\"\n options = {}\n home = os.path.expanduser('~')\n default_credentials = os.path.join(home, '.youtube-upload-credentials.json'\n )\n client_secrets = os.path.join(home, '.client_secrets.json')\n credentials = default_credentials\n debug('Using client secrets: {0}'.format(client_secrets))\n debug('Using credentials file: {0}'.format(credentials))\n get_code_callback = auth.browser.get_code\n return auth.get_resource(client_secrets, credentials, get_code_callback\n =get_code_callback)\n\n\n<mask token>\n\n\ndef add_video_to_existing_playlist(youtube, playlist_id, video_id):\n \"\"\"Add video to playlist (by identifier) and return the playlist ID.\"\"\"\n lib.debug('Adding video to playlist: {0}'.format(playlist_id))\n return youtube.playlistItems().insert(part='snippet', body={'snippet':\n {'playlistId': playlist_id, 'resourceId': {'kind': 'youtube#video',\n 'videoId': video_id}}}).execute()\n\n\ndef add_video_to_playlist(youtube, args, privacy='public'):\n \"\"\"Add video to playlist (by title) and return the full response.\"\"\"\n video_id = args['video_id']\n playlist_id = args['playlist_id']\n print(video_id)\n if playlist_id:\n return add_video_to_existing_playlist(youtube, playlist_id, video_id)\n else:\n lib.debug('Error adding video to playlist')\n\n\ndef main(args):\n args = args\n youtube = get_youtube_handler()\n try:\n if youtube:\n add_video_to_playlist(youtube, args)\n except HttpError as e:\n print('An HTTP error %d occurred:\\n%s' % (e.resp.status, e.content))\n print('Tag \"%s\" was added to video id \"%s\".' % (args.add_tag, args.\n video_id))\n\n\ndef run():\n titles = [title.strip('\\n') for title in open('update_playlist.txt', 'r')]\n playlist_id = 'PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e'\n for title in titles:\n aa_id = title.split('|||')[0]\n new_test = {'video_id': aa_id, 'playlist_id': playlist_id}\n main(new_test)\n",
"step-4": "import os\nimport sys\nimport googleapiclient.errors\nimport oauth2client\nfrom googleapiclient.errors import HttpError\nfrom . import auth\nfrom . import lib\ndebug = lib.debug\n\n\ndef get_youtube_handler():\n \"\"\"Return the API Youtube object.\"\"\"\n options = {}\n home = os.path.expanduser('~')\n default_credentials = os.path.join(home, '.youtube-upload-credentials.json'\n )\n client_secrets = os.path.join(home, '.client_secrets.json')\n credentials = default_credentials\n debug('Using client secrets: {0}'.format(client_secrets))\n debug('Using credentials file: {0}'.format(credentials))\n get_code_callback = auth.browser.get_code\n return auth.get_resource(client_secrets, credentials, get_code_callback\n =get_code_callback)\n\n\nfrom apiclient import discovery\nfrom oauth2client import client\nfrom oauth2client import tools\nfrom oauth2client.file import Storage\n\n\ndef add_video_to_existing_playlist(youtube, playlist_id, video_id):\n \"\"\"Add video to playlist (by identifier) and return the playlist ID.\"\"\"\n lib.debug('Adding video to playlist: {0}'.format(playlist_id))\n return youtube.playlistItems().insert(part='snippet', body={'snippet':\n {'playlistId': playlist_id, 'resourceId': {'kind': 'youtube#video',\n 'videoId': video_id}}}).execute()\n\n\ndef add_video_to_playlist(youtube, args, privacy='public'):\n \"\"\"Add video to playlist (by title) and return the full response.\"\"\"\n video_id = args['video_id']\n playlist_id = args['playlist_id']\n print(video_id)\n if playlist_id:\n return add_video_to_existing_playlist(youtube, playlist_id, video_id)\n else:\n lib.debug('Error adding video to playlist')\n\n\ndef main(args):\n args = args\n youtube = get_youtube_handler()\n try:\n if youtube:\n add_video_to_playlist(youtube, args)\n except HttpError as e:\n print('An HTTP error %d occurred:\\n%s' % (e.resp.status, e.content))\n print('Tag \"%s\" was added to video id \"%s\".' 
% (args.add_tag, args.\n video_id))\n\n\ndef run():\n titles = [title.strip('\\n') for title in open('update_playlist.txt', 'r')]\n playlist_id = 'PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e'\n for title in titles:\n aa_id = title.split('|||')[0]\n new_test = {'video_id': aa_id, 'playlist_id': playlist_id}\n main(new_test)\n",
"step-5": "#!/usr/bin/env python\n\n\nimport os\nimport sys\n#from io import open\n\nimport googleapiclient.errors\nimport oauth2client\nfrom googleapiclient.errors import HttpError\n\nfrom . import auth\nfrom . import lib\n\n\ndebug = lib.debug\n\n\n# modified start \ndef get_youtube_handler():\n \"\"\"Return the API Youtube object.\"\"\"\n options = {}\n home = os.path.expanduser(\"~\")\n default_credentials = os.path.join(home, \".youtube-upload-credentials.json\")\n #client_secrets = options.client_secrets or os.path.join(home, \".client_secrets.json\")\n #credentials = options.credentials_file or default_credentials\n client_secrets = os.path.join(home, \".client_secrets.json\")\n credentials = default_credentials \n debug(\"Using client secrets: {0}\".format(client_secrets))\n debug(\"Using credentials file: {0}\".format(credentials))\n #get_code_callback = (auth.browser.get_code\n #if options.auth_browser else auth.console.get_code)\n get_code_callback = auth.browser.get_code\n return auth.get_resource(client_secrets, credentials,\n get_code_callback=get_code_callback)\n\nfrom apiclient import discovery\nfrom oauth2client import client\nfrom oauth2client import tools\nfrom oauth2client.file import Storage \n \ndef add_video_to_existing_playlist(youtube, playlist_id, video_id):\n \"\"\"Add video to playlist (by identifier) and return the playlist ID.\"\"\"\n lib.debug(\"Adding video to playlist: {0}\".format(playlist_id))\n return youtube.playlistItems().insert(part=\"snippet\", body={\n \"snippet\": {\n \"playlistId\": playlist_id,\n \"resourceId\": {\n \"kind\": \"youtube#video\",\n \"videoId\": video_id,\n }\n }\n }).execute()\n \ndef add_video_to_playlist(youtube, args, privacy=\"public\"):\n \"\"\"Add video to playlist (by title) and return the full response.\"\"\"\n video_id = args['video_id']\n playlist_id = args['playlist_id']\n \n print(video_id)\n #print(type(args))\n \n if playlist_id:\n return add_video_to_existing_playlist(youtube, playlist_id, 
video_id)\n else:\n lib.debug(\"Error adding video to playlist\") \n\t \n \n\n\ndef main(args):\n #print(args)\n \n args = args\n #print(args)\n youtube = get_youtube_handler()\n \n try:\n if youtube:\n add_video_to_playlist(youtube, args)\n except HttpError as e:\n print('An HTTP error %d occurred:\\n%s' % (e.resp.status, e.content))\n print('Tag \"%s\" was added to video id \"%s\".' % (args.add_tag, args.video_id))\n \n\ndef run():\n titles = [title.strip('\\n') for title in open('update_playlist.txt', 'r')]\n \n playlist_id = \"PLANgBzSjRA6PD-hnW8--eK61w5GTtH_8e\"\n \n for title in titles:\n #print(title.split('|||')[0])\n aa_id = title.split('|||')[0]\n \n new_test = {'video_id':aa_id,\n 'playlist_id':playlist_id\n }\n main(\n new_test\n \n )\n\n# modified end \n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
__author__ = 'piotrek'
import os
import zipfile
import tarfile
from PyQt5 import QtWidgets
from PyQt5 import QtGui
from PyQt5 import QtCore
from Widgets.list_view import ListView
from Threads.PackThread import PackThread
class CreateArchive(QtWidgets.QDialog):
    """Modal dialog that packs selected files/directories into a .zip or .tar archive.

    The dialog collects paths from a file-system *model*, lets the user add
    or remove entries, and hands the actual packing off to a ``PackThread``
    worker so the UI stays responsive.  UI strings are Polish by design.
    """

    def __init__(self, model, index, path, parent=None):
        """Build the dialog for *index* of file-system *model*, targeting *path*.

        :param model: file-system model used to resolve paths and icons
        :param index: model index of the initially selected file/directory
        :param path: default output directory for the archive
        :param parent: optional parent widget
        """
        super().__init__(parent)
        self.setWindowTitle('Utworz archiwum')
        self.setWindowModality(QtCore.Qt.WindowModal)
        self.resize(350, 400)

        self.path = path
        self.file_model = model
        self.index = index

        self.create_components()
        self.create_layout()

        # Worker thread: signals drive completion, title updates and
        # permission-error reporting.
        self.pack_thread = PackThread()
        self.pack_thread.status_signal.connect(self.ended)
        self.pack_thread.progress_signal.connect(self.progress)
        self.pack_thread.access_signal.connect(self.access)

    def create_item(self, index):
        """Return a checkable, non-editable list item for the model *index*."""
        path = os.path.abspath(self.file_model.filePath(index))
        item = QtGui.QStandardItem(os.path.basename(path))
        item.setIcon(self.file_model.fileIcon(index))
        item.setCheckable(True)
        item.setEditable(False)
        return item

    def create_components(self):
        """Instantiate all child widgets (no layout work here)."""
        self.option_widget = QtWidgets.QWidget()

        self.name_lbl = QtWidgets.QLabel('Nazwa')

        self.name_edit = QtWidgets.QLineEdit('untitled')
        self.name_edit.setMaxLength(30)
        # Restrict the archive name to word characters, max 30 long.
        self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(r'\w{30}'), self.name_edit))

        self.archive_type_cb = QtWidgets.QComboBox()
        self.archive_type_cb.addItem('.zip')
        self.archive_type_cb.addItem('.tar')

        self.path_lbl = QtWidgets.QLabel(self.path)
        self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
        self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.set_path)

        self.file_list = ListView('Pliki do zapakowania')
        # Seed the list with the item the dialog was opened for.
        self.file_list.add_element(self.index)
        self.file_list.add_to_model(self.create_item(self.index))

        self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog', clicked=self.add_catalog)
        self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=self.add_file)
        self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone', clicked=self.file_list.remove_selected)

        self.progress_bar = QtWidgets.QProgressBar()
        self.progress_bar.setMinimum(0)

        self.progress_lbl = QtWidgets.QLabel()

        self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.pack_files)

    def set_path(self):
        """Let the user pick the output directory for the archive."""
        path = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())

        if path:
            self.path = path
            self.path_lbl.setText(self.path)

    def create_layout(self):
        """Arrange the widgets created by :meth:`create_components`."""
        option_layout = QtWidgets.QGridLayout()
        v_option_layout = QtWidgets.QVBoxLayout()

        main_layout = QtWidgets.QGridLayout()
        v_main_layout = QtWidgets.QVBoxLayout()

        h_name_layout = QtWidgets.QHBoxLayout()
        h_name_layout.addWidget(self.name_lbl)
        h_name_layout.addWidget(self.name_edit)
        h_name_layout.addWidget(self.archive_type_cb)

        v_option_layout.addLayout(h_name_layout)

        h_path_layout = QtWidgets.QHBoxLayout()
        h_path_layout.addWidget(self.path_lbl)
        h_path_layout.addWidget(self.set_path_btn)

        v_option_layout.addLayout(h_path_layout)

        v_option_layout.addWidget(self.file_list)

        h_remove_layout = QtWidgets.QHBoxLayout()
        h_remove_layout.addWidget(self.add_folder_btn)
        h_remove_layout.addWidget(self.add_file_btn)
        h_remove_layout.addWidget(self.remove_selected_btn)

        v_option_layout.addLayout(h_remove_layout)

        option_layout.addLayout(v_option_layout, 0, 0, 1, 1)

        self.option_widget.setLayout(option_layout)

        v_main_layout.addWidget(self.option_widget)

        v_main_layout.addWidget(self.progress_bar)

        v_main_layout.addWidget(self.pack_btn)

        main_layout.addLayout(v_main_layout, 0, 0, 1, 1)

        self.setLayout(main_layout)

    def pack_files(self):
        """Validate input, then start the packing worker for the chosen format."""
        if not self.name_edit.text():
            return
        if not self.file_list.get_quantity():
            return
        # Freeze the option pane and switch the bar to "busy" mode
        # (maximum 0 makes QProgressBar indeterminate).
        self.option_widget.setEnabled(False)
        self.progress_bar.setMaximum(0)

        name = self.name_edit.text() + self.archive_type_cb.itemData(self.archive_type_cb.currentIndex(),
                                                                     QtCore.Qt.DisplayRole)
        path = self.path_lbl.text()
        list_index = self.file_list.get_index_list()

        path_list = [self.file_model.filePath(index) for index in list_index]

        if self.archive_type_cb.currentText() == '.zip':
            self.pack_thread.set(pack_zip, name, path, path_list)
        elif self.archive_type_cb.currentText() == '.tar':
            self.pack_thread.set(pack_tar, name, path, path_list)

        self.pack_thread.start()

    def add_catalog(self):
        """Ask for a directory and append it to the pack list (symlinks skipped)."""
        catalog = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())

        if catalog and not QtCore.QFileInfo(catalog).isSymLink():
            index = self.file_model.index(catalog)
            if index not in self.file_list:
                self.file_list.add_element(index)
                self.file_list.add_to_model(self.create_item(index))

    def add_file(self):
        """Ask for a single file and append it to the pack list."""
        file, _filter = QtWidgets.QFileDialog.getOpenFileName(self, 'Wybierz plik', QtCore.QDir.homePath())

        if file:
            index = self.file_model.index(file)
            if index not in self.file_list:
                self.file_list.add_element(index)
                self.file_list.add_to_model(self.create_item(index))

    def ended(self):
        """Slot for status_signal: notify via tray icon and turn the dialog into a closer."""
        self.parent().trayIcon.showMessage('Zakonczono',
                                           'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.name),
                                           QtWidgets.QSystemTrayIcon.Information, 2000)
        self.pack_btn.setText('Zamknij')
        self.progress_bar.setMaximum(1)
        self.progress_bar.setValue(1)
        self.pack_thread.terminate()
        self.pack_btn.clicked.connect(self.close)

    def access(self):
        """Slot for access_signal: the worker hit a permission/IO error."""
        self.setWindowTitle('Brak dostepu')
        self.pack_btn.setText('Zamknij')
        self.progress_bar.setMaximum(1)
        self.progress_bar.setValue(1)
        self.pack_thread.terminate()
        self.pack_btn.clicked.connect(self.close)

    def progress(self, info):
        """Slot for progress_signal: show the file currently being packed.

        Bug fix: removed the leftover ``print('info', info)`` that was
        explicitly marked ``# remove`` in the original.
        """
        self.setWindowTitle(info)

    def closeEvent(self, QCloseEvent):
        """Refuse to close while packing; refresh the parent's file view."""
        if not self.pack_thread.ended:
            QCloseEvent.ignore()
        self.parent().catalog_list.setRootIndex(self.parent().catalog_list.rootIndex())
        self.parent().catalog_list.scrollTo(self.parent().catalog_list.currentIndex())
        self.parent().model_list.refresh(self.parent().catalog_list.rootIndex())
def pack_tar(thread, name, target_path, path_list):
    """Create tar archive *name* in *target_path* from every path in *path_list*.

    :param thread: worker owning ``progress_signal`` (emitted once per file
        added) and ``access_signal`` (emitted on IOError instead of raising)
    :param name: archive file name, including the ``.tar`` suffix
    :param target_path: directory the archive is written into
    :param path_list: files and/or directories to pack; directories are
        walked recursively and stored relative to their parent directory
    """
    tar_path = os.path.join(os.path.abspath(target_path), name)
    try:
        with tarfile.open(tar_path, 'w') as tar_file:
            for file_path in path_list:
                if not os.path.isdir(file_path):
                    thread.progress_signal.emit(file_path)
                    tar_file.add(file_path, arcname=os.path.basename(file_path))
                else:
                    catalog_path = os.path.dirname(os.path.abspath(file_path))
                    for root_folder, subfolders, files in os.walk(file_path):
                        for file in files:
                            # Bug fix: was thread.in_progress_signal, which does
                            # not match the progress_signal used for plain files
                            # and would raise AttributeError when packing a
                            # directory.
                            thread.progress_signal.emit(os.path.join(root_folder, file))
                            tar_file.add(os.path.join(root_folder, file),
                                         arcname=os.path.join(root_folder[len(catalog_path) + 1:], file))
    except IOError:
        thread.access_signal.emit()
def pack_zip(thread, name, target_path, path_list):
    """Pack the given files and directories into a zip archive.

    Args:
        thread: worker object exposing ``progress_signal`` (emitted with each
            file path as it is packed) and ``access_signal`` (emitted on IOError).
        name: archive file name, including the ``.zip`` extension.
        target_path: directory in which the archive is created.
        path_list: paths of files and directories to pack.
    """
    zip_path = os.path.join(os.path.abspath(target_path), name)
    try:
        with zipfile.ZipFile(zip_path, 'w') as zip_file:
            for path_file in path_list:
                if not os.path.isdir(path_file):
                    thread.progress_signal.emit(path_file)
                    zip_file.write(path_file, arcname=os.path.basename(path_file))
                else:
                    # Strip everything up to (and including) the directory's
                    # parent so archive members keep paths relative to it.
                    path_folder = os.path.dirname(os.path.abspath(path_file))
                    for root_folder, subfolders, files in os.walk(path_file):
                        for file in files:
                            full_path = os.path.join(root_folder, file)
                            # Bug fix: was ``thread.emit(...)`` — the thread
                            # object has no plain ``emit``; use the progress
                            # signal exactly like the file branch above. The
                            # old AttributeError was not caught by
                            # ``except IOError`` and crashed directory packing.
                            thread.progress_signal.emit(full_path)
                            zip_file.write(full_path,
                                           arcname=os.path.join(root_folder[len(path_folder) + 1:], file))
    except IOError:
        thread.access_signal.emit()
|
normal
|
{
"blob_id": "7a41826f65f2f55b4c678df2ac06027df6ca50d4",
"index": 3623,
"step-1": "<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n <mask token>\n <mask token>\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n 
h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n <mask token>\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 
'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n <mask token>\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n <mask token>\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n 
option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n <mask token>\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', 
QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n <mask token>\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n\n def __init__(self, model, index, path, parent=None):\n super().__init__(parent)\n self.setWindowTitle('Utworz archiwum')\n self.setWindowModality(QtCore.Qt.WindowModal)\n self.resize(350, 400)\n self.path = path\n self.file_model = model\n self.index = index\n self.create_components()\n self.create_layout()\n self.pack_thread = PackThread()\n self.pack_thread.status_signal.connect(self.ended)\n self.pack_thread.progress_signal.connect(self.progress)\n self.pack_thread.access_signal.connect(self.access)\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn = 
QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = 
self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n\n def add_catalog(self):\n catalog = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if catalog and not QtCore.QFileInfo(catalog).isSymLink():\n index = self.file_model.index(catalog)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def access(self):\n self.setWindowTitle('Brak dostepu')\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n 
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\ndef pack_tar(thread, name, target_path, path_list):\n tar_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with tarfile.open(tar_path, 'w') as tar_file:\n for file_path in path_list:\n if not os.path.isdir(file_path):\n thread.progress_signal.emit(file_path)\n tar_file.add(file_path, arcname=os.path.basename(file_path)\n )\n else:\n catalog_path = os.path.dirname(os.path.abspath(file_path))\n for root_folder, subfolders, files in os.walk(file_path):\n for file in files:\n thread.in_progress_signal.emit(os.path.join(\n root_folder, file))\n tar_file.add(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n catalog_path) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n\n\ndef pack_zip(thread, name, target_path, path_list):\n zip_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with zipfile.ZipFile(zip_path, 'w') as zip_file:\n for path_file in path_list:\n if not os.path.isdir(path_file):\n thread.progress_signal.emit(path_file)\n zip_file.write(path_file, arcname=os.path.basename(\n path_file))\n else:\n path_folder = os.path.dirname(os.path.abspath(path_file))\n for root_folder, subfolders, files in os.walk(path_file):\n for file in files:\n thread.emit(os.path.join(root_folder, file))\n zip_file.write(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n path_folder) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n",
"step-4": "__author__ = 'piotrek'\n<mask token>\n\n\nclass CreateArchive(QtWidgets.QDialog):\n\n def __init__(self, model, index, path, parent=None):\n super().__init__(parent)\n self.setWindowTitle('Utworz archiwum')\n self.setWindowModality(QtCore.Qt.WindowModal)\n self.resize(350, 400)\n self.path = path\n self.file_model = model\n self.index = index\n self.create_components()\n self.create_layout()\n self.pack_thread = PackThread()\n self.pack_thread.status_signal.connect(self.ended)\n self.pack_thread.progress_signal.connect(self.progress)\n self.pack_thread.access_signal.connect(self.access)\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(\n '\\\\w{30}'), self.name_edit))\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding,\n QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.\n set_path)\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n self.file_list.add_to_model(self.create_item(self.index))\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog',\n clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=\n self.add_file)\n self.remove_selected_btn 
= QtWidgets.QPushButton('Usun zaznaczone',\n clicked=self.file_list.remove_selected)\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n self.progress_lbl = QtWidgets.QLabel()\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.\n pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n v_option_layout.addLayout(h_name_layout)\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n v_option_layout.addLayout(h_path_layout)\n v_option_layout.addWidget(self.file_list)\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n v_option_layout.addLayout(h_remove_layout)\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n self.option_widget.setLayout(option_layout)\n v_main_layout.addWidget(self.option_widget)\n v_main_layout.addWidget(self.progress_bar)\n v_main_layout.addWidget(self.pack_btn)\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.\n archive_type_cb.currentIndex(), QtCore.Qt.DisplayRole)\n path = 
self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n path_list = [self.file_model.filePath(index) for index in list_index]\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n self.pack_thread.start()\n\n def add_catalog(self):\n catalog = QtWidgets.QFileDialog.getExistingDirectory(self,\n 'Wybierz katalog', QtCore.QDir.homePath())\n if catalog and not QtCore.QFileInfo(catalog).isSymLink():\n index = self.file_model.index(catalog)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self,\n 'Wybierz plik', QtCore.QDir.homePath())\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.\n name), QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def access(self):\n self.setWindowTitle('Brak dostepu')\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def progress(self, info):\n print('info', info)\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.\n rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.\n currentIndex())\n 
self.parent().model_list.refresh(self.parent().catalog_list.rootIndex()\n )\n\n\ndef pack_tar(thread, name, target_path, path_list):\n tar_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with tarfile.open(tar_path, 'w') as tar_file:\n for file_path in path_list:\n if not os.path.isdir(file_path):\n thread.progress_signal.emit(file_path)\n tar_file.add(file_path, arcname=os.path.basename(file_path)\n )\n else:\n catalog_path = os.path.dirname(os.path.abspath(file_path))\n for root_folder, subfolders, files in os.walk(file_path):\n for file in files:\n thread.in_progress_signal.emit(os.path.join(\n root_folder, file))\n tar_file.add(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n catalog_path) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n\n\ndef pack_zip(thread, name, target_path, path_list):\n zip_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with zipfile.ZipFile(zip_path, 'w') as zip_file:\n for path_file in path_list:\n if not os.path.isdir(path_file):\n thread.progress_signal.emit(path_file)\n zip_file.write(path_file, arcname=os.path.basename(\n path_file))\n else:\n path_folder = os.path.dirname(os.path.abspath(path_file))\n for root_folder, subfolders, files in os.walk(path_file):\n for file in files:\n thread.emit(os.path.join(root_folder, file))\n zip_file.write(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(\n path_folder) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n",
"step-5": "__author__ = 'piotrek'\n\nimport os\nimport zipfile\nimport tarfile\n\nfrom PyQt5 import QtWidgets\nfrom PyQt5 import QtGui\nfrom PyQt5 import QtCore\n\nfrom Widgets.list_view import ListView\nfrom Threads.PackThread import PackThread\n\n\nclass CreateArchive(QtWidgets.QDialog):\n\n def __init__(self, model, index, path, parent=None):\n super().__init__(parent)\n self.setWindowTitle('Utworz archiwum')\n self.setWindowModality(QtCore.Qt.WindowModal)\n self.resize(350, 400)\n\n self.path = path\n self.file_model = model\n self.index = index\n\n self.create_components()\n self.create_layout()\n\n self.pack_thread = PackThread()\n self.pack_thread.status_signal.connect(self.ended)\n self.pack_thread.progress_signal.connect(self.progress)\n self.pack_thread.access_signal.connect(self.access)\n\n def create_item(self, index):\n path = os.path.abspath(self.file_model.filePath(index))\n item = QtGui.QStandardItem(os.path.basename(path))\n item.setIcon(self.file_model.fileIcon(index))\n item.setCheckable(True)\n item.setEditable(False)\n return item\n\n def create_components(self):\n self.option_widget = QtWidgets.QWidget()\n\n self.name_lbl = QtWidgets.QLabel('Nazwa')\n\n self.name_edit = QtWidgets.QLineEdit('untitled')\n self.name_edit.setMaxLength(30)\n self.name_edit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp('\\w{30}'), self.name_edit))\n\n self.archive_type_cb = QtWidgets.QComboBox()\n self.archive_type_cb.addItem('.zip')\n self.archive_type_cb.addItem('.tar')\n\n self.path_lbl = QtWidgets.QLabel(self.path)\n self.path_lbl.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)\n self.path_lbl.setFrameShape(QtWidgets.QFrame.StyledPanel)\n self.path_lbl.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.set_path_btn = QtWidgets.QPushButton('Sciezka', clicked=self.set_path)\n\n self.file_list = ListView('Pliki do zapakowania')\n self.file_list.add_element(self.index)\n 
self.file_list.add_to_model(self.create_item(self.index))\n\n self.add_folder_btn = QtWidgets.QPushButton('Dodaj katalog', clicked=self.add_catalog)\n self.add_file_btn = QtWidgets.QPushButton('Dodaj plik', clicked=self.add_file)\n self.remove_selected_btn = QtWidgets.QPushButton('Usun zaznaczone', clicked=self.file_list.remove_selected)\n\n self.progress_bar = QtWidgets.QProgressBar()\n self.progress_bar.setMinimum(0)\n\n self.progress_lbl = QtWidgets.QLabel()\n\n self.pack_btn = QtWidgets.QPushButton('Zapakuj', clicked=self.pack_files)\n\n def set_path(self):\n path = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())\n\n if path:\n self.path = path\n self.path_lbl.setText(self.path)\n\n def create_layout(self):\n option_layout = QtWidgets.QGridLayout()\n v_option_layout = QtWidgets.QVBoxLayout()\n\n main_layout = QtWidgets.QGridLayout()\n v_main_layout = QtWidgets.QVBoxLayout()\n\n h_name_layout = QtWidgets.QHBoxLayout()\n h_name_layout.addWidget(self.name_lbl)\n h_name_layout.addWidget(self.name_edit)\n h_name_layout.addWidget(self.archive_type_cb)\n\n v_option_layout.addLayout(h_name_layout)\n\n h_path_layout = QtWidgets.QHBoxLayout()\n h_path_layout.addWidget(self.path_lbl)\n h_path_layout.addWidget(self.set_path_btn)\n\n v_option_layout.addLayout(h_path_layout)\n\n v_option_layout.addWidget(self.file_list)\n\n h_remove_layout = QtWidgets.QHBoxLayout()\n h_remove_layout.addWidget(self.add_folder_btn)\n h_remove_layout.addWidget(self.add_file_btn)\n h_remove_layout.addWidget(self.remove_selected_btn)\n\n v_option_layout.addLayout(h_remove_layout)\n\n option_layout.addLayout(v_option_layout, 0, 0, 1, 1)\n\n self.option_widget.setLayout(option_layout)\n\n v_main_layout.addWidget(self.option_widget)\n\n v_main_layout.addWidget(self.progress_bar)\n\n v_main_layout.addWidget(self.pack_btn)\n\n main_layout.addLayout(v_main_layout, 0, 0, 1, 1)\n\n self.setLayout(main_layout)\n\n def pack_files(self):\n if not 
self.name_edit.text():\n return\n if not self.file_list.get_quantity():\n return\n self.option_widget.setEnabled(False)\n self.progress_bar.setMaximum(0)\n\n name = self.name_edit.text() + self.archive_type_cb.itemData(self.archive_type_cb.currentIndex(),\n QtCore.Qt.DisplayRole)\n path = self.path_lbl.text()\n list_index = self.file_list.get_index_list()\n\n path_list = [self.file_model.filePath(index) for index in list_index]\n\n if self.archive_type_cb.currentText() == '.zip':\n self.pack_thread.set(pack_zip, name, path, path_list)\n elif self.archive_type_cb.currentText() == '.tar':\n self.pack_thread.set(pack_tar, name, path, path_list)\n\n self.pack_thread.start()\n\n def add_catalog(self):\n catalog = QtWidgets.QFileDialog.getExistingDirectory(self, 'Wybierz katalog', QtCore.QDir.homePath())\n\n if catalog and not QtCore.QFileInfo(catalog).isSymLink():\n index = self.file_model.index(catalog)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def add_file(self):\n file, _filter = QtWidgets.QFileDialog.getOpenFileName(self, 'Wybierz plik', QtCore.QDir.homePath())\n\n if file:\n index = self.file_model.index(file)\n if index not in self.file_list:\n self.file_list.add_element(index)\n self.file_list.add_to_model(self.create_item(index))\n\n def ended(self):\n self.parent().trayIcon.showMessage('Zakonczono',\n 'Zakonczono zapakowywanie pliku {0}'.format(self.pack_thread.name),\n QtWidgets.QSystemTrayIcon.Information, 2000)\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def access(self):\n self.setWindowTitle('Brak dostepu')\n self.pack_btn.setText('Zamknij')\n self.progress_bar.setMaximum(1)\n self.progress_bar.setValue(1)\n self.pack_thread.terminate()\n self.pack_btn.clicked.connect(self.close)\n\n def progress(self, info):\n print('info', info) # 
remove\n self.setWindowTitle(info)\n\n def closeEvent(self, QCloseEvent):\n if not self.pack_thread.ended:\n QCloseEvent.ignore()\n self.parent().catalog_list.setRootIndex(self.parent().catalog_list.rootIndex())\n self.parent().catalog_list.scrollTo(self.parent().catalog_list.currentIndex())\n self.parent().model_list.refresh(self.parent().catalog_list.rootIndex())\n\n\ndef pack_tar(thread, name, target_path, path_list):\n tar_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with tarfile.open(tar_path, 'w') as tar_file:\n for file_path in path_list:\n if not os.path.isdir(file_path):\n thread.progress_signal.emit(file_path)\n tar_file.add(file_path, arcname=os.path.basename(file_path))\n else:\n catalog_path = os.path.dirname(os.path.abspath(file_path))\n for root_folder, subfolders, files in os.walk(file_path):\n for file in files:\n thread.in_progress_signal.emit(os.path.join(root_folder, file))\n tar_file.add(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(catalog_path) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n\n\ndef pack_zip(thread, name, target_path, path_list):\n zip_path = os.path.join(os.path.abspath(target_path), name)\n try:\n with zipfile.ZipFile(zip_path, 'w') as zip_file:\n for path_file in path_list:\n if not os.path.isdir(path_file):\n thread.progress_signal.emit(path_file)\n zip_file.write(path_file, arcname=os.path.basename(path_file))\n else:\n path_folder = os.path.dirname(os.path.abspath(path_file))\n for root_folder, subfolders, files in os.walk(path_file):\n for file in files:\n thread.emit(os.path.join(root_folder, file))\n zip_file.write(os.path.join(root_folder, file),\n arcname=os.path.join(root_folder[len(path_folder) + 1:], file))\n except IOError:\n thread.access_signal.emit()\n",
"step-ids": [
9,
10,
15,
16,
18
]
}
|
[
9,
10,
15,
16,
18
] |
<|reserved_special_token_0|>
@app.route('/')
def home():
"""List all available api routes."""
return (
f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'
)
<|reserved_special_token_0|>
@app.route('/api/v1.0/tobs')
def tobs():
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')
tobs_results = session.query(Measurement.date, Measurement.tobs).filter(
Measurement.date < q_end).filter(Measurement.date >= q_start).all()
session.close()
tobslist = list(np.ravel(tobs_results))
return jsonify(tobslist)
@app.route('/api/v1.0/<start>')
def startonly(start):
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.
tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start
).filter(Measurement.date <= q_end).all()
statslist = list(np.ravel(stats))
return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist
[0], 'TAVG': statslist[1], 'TMAX': statslist[2]})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
def home():
"""List all available api routes."""
return (
f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'
)
@app.route('/api/v1.0/precipitation')
def precip():
session = Session(engine)
results = session.query(Measurement.date, Measurement.prcp).all()
session.close()
precip = list(np.ravel(results))
return jsonify(precip)
@app.route('/api/v1.0/stations')
def stations():
session = Session(engine)
results = session.query(Station.station, Station.name).all()
session.close()
stationlist = list(np.ravel(results))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')
tobs_results = session.query(Measurement.date, Measurement.tobs).filter(
Measurement.date < q_end).filter(Measurement.date >= q_start).all()
session.close()
tobslist = list(np.ravel(tobs_results))
return jsonify(tobslist)
@app.route('/api/v1.0/<start>')
def startonly(start):
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.
tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start
).filter(Measurement.date <= q_end).all()
statslist = list(np.ravel(stats))
return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist
[0], 'TAVG': statslist[1], 'TMAX': statslist[2]})
@app.route('/api/v1.0/<start>/<end>')
def daterange(start, end):
session = Session(engine)
stats2 = session.query(func.min(Measurement.tobs), func.avg(Measurement
.tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start
).filter(Measurement.date <= end).all()
statslist = list(np.ravel(stats2))
return jsonify({'StartDate': start, 'EndDate': end, 'TMIN': statslist[0
], 'TAVG': statslist[1], 'TMAX': statslist[2]})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
Base.prepare(engine, reflect=True)
<|reserved_special_token_0|>
@app.route('/')
def home():
"""List all available api routes."""
return (
f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'
)
@app.route('/api/v1.0/precipitation')
def precip():
session = Session(engine)
results = session.query(Measurement.date, Measurement.prcp).all()
session.close()
precip = list(np.ravel(results))
return jsonify(precip)
@app.route('/api/v1.0/stations')
def stations():
session = Session(engine)
results = session.query(Station.station, Station.name).all()
session.close()
stationlist = list(np.ravel(results))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')
tobs_results = session.query(Measurement.date, Measurement.tobs).filter(
Measurement.date < q_end).filter(Measurement.date >= q_start).all()
session.close()
tobslist = list(np.ravel(tobs_results))
return jsonify(tobslist)
@app.route('/api/v1.0/<start>')
def startonly(start):
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.
tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start
).filter(Measurement.date <= q_end).all()
statslist = list(np.ravel(stats))
return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist
[0], 'TAVG': statslist[1], 'TMAX': statslist[2]})
@app.route('/api/v1.0/<start>/<end>')
def daterange(start, end):
session = Session(engine)
stats2 = session.query(func.min(Measurement.tobs), func.avg(Measurement
.tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start
).filter(Measurement.date <= end).all()
statslist = list(np.ravel(stats2))
return jsonify({'StartDate': start, 'EndDate': end, 'TMIN': statslist[0
], 'TAVG': statslist[1], 'TMAX': statslist[2]})
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
import numpy as np
import datetime as dt
from datetime import timedelta
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify
postgresStr = (
'postgresql://postgres:password@localhost:5432/HW8-sqlalchemy-vacation')
engine = create_engine(postgresStr)
Base = automap_base()
Base.prepare(engine, reflect=True)
Measurement = Base.classes.measurements
Station = Base.classes.station
app = Flask(__name__)
@app.route('/')
def home():
"""List all available api routes."""
return (
f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'
)
@app.route('/api/v1.0/precipitation')
def precip():
session = Session(engine)
results = session.query(Measurement.date, Measurement.prcp).all()
session.close()
precip = list(np.ravel(results))
return jsonify(precip)
@app.route('/api/v1.0/stations')
def stations():
session = Session(engine)
results = session.query(Station.station, Station.name).all()
session.close()
stationlist = list(np.ravel(results))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')
tobs_results = session.query(Measurement.date, Measurement.tobs).filter(
Measurement.date < q_end).filter(Measurement.date >= q_start).all()
session.close()
tobslist = list(np.ravel(tobs_results))
return jsonify(tobslist)
@app.route('/api/v1.0/<start>')
def startonly(start):
session = Session(engine)
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime('%Y-%m-%d')
stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.
tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start
).filter(Measurement.date <= q_end).all()
statslist = list(np.ravel(stats))
return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist
[0], 'TAVG': statslist[1], 'TMAX': statslist[2]})
@app.route('/api/v1.0/<start>/<end>')
def daterange(start, end):
session = Session(engine)
stats2 = session.query(func.min(Measurement.tobs), func.avg(Measurement
.tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start
).filter(Measurement.date <= end).all()
statslist = list(np.ravel(stats2))
return jsonify({'StartDate': start, 'EndDate': end, 'TMIN': statslist[0
], 'TAVG': statslist[1], 'TMAX': statslist[2]})
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
#dependencies go here
import numpy as np
import datetime as dt
from datetime import timedelta
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify
#Set up the engine to connect to HW8 database
postgresStr = ("postgresql://postgres:password@localhost:5432/HW8-sqlalchemy-vacation")
engine = create_engine(postgresStr)
# reflect existing tables/classes
Base = automap_base()
Base.prepare(engine, reflect=True)
# Save reference to the tables
Measurement = Base.classes.measurements
Station = Base.classes.station
# Flask Setup
app = Flask(__name__)
# Set up flask routes
@app.route("/")
def home():
"""List all available api routes."""
return (
f"Available Routes:<br/>"
f"/api/v1.0/precipitation<br/>"
f"/api/v1.0/stations<br/>"
f"/api/v1.0/tobs<br/>"
f"/api/v1.0/<start><br/>"
f"/api/v1.0/<start>/<end><br/>"
)
@app.route("/api/v1.0/precipitation")
def precip():
#Convert the query results to a Dictionary using `date` as the key and `prcp` as the value.
#Return the JSON representation of your dictionary.
# Create our session (link) from Python to the DB
session = Session(engine)
#query the db, get a list of all precip measurements and dates
results = session.query(Measurement.date, Measurement.prcp).all()
session.close()
# Convert list of tuples into normal list
precip = list(np.ravel(results))
return jsonify(precip)
@app.route("/api/v1.0/stations")
def stations():
#Return a JSON list of stations from the dataset
# Create our session (link) from Python to the DB
session = Session(engine)
#query the db, get a list of the stations and their respective names
results = session.query(Station.station, Station.name).all()
session.close()
# Convert list of tuples into normal list
stationlist = list(np.ravel(results))
return jsonify(stationlist)
#query for the dates and temperature observations from a year from the last data point.
# return a JSON list of Temperature Observations (tobs) for the previous year.
@app.route("/api/v1.0/tobs")
def tobs():
# Create our session (link) from Python to the DB
session = Session(engine)
#find the last date in the dataset, query the prior year's temperature observations
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime("%Y-%m-%d")
q_start = (last[0][0]-dt.timedelta(days = 365)).strftime("%Y-%m-%d")
tobs_results = session.query(Measurement.date, Measurement.tobs).\
filter(Measurement.date < q_end).\
filter(Measurement.date >= q_start).all()
session.close()
# Convert list of tuples into normal list
tobslist = list(np.ravel(tobs_results))
return jsonify(tobslist)
@app.route("/api/v1.0/<start>")
def startonly(start):
# Create our session (link) from Python to the DB
session = Session(engine)
#find the last date in the dataset to use as an ending point for our temperature calculations
last = session.query(func.max(Measurement.date)).limit(1).all()
q_end = last[0][0].strftime("%Y-%m-%d")
stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= start).\
filter(Measurement.date <= q_end).all()
statslist = list(np.ravel(stats))
return jsonify({"StartDate":start,"EndDate":q_end,"TMIN": statslist[0],"TAVG":statslist[1],"TMAX":statslist[2]})
#Return a JSON list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.
#When given the start only, calculate `TMIN`, `TAVG`, and `TMAX` for all dates greater than and equal to the start date.
@app.route("/api/v1.0/<start>/<end>")
def daterange(start,end):
# Create our session (link) from Python to the DB
session = Session(engine)
stats2 = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= start).\
filter(Measurement.date <= end).all()
statslist = list(np.ravel(stats2))
return jsonify({"StartDate":start,"EndDate":end,"TMIN": statslist[0],"TAVG":statslist[1],"TMAX":statslist[2]})
#Return a JSON list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.
#When given the start and the end date, calculate the `TMIN`, `TAVG`, and `TMAX` for dates between the start and end date inclusive.
if __name__ == '__main__':
app.run(debug=True)
|
flexible
|
{
"blob_id": "7ab964352c1d51b70e3a1a7bf0a624f2d96cfd55",
"index": 8168,
"step-1": "<mask token>\n\n\[email protected]('/')\ndef home():\n \"\"\"List all available api routes.\"\"\"\n return (\n f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'\n )\n\n\n<mask token>\n\n\[email protected]('/api/v1.0/tobs')\ndef tobs():\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')\n tobs_results = session.query(Measurement.date, Measurement.tobs).filter(\n Measurement.date < q_end).filter(Measurement.date >= q_start).all()\n session.close()\n tobslist = list(np.ravel(tobs_results))\n return jsonify(tobslist)\n\n\[email protected]('/api/v1.0/<start>')\ndef startonly(start):\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.\n tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start\n ).filter(Measurement.date <= q_end).all()\n statslist = list(np.ravel(stats))\n return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist\n [0], 'TAVG': statslist[1], 'TMAX': statslist[2]})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef home():\n \"\"\"List all available api routes.\"\"\"\n return (\n f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'\n )\n\n\[email protected]('/api/v1.0/precipitation')\ndef precip():\n session = Session(engine)\n results = session.query(Measurement.date, Measurement.prcp).all()\n session.close()\n precip = list(np.ravel(results))\n return jsonify(precip)\n\n\[email protected]('/api/v1.0/stations')\ndef stations():\n session = Session(engine)\n results = session.query(Station.station, Station.name).all()\n session.close()\n stationlist = list(np.ravel(results))\n return jsonify(stationlist)\n\n\[email protected]('/api/v1.0/tobs')\ndef tobs():\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')\n tobs_results = session.query(Measurement.date, Measurement.tobs).filter(\n Measurement.date < q_end).filter(Measurement.date >= q_start).all()\n session.close()\n tobslist = list(np.ravel(tobs_results))\n return jsonify(tobslist)\n\n\[email protected]('/api/v1.0/<start>')\ndef startonly(start):\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.\n tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start\n ).filter(Measurement.date <= q_end).all()\n statslist = list(np.ravel(stats))\n return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist\n [0], 'TAVG': statslist[1], 'TMAX': statslist[2]})\n\n\[email protected]('/api/v1.0/<start>/<end>')\ndef daterange(start, end):\n session = Session(engine)\n stats2 = session.query(func.min(Measurement.tobs), func.avg(Measurement\n .tobs), 
func.max(Measurement.tobs)).filter(Measurement.date >= start\n ).filter(Measurement.date <= end).all()\n statslist = list(np.ravel(stats2))\n return jsonify({'StartDate': start, 'EndDate': end, 'TMIN': statslist[0\n ], 'TAVG': statslist[1], 'TMAX': statslist[2]})\n\n\n<mask token>\n",
"step-3": "<mask token>\nBase.prepare(engine, reflect=True)\n<mask token>\n\n\[email protected]('/')\ndef home():\n \"\"\"List all available api routes.\"\"\"\n return (\n f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'\n )\n\n\[email protected]('/api/v1.0/precipitation')\ndef precip():\n session = Session(engine)\n results = session.query(Measurement.date, Measurement.prcp).all()\n session.close()\n precip = list(np.ravel(results))\n return jsonify(precip)\n\n\[email protected]('/api/v1.0/stations')\ndef stations():\n session = Session(engine)\n results = session.query(Station.station, Station.name).all()\n session.close()\n stationlist = list(np.ravel(results))\n return jsonify(stationlist)\n\n\[email protected]('/api/v1.0/tobs')\ndef tobs():\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')\n tobs_results = session.query(Measurement.date, Measurement.tobs).filter(\n Measurement.date < q_end).filter(Measurement.date >= q_start).all()\n session.close()\n tobslist = list(np.ravel(tobs_results))\n return jsonify(tobslist)\n\n\[email protected]('/api/v1.0/<start>')\ndef startonly(start):\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.\n tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start\n ).filter(Measurement.date <= q_end).all()\n statslist = list(np.ravel(stats))\n return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist\n [0], 'TAVG': statslist[1], 'TMAX': statslist[2]})\n\n\[email protected]('/api/v1.0/<start>/<end>')\ndef daterange(start, end):\n session = Session(engine)\n stats2 = 
session.query(func.min(Measurement.tobs), func.avg(Measurement\n .tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start\n ).filter(Measurement.date <= end).all()\n statslist = list(np.ravel(stats2))\n return jsonify({'StartDate': start, 'EndDate': end, 'TMIN': statslist[0\n ], 'TAVG': statslist[1], 'TMAX': statslist[2]})\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "import numpy as np\nimport datetime as dt\nfrom datetime import timedelta\nimport sqlalchemy\nfrom sqlalchemy.ext.automap import automap_base\nfrom sqlalchemy.orm import Session\nfrom sqlalchemy import create_engine, func\nfrom flask import Flask, jsonify\npostgresStr = (\n 'postgresql://postgres:password@localhost:5432/HW8-sqlalchemy-vacation')\nengine = create_engine(postgresStr)\nBase = automap_base()\nBase.prepare(engine, reflect=True)\nMeasurement = Base.classes.measurements\nStation = Base.classes.station\napp = Flask(__name__)\n\n\[email protected]('/')\ndef home():\n \"\"\"List all available api routes.\"\"\"\n return (\n f'Available Routes:<br/>/api/v1.0/precipitation<br/>/api/v1.0/stations<br/>/api/v1.0/tobs<br/>/api/v1.0/<start><br/>/api/v1.0/<start>/<end><br/>'\n )\n\n\[email protected]('/api/v1.0/precipitation')\ndef precip():\n session = Session(engine)\n results = session.query(Measurement.date, Measurement.prcp).all()\n session.close()\n precip = list(np.ravel(results))\n return jsonify(precip)\n\n\[email protected]('/api/v1.0/stations')\ndef stations():\n session = Session(engine)\n results = session.query(Station.station, Station.name).all()\n session.close()\n stationlist = list(np.ravel(results))\n return jsonify(stationlist)\n\n\[email protected]('/api/v1.0/tobs')\ndef tobs():\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n q_start = (last[0][0] - dt.timedelta(days=365)).strftime('%Y-%m-%d')\n tobs_results = session.query(Measurement.date, Measurement.tobs).filter(\n Measurement.date < q_end).filter(Measurement.date >= q_start).all()\n session.close()\n tobslist = list(np.ravel(tobs_results))\n return jsonify(tobslist)\n\n\[email protected]('/api/v1.0/<start>')\ndef startonly(start):\n session = Session(engine)\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime('%Y-%m-%d')\n stats = 
session.query(func.min(Measurement.tobs), func.avg(Measurement.\n tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start\n ).filter(Measurement.date <= q_end).all()\n statslist = list(np.ravel(stats))\n return jsonify({'StartDate': start, 'EndDate': q_end, 'TMIN': statslist\n [0], 'TAVG': statslist[1], 'TMAX': statslist[2]})\n\n\[email protected]('/api/v1.0/<start>/<end>')\ndef daterange(start, end):\n session = Session(engine)\n stats2 = session.query(func.min(Measurement.tobs), func.avg(Measurement\n .tobs), func.max(Measurement.tobs)).filter(Measurement.date >= start\n ).filter(Measurement.date <= end).all()\n statslist = list(np.ravel(stats2))\n return jsonify({'StartDate': start, 'EndDate': end, 'TMIN': statslist[0\n ], 'TAVG': statslist[1], 'TMAX': statslist[2]})\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "#dependencies go here\nimport numpy as np\nimport datetime as dt\nfrom datetime import timedelta\n\n\nimport sqlalchemy\nfrom sqlalchemy.ext.automap import automap_base\nfrom sqlalchemy.orm import Session\nfrom sqlalchemy import create_engine, func\n\nfrom flask import Flask, jsonify\n\n\n\n\n#Set up the engine to connect to HW8 database\npostgresStr = (\"postgresql://postgres:password@localhost:5432/HW8-sqlalchemy-vacation\")\nengine = create_engine(postgresStr)\n\n# reflect existing tables/classes\nBase = automap_base()\nBase.prepare(engine, reflect=True)\n\n# Save reference to the tables\nMeasurement = Base.classes.measurements\nStation = Base.classes.station\n\n# Flask Setup\napp = Flask(__name__)\n\n# Set up flask routes\[email protected](\"/\")\ndef home():\n \"\"\"List all available api routes.\"\"\"\n return (\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n f\"/api/v1.0/<start><br/>\"\n f\"/api/v1.0/<start>/<end><br/>\"\n )\n\n\[email protected](\"/api/v1.0/precipitation\")\ndef precip():\n \n #Convert the query results to a Dictionary using `date` as the key and `prcp` as the value.\n #Return the JSON representation of your dictionary.\n \n # Create our session (link) from Python to the DB\n session = Session(engine)\n\n #query the db, get a list of all precip measurements and dates\n results = session.query(Measurement.date, Measurement.prcp).all()\n\n session.close()\n\n # Convert list of tuples into normal list\n precip = list(np.ravel(results))\n return jsonify(precip)\n\[email protected](\"/api/v1.0/stations\")\ndef stations():\n \n #Return a JSON list of stations from the dataset\n \n # Create our session (link) from Python to the DB\n session = Session(engine)\n\n #query the db, get a list of the stations and their respective names\n results = session.query(Station.station, Station.name).all()\n\n session.close()\n\n # Convert list of tuples into normal list\n 
stationlist = list(np.ravel(results))\n return jsonify(stationlist)\n\n#query for the dates and temperature observations from a year from the last data point.\n# return a JSON list of Temperature Observations (tobs) for the previous year.\n\[email protected](\"/api/v1.0/tobs\")\ndef tobs():\n \n # Create our session (link) from Python to the DB\n session = Session(engine)\n\n \n #find the last date in the dataset, query the prior year's temperature observations\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime(\"%Y-%m-%d\")\n q_start = (last[0][0]-dt.timedelta(days = 365)).strftime(\"%Y-%m-%d\")\n \n tobs_results = session.query(Measurement.date, Measurement.tobs).\\\n filter(Measurement.date < q_end).\\\n filter(Measurement.date >= q_start).all()\n \n session.close()\n\n # Convert list of tuples into normal list\n tobslist = list(np.ravel(tobs_results))\n \n return jsonify(tobslist)\n\[email protected](\"/api/v1.0/<start>\")\ndef startonly(start):\n \n # Create our session (link) from Python to the DB\n session = Session(engine)\n \n #find the last date in the dataset to use as an ending point for our temperature calculations\n last = session.query(func.max(Measurement.date)).limit(1).all()\n q_end = last[0][0].strftime(\"%Y-%m-%d\")\n \n stats = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\\\n filter(Measurement.date >= start).\\\n filter(Measurement.date <= q_end).all()\n\n statslist = list(np.ravel(stats))\n \n return jsonify({\"StartDate\":start,\"EndDate\":q_end,\"TMIN\": statslist[0],\"TAVG\":statslist[1],\"TMAX\":statslist[2]})\n\n #Return a JSON list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.\n #When given the start only, calculate `TMIN`, `TAVG`, and `TMAX` for all dates greater than and equal to the start date.\n\[email protected](\"/api/v1.0/<start>/<end>\")\ndef 
daterange(start,end):\n \n # Create our session (link) from Python to the DB\n session = Session(engine)\n \n stats2 = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\\\n filter(Measurement.date >= start).\\\n filter(Measurement.date <= end).all()\n\n statslist = list(np.ravel(stats2))\n \n return jsonify({\"StartDate\":start,\"EndDate\":end,\"TMIN\": statslist[0],\"TAVG\":statslist[1],\"TMAX\":statslist[2]})\n\n #Return a JSON list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.\n #When given the start and the end date, calculate the `TMIN`, `TAVG`, and `TMAX` for dates between the start and end date inclusive.\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-ids": [
3,
6,
7,
9,
10
]
}
|
[
3,
6,
7,
9,
10
] |
<|reserved_special_token_0|>
class FDA_node(object):
<|reserved_special_token_0|>
def grow(self):
self.right = FDA_node()
self.left = FDA_node()
def find_optimal_param(self, x, y):
self.m = self.method.find_optimal_param(x, y)
if self.left != None and self.right != None:
left, right = self.divide_data(x)
self.left.find_optimal_param(x[left], y[left])
self.right.find_optimal_param(x[right], y[right])
def fit(self, x, y):
self.method.fit(x, y)
if self.left != None and self.right != None:
left, right = self.divide_data(x)
if max(y[left]) == 0 or min(y[right]) == 1:
self.left = self.right = None
else:
self.right.fit(x[left], y[left])
self.left.fit(x[right], y[right])
<|reserved_special_token_0|>
def predict(self, x):
if self.left == None and self.right == None:
pred = self.method.predict(x, self.m)
elif self.left != None and self.right != None:
left, right = self.divide_data(x)
l_pred = self.left.predict(x[left])
r_pred = self.right.predict(x[right])
pred = np.ones(x.shape[0]) * 2
pred[left] = l_pred
pred[right] = r_pred
return pred
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FDA_node(object):
def __init__(self):
"""Constructor"""
self.method = FdaUtils()
self.left = None
self.right = None
self.m = 0.5
def grow(self):
self.right = FDA_node()
self.left = FDA_node()
def find_optimal_param(self, x, y):
self.m = self.method.find_optimal_param(x, y)
if self.left != None and self.right != None:
left, right = self.divide_data(x)
self.left.find_optimal_param(x[left], y[left])
self.right.find_optimal_param(x[right], y[right])
def fit(self, x, y):
self.method.fit(x, y)
if self.left != None and self.right != None:
left, right = self.divide_data(x)
if max(y[left]) == 0 or min(y[right]) == 1:
self.left = self.right = None
else:
self.right.fit(x[left], y[left])
self.left.fit(x[right], y[right])
<|reserved_special_token_0|>
def predict(self, x):
if self.left == None and self.right == None:
pred = self.method.predict(x, self.m)
elif self.left != None and self.right != None:
left, right = self.divide_data(x)
l_pred = self.left.predict(x[left])
r_pred = self.right.predict(x[right])
pred = np.ones(x.shape[0]) * 2
pred[left] = l_pred
pred[right] = r_pred
return pred
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_optimal_param(lda, x_train, y_train):
probs_train = lda.predict_proba(x_train)[:, 1]
y_train = [x for _, x in sorted(zip(probs_train, y_train))]
y_train = np.array(y_train)
probs_train.sort()
Se = []
Sp = []
for p in range(len(probs_train)):
tp = np.count_nonzero(y_train[p:] == 1)
fp = np.count_nonzero(y_train[p:] == 0)
tn = np.count_nonzero(y_train[:p] == 0)
fn = np.count_nonzero(y_train[:p] == 1)
Se.append(tp / (tp + fn))
Sp.append(tn / (tn + fp))
mx = np.argmax(-(1 - np.array(Sp) - np.array(Se)))
return probs_train[mx]
def predict(lda, x, y, m):
tp = 0
fp = 0
tn = 0
fn = 0
if len(x) != 0:
probs = lda.predict_proba(x)[:, 1]
for j in range(len(x)):
if probs[j] > m:
if y[j] == 1:
tp += 1
else:
fp += 1
elif y[j] == 1:
fn += 1
else:
tn += 1
return tp, fp, fn, tn
<|reserved_special_token_0|>
class FDA_node(object):
def __init__(self):
"""Constructor"""
self.method = FdaUtils()
self.left = None
self.right = None
self.m = 0.5
def grow(self):
self.right = FDA_node()
self.left = FDA_node()
def find_optimal_param(self, x, y):
self.m = self.method.find_optimal_param(x, y)
if self.left != None and self.right != None:
left, right = self.divide_data(x)
self.left.find_optimal_param(x[left], y[left])
self.right.find_optimal_param(x[right], y[right])
def fit(self, x, y):
self.method.fit(x, y)
if self.left != None and self.right != None:
left, right = self.divide_data(x)
if max(y[left]) == 0 or min(y[right]) == 1:
self.left = self.right = None
else:
self.right.fit(x[left], y[left])
self.left.fit(x[right], y[right])
def divide_data(self, x):
probs = self.method.predict_proba(x)[:, 1]
left = probs <= self.m
right = probs > self.m
return left, right
def predict(self, x):
if self.left == None and self.right == None:
pred = self.method.predict(x, self.m)
elif self.left != None and self.right != None:
left, right = self.divide_data(x)
l_pred = self.left.predict(x[left])
r_pred = self.right.predict(x[right])
pred = np.ones(x.shape[0]) * 2
pred[left] = l_pred
pred[right] = r_pred
return pred
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import numpy as np
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from preprocessing import *
from utils import *
def find_optimal_param(lda, x_train, y_train):
probs_train = lda.predict_proba(x_train)[:, 1]
y_train = [x for _, x in sorted(zip(probs_train, y_train))]
y_train = np.array(y_train)
probs_train.sort()
Se = []
Sp = []
for p in range(len(probs_train)):
tp = np.count_nonzero(y_train[p:] == 1)
fp = np.count_nonzero(y_train[p:] == 0)
tn = np.count_nonzero(y_train[:p] == 0)
fn = np.count_nonzero(y_train[:p] == 1)
Se.append(tp / (tp + fn))
Sp.append(tn / (tn + fp))
mx = np.argmax(-(1 - np.array(Sp) - np.array(Se)))
return probs_train[mx]
def predict(lda, x, y, m):
tp = 0
fp = 0
tn = 0
fn = 0
if len(x) != 0:
probs = lda.predict_proba(x)[:, 1]
for j in range(len(x)):
if probs[j] > m:
if y[j] == 1:
tp += 1
else:
fp += 1
elif y[j] == 1:
fn += 1
else:
tn += 1
return tp, fp, fn, tn
from methodutils import FdaUtils
class FDA_node(object):
    """Node of a binary decision tree whose splits are Fisher discriminant
    (FDA) classifiers.

    Each node owns its FdaUtils model and a probability threshold ``m``;
    samples with P(class=1) <= m flow to the left child, the rest to the
    right child.  A node without children is a leaf and classifies directly.
    """

    def __init__(self):
        """Create a leaf node with an untrained FDA model."""
        self.method = FdaUtils()  # per-node discriminant model
        self.left = None  # child for samples with prob <= m
        self.right = None  # child for samples with prob > m
        self.m = 0.5  # probability threshold separating the children

    def grow(self):
        """Attach a fresh pair of children, turning this leaf into an
        internal node."""
        self.right = FDA_node()
        self.left = FDA_node()

    def find_optimal_param(self, x, y):
        """Tune this node's threshold, then recursively tune the children
        on their respective partitions."""
        self.m = self.method.find_optimal_param(x, y)
        if self.left is not None and self.right is not None:
            left, right = self.divide_data(x)
            self.left.find_optimal_param(x[left], y[left])
            self.right.find_optimal_param(x[right], y[right])

    def fit(self, x, y):
        """Fit this node's model and recursively fit the children.

        BUG FIX: the left partition now trains the left child and the right
        partition the right child, matching find_optimal_param and predict
        (the partitions were previously swapped between the children).
        """
        self.method.fit(x, y)
        if self.left is not None and self.right is not None:
            left, right = self.divide_data(x)
            # a pure side means further splitting is pointless: prune
            if max(y[left]) == 0 or min(y[right]) == 1:
                self.left = self.right = None
            else:
                self.left.fit(x[left], y[left])
                self.right.fit(x[right], y[right])

    def divide_data(self, x):
        """Return boolean masks (left, right) partitioning ``x`` by this
        node's probability threshold."""
        probs = self.method.predict_proba(x)[:, 1]
        left = probs <= self.m
        right = probs > self.m
        return left, right

    def predict(self, x):
        """Predict labels: a leaf classifies directly, an internal node
        routes samples to the matching child and merges the results."""
        if self.left is None and self.right is None:
            pred = self.method.predict(x, self.m)
        elif self.left is not None and self.right is not None:
            left, right = self.divide_data(x)
            l_pred = self.left.predict(x[left])
            r_pred = self.right.predict(x[right])
            pred = np.ones(x.shape[0]) * 2  # sentinel; fully overwritten
            pred[left] = l_pred
            pred[right] = r_pred
        return pred
if __name__ == '__main__':
    np.seterr(all='raise')
    from sklearn.metrics import confusion_matrix
    from dataset import load_dataset, load_new_dataset_6002, diagnosis_to_binary, MOST_FREQ_DIAGS_NUMS_NEW
    from fisher_discriminant import FisherDiscriminantAnalisys
    num_components = 100
    # labels come from the old/new dump; features from the 6002 dump.
    # `with` guarantees the handles are closed even if unpickling raises.
    with open(
        'C:\\Users\\donte_000\\PycharmProjects\\Basic_Methods\\data\\data_old_and_new_without_noise.pkl'
        , 'rb') as infile:
        old, new = pkl.load(infile)
    Y = old['y']
    with open(
        'C:\\Users\\donte_000\\PycharmProjects\\Basic_Methods\\data\\6002_old_Dif.pkl'
        , 'rb') as outfile:
        X = pkl.load(outfile)
    # project the features onto principal components before fitting
    pca = PCA(n_components=X.shape[0])
    b = pca.fit_transform(X)
    # one cross-validated run per diagnosis column
    for d in reversed(MOST_FREQ_DIAGS_NUMS_NEW):
        y_prediction = []
        y_labels = []
        for train_index, test_index in cross_val(b.shape[0], 500):
            tree = FDA_node()
            tree.grow()
            tree.fit(b[train_index, :num_components], Y[train_index, d])
            tree.find_optimal_param(b[train_index, :num_components],
                                    Y[train_index, d])
            y_prediction.append(tree.predict(b[test_index, :num_components]))
            y_labels.append(Y[test_index, d])
        y_prediction = np.array(y_prediction).flatten()
        y_labels = np.array(y_labels).flatten()
        tn, fp, fn, tp = confusion_matrix(y_labels, y_prediction).ravel()
        test_se = tp / (tp + fn)
        test_sp = tn / (tn + fp)
        # BUG FIX: the Se value was printed under the Sp label and vice versa
        print('Val. Se = %s, Val. Sp = %s' % (round(test_se, 4), round(
            test_sp, 4)))
<|reserved_special_token_1|>
import numpy as np
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from preprocessing import *
from utils import *
def find_optimal_param(lda, x_train, y_train):
    """Return the probability threshold maximizing Se + Sp (Youden's J).

    Every sorted positive-class probability emitted by ``lda`` on
    ``x_train`` is tried as a cutoff; labels are re-ordered to match.
    """
    probs = lda.predict_proba(x_train)[:, 1]
    labels = np.array([pair[1] for pair in sorted(zip(probs, y_train))])
    probs.sort()
    se_list = []
    sp_list = []
    n = len(probs)
    cut = 0
    while cut < n:
        above = labels[cut:]
        below = labels[:cut]
        tp = np.count_nonzero(above == 1)
        fp = np.count_nonzero(above == 0)
        tn = np.count_nonzero(below == 0)
        fn = np.count_nonzero(below == 1)
        se_list.append(tp / (tp + fn))
        sp_list.append(tn / (tn + fp))
        cut += 1
    best = np.argmax(-(1 - np.array(sp_list) - np.array(se_list)))
    return probs[best]
def predict(lda, x, y, m):
    """Count confusion-matrix entries for ``lda`` thresholded at ``m``.

    Returns (tp, fp, fn, tn); when ``x`` is empty no probabilities are
    computed and all four counts are zero.
    """
    counts = {'tp': 0, 'fp': 0, 'tn': 0, 'fn': 0}
    if len(x) != 0:
        probs = lda.predict_proba(x)[:, 1]
        j = 0
        while j < len(x):
            if probs[j] > m:
                key = 'tp' if y[j] == 1 else 'fp'
            else:
                key = 'fn' if y[j] == 1 else 'tn'
            counts[key] += 1
            j += 1
    return counts['tp'], counts['fp'], counts['fn'], counts['tn']
from methodutils import FdaUtils
class FDA_node(object):
    """Node of a binary decision tree whose splits are Fisher discriminant
    (FDA) classifiers.

    Each node owns its FdaUtils model and a probability threshold ``m``;
    samples with P(class=1) <= m flow to the left child, the rest to the
    right child.  A node without children is a leaf and classifies directly.
    """

    def __init__(self):
        """Create a leaf node with an untrained FDA model."""
        self.method = FdaUtils()  # per-node discriminant model
        self.left = None  # child for samples with prob <= m
        self.right = None  # child for samples with prob > m
        self.m = 0.5  # probability threshold separating the children

    def grow(self):
        """Attach a fresh pair of children, turning this leaf into an
        internal node."""
        self.right = FDA_node()
        self.left = FDA_node()

    def find_optimal_param(self, x, y):
        """Tune this node's threshold, then recursively tune the children
        on their respective partitions."""
        self.m = self.method.find_optimal_param(x, y)
        if self.left is not None and self.right is not None:
            left, right = self.divide_data(x)
            self.left.find_optimal_param(x[left], y[left])
            self.right.find_optimal_param(x[right], y[right])

    def fit(self, x, y):
        """Fit this node's model and recursively fit the children.

        BUG FIX: the left partition now trains the left child and the right
        partition the right child, matching find_optimal_param and predict
        (the partitions were previously swapped between the children).
        """
        self.method.fit(x, y)
        if self.left is not None and self.right is not None:
            left, right = self.divide_data(x)
            # a pure side means further splitting is pointless: prune
            if max(y[left]) == 0 or min(y[right]) == 1:
                self.left = self.right = None
            else:
                self.left.fit(x[left], y[left])
                self.right.fit(x[right], y[right])

    def divide_data(self, x):
        """Return boolean masks (left, right) partitioning ``x`` by this
        node's probability threshold."""
        probs = self.method.predict_proba(x)[:, 1]
        left = probs <= self.m
        right = probs > self.m
        return left, right

    def predict(self, x):
        """Predict labels: a leaf classifies directly, an internal node
        routes samples to the matching child and merges the results."""
        if self.left is None and self.right is None:
            pred = self.method.predict(x, self.m)
        elif self.left is not None and self.right is not None:
            left, right = self.divide_data(x)
            l_pred = self.left.predict(x[left])
            r_pred = self.right.predict(x[right])
            pred = np.ones(x.shape[0]) * 2  # sentinel; fully overwritten
            pred[left] = l_pred
            pred[right] = r_pred
        return pred
if __name__ == "__main__":
np.seterr(all='raise')
from sklearn.metrics import confusion_matrix
from dataset import load_dataset, load_new_dataset_6002, diagnosis_to_binary, MOST_FREQ_DIAGS_NUMS_NEW
from fisher_discriminant import FisherDiscriminantAnalisys
num_components = 100
infile = open('C:\\Users\\donte_000\\PycharmProjects\\Basic_Methods\\data\\data_old_and_new_without_noise.pkl', 'rb')
(old, new) = pkl.load(infile)
infile.close()
Y = old["y"]
outfile = open('C:\\Users\\donte_000\\PycharmProjects\\Basic_Methods\\data\\6002_old_Dif.pkl', 'rb')
X = pkl.load(outfile)
outfile.close()
pca = PCA(n_components=X.shape[0])
b = pca.fit_transform(X)
for d in reversed(MOST_FREQ_DIAGS_NUMS_NEW):
y_prediction =[]
y_labels = []
for train_index, test_index in cross_val(b.shape[0], 500):
tree = FDA_node()
tree.grow()
tree.fit(b[train_index, :num_components],Y[train_index,d])
tree.find_optimal_param(b[train_index, :num_components], Y[train_index,d])
y_prediction.append(tree.predict(b[test_index, :num_components]))
y_labels.append(Y[test_index, d])
y_prediction = np.array(y_prediction).flatten()
y_labels = np.array(y_labels).flatten()
tn, fp, fn, tp = confusion_matrix(y_labels, y_prediction).ravel()
test_se = tp / (tp + fn)
test_sp = tn / (tn + fp)
print("Val. Se = %s, Val. Sp = %s" % (round(test_sp, 4), round(test_se, 4)))
|
flexible
|
{
"blob_id": "784b51c05dc7b5e70016634e2664c9ec25b8a65a",
"index": 6506,
"step-1": "<mask token>\n\n\nclass FDA_node(object):\n <mask token>\n\n def grow(self):\n self.right = FDA_node()\n self.left = FDA_node()\n\n def find_optimal_param(self, x, y):\n self.m = self.method.find_optimal_param(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n self.left.find_optimal_param(x[left], y[left])\n self.right.find_optimal_param(x[right], y[right])\n\n def fit(self, x, y):\n self.method.fit(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n if max(y[left]) == 0 or min(y[right]) == 1:\n self.left = self.right = None\n else:\n self.right.fit(x[left], y[left])\n self.left.fit(x[right], y[right])\n <mask token>\n\n def predict(self, x):\n if self.left == None and self.right == None:\n pred = self.method.predict(x, self.m)\n elif self.left != None and self.right != None:\n left, right = self.divide_data(x)\n l_pred = self.left.predict(x[left])\n r_pred = self.right.predict(x[right])\n pred = np.ones(x.shape[0]) * 2\n pred[left] = l_pred\n pred[right] = r_pred\n return pred\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass FDA_node(object):\n\n def __init__(self):\n \"\"\"Constructor\"\"\"\n self.method = FdaUtils()\n self.left = None\n self.right = None\n self.m = 0.5\n\n def grow(self):\n self.right = FDA_node()\n self.left = FDA_node()\n\n def find_optimal_param(self, x, y):\n self.m = self.method.find_optimal_param(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n self.left.find_optimal_param(x[left], y[left])\n self.right.find_optimal_param(x[right], y[right])\n\n def fit(self, x, y):\n self.method.fit(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n if max(y[left]) == 0 or min(y[right]) == 1:\n self.left = self.right = None\n else:\n self.right.fit(x[left], y[left])\n self.left.fit(x[right], y[right])\n <mask token>\n\n def predict(self, x):\n if self.left == None and self.right == None:\n pred = self.method.predict(x, self.m)\n elif self.left != None and self.right != None:\n left, right = self.divide_data(x)\n l_pred = self.left.predict(x[left])\n r_pred = self.right.predict(x[right])\n pred = np.ones(x.shape[0]) * 2\n pred[left] = l_pred\n pred[right] = r_pred\n return pred\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef find_optimal_param(lda, x_train, y_train):\n probs_train = lda.predict_proba(x_train)[:, 1]\n y_train = [x for _, x in sorted(zip(probs_train, y_train))]\n y_train = np.array(y_train)\n probs_train.sort()\n Se = []\n Sp = []\n for p in range(len(probs_train)):\n tp = np.count_nonzero(y_train[p:] == 1)\n fp = np.count_nonzero(y_train[p:] == 0)\n tn = np.count_nonzero(y_train[:p] == 0)\n fn = np.count_nonzero(y_train[:p] == 1)\n Se.append(tp / (tp + fn))\n Sp.append(tn / (tn + fp))\n mx = np.argmax(-(1 - np.array(Sp) - np.array(Se)))\n return probs_train[mx]\n\n\ndef predict(lda, x, y, m):\n tp = 0\n fp = 0\n tn = 0\n fn = 0\n if len(x) != 0:\n probs = lda.predict_proba(x)[:, 1]\n for j in range(len(x)):\n if probs[j] > m:\n if y[j] == 1:\n tp += 1\n else:\n fp += 1\n elif y[j] == 1:\n fn += 1\n else:\n tn += 1\n return tp, fp, fn, tn\n\n\n<mask token>\n\n\nclass FDA_node(object):\n\n def __init__(self):\n \"\"\"Constructor\"\"\"\n self.method = FdaUtils()\n self.left = None\n self.right = None\n self.m = 0.5\n\n def grow(self):\n self.right = FDA_node()\n self.left = FDA_node()\n\n def find_optimal_param(self, x, y):\n self.m = self.method.find_optimal_param(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n self.left.find_optimal_param(x[left], y[left])\n self.right.find_optimal_param(x[right], y[right])\n\n def fit(self, x, y):\n self.method.fit(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n if max(y[left]) == 0 or min(y[right]) == 1:\n self.left = self.right = None\n else:\n self.right.fit(x[left], y[left])\n self.left.fit(x[right], y[right])\n\n def divide_data(self, x):\n probs = self.method.predict_proba(x)[:, 1]\n left = probs <= self.m\n right = probs > self.m\n return left, right\n\n def predict(self, x):\n if self.left == None and self.right == None:\n pred = self.method.predict(x, self.m)\n elif self.left != None and self.right != None:\n 
left, right = self.divide_data(x)\n l_pred = self.left.predict(x[left])\n r_pred = self.right.predict(x[right])\n pred = np.ones(x.shape[0]) * 2\n pred[left] = l_pred\n pred[right] = r_pred\n return pred\n\n\n<mask token>\n",
"step-4": "import numpy as np\nfrom sklearn.decomposition import PCA\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\nfrom preprocessing import *\nfrom utils import *\n\n\ndef find_optimal_param(lda, x_train, y_train):\n probs_train = lda.predict_proba(x_train)[:, 1]\n y_train = [x for _, x in sorted(zip(probs_train, y_train))]\n y_train = np.array(y_train)\n probs_train.sort()\n Se = []\n Sp = []\n for p in range(len(probs_train)):\n tp = np.count_nonzero(y_train[p:] == 1)\n fp = np.count_nonzero(y_train[p:] == 0)\n tn = np.count_nonzero(y_train[:p] == 0)\n fn = np.count_nonzero(y_train[:p] == 1)\n Se.append(tp / (tp + fn))\n Sp.append(tn / (tn + fp))\n mx = np.argmax(-(1 - np.array(Sp) - np.array(Se)))\n return probs_train[mx]\n\n\ndef predict(lda, x, y, m):\n tp = 0\n fp = 0\n tn = 0\n fn = 0\n if len(x) != 0:\n probs = lda.predict_proba(x)[:, 1]\n for j in range(len(x)):\n if probs[j] > m:\n if y[j] == 1:\n tp += 1\n else:\n fp += 1\n elif y[j] == 1:\n fn += 1\n else:\n tn += 1\n return tp, fp, fn, tn\n\n\nfrom methodutils import FdaUtils\n\n\nclass FDA_node(object):\n\n def __init__(self):\n \"\"\"Constructor\"\"\"\n self.method = FdaUtils()\n self.left = None\n self.right = None\n self.m = 0.5\n\n def grow(self):\n self.right = FDA_node()\n self.left = FDA_node()\n\n def find_optimal_param(self, x, y):\n self.m = self.method.find_optimal_param(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n self.left.find_optimal_param(x[left], y[left])\n self.right.find_optimal_param(x[right], y[right])\n\n def fit(self, x, y):\n self.method.fit(x, y)\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n if max(y[left]) == 0 or min(y[right]) == 1:\n self.left = self.right = None\n else:\n self.right.fit(x[left], y[left])\n self.left.fit(x[right], y[right])\n\n def divide_data(self, x):\n probs = self.method.predict_proba(x)[:, 1]\n left = probs <= self.m\n right = probs > self.m\n 
return left, right\n\n def predict(self, x):\n if self.left == None and self.right == None:\n pred = self.method.predict(x, self.m)\n elif self.left != None and self.right != None:\n left, right = self.divide_data(x)\n l_pred = self.left.predict(x[left])\n r_pred = self.right.predict(x[right])\n pred = np.ones(x.shape[0]) * 2\n pred[left] = l_pred\n pred[right] = r_pred\n return pred\n\n\nif __name__ == '__main__':\n np.seterr(all='raise')\n from sklearn.metrics import confusion_matrix\n from dataset import load_dataset, load_new_dataset_6002, diagnosis_to_binary, MOST_FREQ_DIAGS_NUMS_NEW\n from fisher_discriminant import FisherDiscriminantAnalisys\n num_components = 100\n infile = open(\n 'C:\\\\Users\\\\donte_000\\\\PycharmProjects\\\\Basic_Methods\\\\data\\\\data_old_and_new_without_noise.pkl'\n , 'rb')\n old, new = pkl.load(infile)\n infile.close()\n Y = old['y']\n outfile = open(\n 'C:\\\\Users\\\\donte_000\\\\PycharmProjects\\\\Basic_Methods\\\\data\\\\6002_old_Dif.pkl'\n , 'rb')\n X = pkl.load(outfile)\n outfile.close()\n pca = PCA(n_components=X.shape[0])\n b = pca.fit_transform(X)\n for d in reversed(MOST_FREQ_DIAGS_NUMS_NEW):\n y_prediction = []\n y_labels = []\n for train_index, test_index in cross_val(b.shape[0], 500):\n tree = FDA_node()\n tree.grow()\n tree.fit(b[train_index, :num_components], Y[train_index, d])\n tree.find_optimal_param(b[train_index, :num_components], Y[\n train_index, d])\n y_prediction.append(tree.predict(b[test_index, :num_components]))\n y_labels.append(Y[test_index, d])\n y_prediction = np.array(y_prediction).flatten()\n y_labels = np.array(y_labels).flatten()\n tn, fp, fn, tp = confusion_matrix(y_labels, y_prediction).ravel()\n test_se = tp / (tp + fn)\n test_sp = tn / (tn + fp)\n print('Val. Se = %s, Val. Sp = %s' % (round(test_sp, 4), round(\n test_se, 4)))\n",
"step-5": "import numpy as np\nfrom sklearn.decomposition import PCA\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\n\nfrom preprocessing import *\nfrom utils import *\n\n\ndef find_optimal_param(lda, x_train, y_train):\n\n probs_train = lda.predict_proba(x_train)[:, 1]\n\n y_train = [x for _,x in sorted(zip(probs_train,y_train))]\n y_train = np.array(y_train)\n probs_train.sort()\n Se = []\n Sp = []\n for p in range(len(probs_train)):\n tp = np.count_nonzero(y_train[p:] == 1)\n fp = np.count_nonzero(y_train[p:] == 0)\n tn = np.count_nonzero(y_train[:p] == 0)\n fn = np.count_nonzero(y_train[:p] == 1)\n Se.append(tp/(tp+fn))\n Sp.append(tn/(tn+fp))\n\n mx = np.argmax(-(1-np.array(Sp) - np.array(Se)))\n\n return probs_train[mx]\n\ndef predict(lda, x, y, m):\n tp = 0\n fp = 0\n tn = 0\n fn = 0\n if len(x) != 0:\n probs= lda.predict_proba(x)[:, 1]\n\n for j in range(len(x)):\n if probs[j] > m:\n if y[j] == 1:\n tp+=1\n else:\n fp+=1\n else:\n if y[j] == 1:\n fn +=1\n else:\n tn +=1\n\n return tp, fp, fn, tn\n\nfrom methodutils import FdaUtils\n\nclass FDA_node(object):\n\n def __init__(self):\n \"\"\"Constructor\"\"\"\n self.method = FdaUtils()\n self.left = None\n self.right = None\n self.m = 0.5\n\n def grow(self):\n self.right = FDA_node()\n self.left = FDA_node()\n\n def find_optimal_param(self, x, y):\n self.m = self.method.find_optimal_param(x, y)\n\n\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n self.left.find_optimal_param(x[left], y[left])\n self.right.find_optimal_param(x[right], y[right])\n\n\n def fit(self, x, y):\n self.method.fit(x, y)\n\n if self.left != None and self.right != None:\n left, right = self.divide_data(x)\n if (max(y[left]) == 0 or min(y[right]) == 1):\n self.left = self.right = None\n else:\n self.right.fit(x[left], y[left])\n self.left.fit(x[right], y[right])\n\n\n def divide_data(self, x):\n probs = self.method.predict_proba(x)[:, 1]\n left = (probs <= self.m)\n right = (probs 
> self.m)\n return left, right\n\n\n def predict(self, x):\n if self.left == None and self.right == None:\n pred = self.method.predict(x, self.m)\n\n elif self.left != None and self.right != None:\n left, right = self.divide_data(x)\n l_pred = self.left.predict(x[left])\n r_pred =self.right.predict(x[right])\n pred = np.ones(x.shape[0])*2\n pred[left] = l_pred\n pred[right] = r_pred\n\n return pred\n\n\n\nif __name__ == \"__main__\":\n np.seterr(all='raise')\n from sklearn.metrics import confusion_matrix\n from dataset import load_dataset, load_new_dataset_6002, diagnosis_to_binary, MOST_FREQ_DIAGS_NUMS_NEW\n from fisher_discriminant import FisherDiscriminantAnalisys\n num_components = 100\n\n infile = open('C:\\\\Users\\\\donte_000\\\\PycharmProjects\\\\Basic_Methods\\\\data\\\\data_old_and_new_without_noise.pkl', 'rb')\n (old, new) = pkl.load(infile)\n infile.close()\n\n Y = old[\"y\"]\n outfile = open('C:\\\\Users\\\\donte_000\\\\PycharmProjects\\\\Basic_Methods\\\\data\\\\6002_old_Dif.pkl', 'rb')\n X = pkl.load(outfile)\n outfile.close()\n pca = PCA(n_components=X.shape[0])\n b = pca.fit_transform(X)\n\n\n\n for d in reversed(MOST_FREQ_DIAGS_NUMS_NEW):\n y_prediction =[]\n y_labels = []\n for train_index, test_index in cross_val(b.shape[0], 500):\n tree = FDA_node()\n tree.grow()\n tree.fit(b[train_index, :num_components],Y[train_index,d])\n tree.find_optimal_param(b[train_index, :num_components], Y[train_index,d])\n\n y_prediction.append(tree.predict(b[test_index, :num_components]))\n y_labels.append(Y[test_index, d])\n\n y_prediction = np.array(y_prediction).flatten()\n y_labels = np.array(y_labels).flatten()\n tn, fp, fn, tp = confusion_matrix(y_labels, y_prediction).ravel()\n\n test_se = tp / (tp + fn)\n test_sp = tn / (tn + fp)\n print(\"Val. Se = %s, Val. Sp = %s\" % (round(test_sp, 4), round(test_se, 4)))\n",
"step-ids": [
5,
6,
9,
11,
12
]
}
|
[
5,
6,
9,
11,
12
] |
<|reserved_special_token_0|>
def from_url(url: str) -> Image.Image:
    """Download *url* and decode the response body as a PIL image.

    NOTE(review): no timeout or status-code check on the request — a failed
    download surfaces as a decode error from Image.open; confirm upstream.
    """
    return Image.open(BytesIO(requests.get(url).content))
def from_file(path: str) -> Union[Image.Image, None]:
    """Open the image at *path*, or return None when no such file exists."""
    if not os.path.exists(path):
        return None
    return Image.open(path)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_img_from_file_or_url(img_format: str = 'JPEG') -> Callable[[str, str], Image.Image]:
    """Build a loader that prefers the on-disk copy and falls back to the URL.

    The returned callable takes (filepath, url).  On a cache miss the image
    is downloaded, saved to ``filepath`` as *img_format*, and the result is
    always returned converted to RGB.
    """
    def _apply(filepath: str, url: str) -> Image.Image:
        cached = from_file(filepath)
        if cached is not None:
            return cached.convert('RGB')
        fetched = from_url(url)
        fetched.save(filepath, img_format)  # persist for future calls
        return fetched.convert('RGB')
    return _apply
def from_url(url: str) -> Image.Image:
    """Fetch the image behind *url* via HTTP and open it with PIL."""
    payload = requests.get(url).content
    return Image.open(BytesIO(payload))
def from_file(path: str) -> Union[Image.Image, None]:
    """Return the PIL image stored at *path*, or None if the file is missing."""
    return Image.open(path) if os.path.exists(path) else None
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_img_from_file_or_url(img_format: str = 'JPEG') -> Callable[[str, str], Image.Image]:
    """Return a two-argument loader ``(filepath, url)`` with file-first caching.

    A missing cache file triggers a download; the downloaded image is written
    back to ``filepath`` in *img_format*.  The image is returned as RGB.
    """
    def _apply(filepath: str, url: str) -> Image.Image:
        image = from_file(filepath)
        needs_download = image is None
        if needs_download:
            image = from_url(url)
            image.save(filepath, img_format)
        return image.convert('RGB')
    return _apply
def from_url(url: str) -> Image.Image:
    """Download *url* and decode the response body as a PIL image.

    NOTE(review): the request has no timeout or status check — a failed
    download surfaces as a decode error from Image.open; confirm upstream.
    """
    return Image.open(BytesIO(requests.get(url).content))
def from_file(path: str) -> Union[Image.Image, None]:
    """Open the image at *path*, or return None when no such file exists."""
    if not os.path.exists(path):
        return None
    return Image.open(path)
def load_metadata(path: str, cols: Iterable[int], class_cols: Collection[
    int]=tuple(), valid_only: bool=True, **reader_args) ->Tuple[List, int,
    List, List[Dict[str, int]], List[Dict[int, str]], int]:
    """Read a CSV of metadata, selecting ``cols`` from each row and building
    class-value index maps for every column in ``class_cols``.

    Returns (metadata, len_metadata, headers, class_to_index,
    index_to_class, num_classes).  ``num_classes`` counts the distinct
    values of the LAST class column only (0 when there are none).  With
    ``valid_only`` rows containing an empty selected value are dropped
    (their class values are still registered, as before).
    """
    metadata = []
    # BUG FIX: [{}] * n aliases ONE shared dict n times, so every class
    # column wrote into the same mapping; build independent dicts instead.
    class_to_index: List[Dict[str, int]] = [{} for _ in class_cols]
    index_to_class: List[Dict[int, str]] = [{} for _ in class_cols]
    next_indices = [0] * len(class_cols)  # ints are immutable; * is safe here
    with open(path, 'r', newline='', encoding='utf8') as metadata_file:
        reader = csv.reader(metadata_file, **reader_args)
        headers = next(reader)
        for row in reader:
            if len(row) == 0:
                continue
            metadatum = [row[c] for c in cols]
            # register any unseen class value for every class column
            for c, class_col in enumerate(class_cols):
                if row[class_col] not in class_to_index[c]:
                    class_to_index[c][row[class_col]] = next_indices[c]
                    index_to_class[c][next_indices[c]] = row[class_col]
                    next_indices[c] += 1
            if valid_only and '' in metadatum:
                continue
            metadata.append(metadatum)
    len_metadata = len(metadata)
    num_classes = 0 if len(next_indices) == 0 else next_indices[-1]
    return (metadata, len_metadata, headers, class_to_index, index_to_class,
        num_classes)
<|reserved_special_token_1|>
import os
import requests
from PIL import Image
from io import BytesIO
import csv
from typing import Iterable, List, Tuple, Dict, Callable, Union, Collection
def get_img_from_file_or_url(img_format: str = 'JPEG') -> Callable[[str, str], Image.Image]:
    """Build a loader that prefers the on-disk copy and falls back to the URL.

    The returned callable takes (filepath, url).  On a cache miss the image
    is downloaded, saved to ``filepath`` as *img_format*, and the result is
    always returned converted to RGB.
    """
    def _apply(filepath: str, url: str) -> Image.Image:
        cached = from_file(filepath)
        if cached is not None:
            return cached.convert('RGB')
        fetched = from_url(url)
        fetched.save(filepath, img_format)  # persist for future calls
        return fetched.convert('RGB')
    return _apply
def from_url(url: str) -> Image.Image:
    """Fetch the image behind *url* via HTTP and open it with PIL."""
    payload = requests.get(url).content
    return Image.open(BytesIO(payload))
def from_file(path: str) -> Union[Image.Image, None]:
    """Return the PIL image stored at *path*, or None if the file is missing."""
    return Image.open(path) if os.path.exists(path) else None
def load_metadata(path: str, cols: Iterable[int], class_cols: Collection[
    int]=tuple(), valid_only: bool=True, **reader_args) ->Tuple[List, int,
    List, List[Dict[str, int]], List[Dict[int, str]], int]:
    """Read a CSV of metadata, selecting ``cols`` from each row and building
    class-value index maps for every column in ``class_cols``.

    Returns (metadata, len_metadata, headers, class_to_index,
    index_to_class, num_classes).  ``num_classes`` counts the distinct
    values of the LAST class column only (0 when there are none).  With
    ``valid_only`` rows containing an empty selected value are dropped
    (their class values are still registered, as before).
    """
    metadata = []
    # BUG FIX: [{}] * n aliases ONE shared dict n times, so every class
    # column wrote into the same mapping; build independent dicts instead.
    class_to_index: List[Dict[str, int]] = [{} for _ in class_cols]
    index_to_class: List[Dict[int, str]] = [{} for _ in class_cols]
    next_indices = [0] * len(class_cols)  # ints are immutable; * is safe here
    with open(path, 'r', newline='', encoding='utf8') as metadata_file:
        reader = csv.reader(metadata_file, **reader_args)
        headers = next(reader)
        for row in reader:
            if len(row) == 0:
                continue
            metadatum = [row[c] for c in cols]
            # register any unseen class value for every class column
            for c, class_col in enumerate(class_cols):
                if row[class_col] not in class_to_index[c]:
                    class_to_index[c][row[class_col]] = next_indices[c]
                    index_to_class[c][next_indices[c]] = row[class_col]
                    next_indices[c] += 1
            if valid_only and '' in metadatum:
                continue
            metadata.append(metadatum)
    len_metadata = len(metadata)
    num_classes = 0 if len(next_indices) == 0 else next_indices[-1]
    return (metadata, len_metadata, headers, class_to_index, index_to_class,
        num_classes)
<|reserved_special_token_1|>
import os
import requests
from PIL import Image
from io import BytesIO
import csv
from typing import Iterable, List, Tuple, Dict, Callable, Union, Collection
# pull the image from the api endpoint and save it if we don't have it, else load it from disk
def get_img_from_file_or_url(img_format: str = 'JPEG') -> Callable[[str, str], Image.Image]:
    """Return a two-argument loader ``(filepath, url)`` with file-first caching.

    A missing cache file triggers a download; the downloaded image is written
    back to ``filepath`` in *img_format*.  The image is always returned
    converted to RGB (e.g. for grayscale sources).
    """
    def _apply(filepath: str, url: str) -> Image.Image:
        image = from_file(filepath)
        needs_download = image is None
        if needs_download:
            image = from_url(url)
            image.save(filepath, img_format)
        return image.convert('RGB')
    return _apply
def from_url(url: str) -> Image.Image:
    """Download *url* and decode the response body as a PIL image.

    NOTE(review): the request has no timeout or status check — a failed
    download surfaces as a decode error from Image.open; confirm upstream.
    """
    return Image.open(BytesIO(requests.get(url).content))
def from_file(path: str) -> Union[Image.Image, None]:
    """Open the image at *path*, or return None when no such file exists."""
    if not os.path.exists(path):
        return None
    return Image.open(path)
def load_metadata(path: str, cols: Iterable[int], class_cols: Collection[int] = tuple(), valid_only: bool = True, **reader_args)\
        -> Tuple[List, int, List, List[Dict[str, int]], List[Dict[int, str]], int]:
    """Read a CSV of metadata, selecting ``cols`` from each row and building
    class-value index maps for every column in ``class_cols``.

    Returns (metadata, len_metadata, headers, class_to_index,
    index_to_class, num_classes).  ``num_classes`` counts the distinct
    values of the LAST class column only (0 when there are none).  With
    ``valid_only`` rows containing an empty selected value are dropped
    (their class values are still registered, as before).
    """
    metadata = []
    # BUG FIX: [{}] * n aliases ONE shared dict n times, so every class
    # column wrote into the same mapping; build independent dicts instead.
    class_to_index: List[Dict[str, int]] = [{} for _ in class_cols]
    index_to_class: List[Dict[int, str]] = [{} for _ in class_cols]
    next_indices = [0] * len(class_cols)  # ints are immutable; * is safe here
    with open(path, 'r', newline='', encoding="utf8") as metadata_file:
        reader = csv.reader(metadata_file, **reader_args)
        headers = next(reader)
        for row in reader:
            if len(row) == 0:
                continue
            metadatum = [row[c] for c in cols]
            # for all class cols, register any value not seen before
            for c, class_col in enumerate(class_cols):
                if not row[class_col] in class_to_index[c]:
                    class_to_index[c][row[class_col]] = next_indices[c]
                    index_to_class[c][next_indices[c]] = row[class_col]
                    next_indices[c] += 1
            if valid_only and '' in metadatum:
                continue
            metadata.append(metadatum)
    len_metadata = len(metadata)
    num_classes = 0 if len(next_indices) == 0 else next_indices[-1]
    # split off the headers
    return metadata, len_metadata, headers, class_to_index, index_to_class, num_classes
|
flexible
|
{
"blob_id": "f2bb44600f011a205c71985ad94c18f7e058634f",
"index": 8,
"step-1": "<mask token>\n\n\ndef from_url(url: str) ->Image.Image:\n api_response = requests.get(url).content\n response_bytes = BytesIO(api_response)\n return Image.open(response_bytes)\n\n\ndef from_file(path: str) ->Union[Image.Image, None]:\n if os.path.exists(path):\n return Image.open(path)\n else:\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_img_from_file_or_url(img_format: str='JPEG') ->Callable[[str, str],\n Image.Image]:\n\n def _apply(filepath: str, url: str) ->Image.Image:\n img = from_file(filepath)\n if img is None:\n img = from_url(url)\n img.save(filepath, img_format)\n return img.convert('RGB')\n return _apply\n\n\ndef from_url(url: str) ->Image.Image:\n api_response = requests.get(url).content\n response_bytes = BytesIO(api_response)\n return Image.open(response_bytes)\n\n\ndef from_file(path: str) ->Union[Image.Image, None]:\n if os.path.exists(path):\n return Image.open(path)\n else:\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_img_from_file_or_url(img_format: str='JPEG') ->Callable[[str, str],\n Image.Image]:\n\n def _apply(filepath: str, url: str) ->Image.Image:\n img = from_file(filepath)\n if img is None:\n img = from_url(url)\n img.save(filepath, img_format)\n return img.convert('RGB')\n return _apply\n\n\ndef from_url(url: str) ->Image.Image:\n api_response = requests.get(url).content\n response_bytes = BytesIO(api_response)\n return Image.open(response_bytes)\n\n\ndef from_file(path: str) ->Union[Image.Image, None]:\n if os.path.exists(path):\n return Image.open(path)\n else:\n return None\n\n\ndef load_metadata(path: str, cols: Iterable[int], class_cols: Collection[\n int]=tuple(), valid_only: bool=True, **reader_args) ->Tuple[List, int,\n List, List[Dict[str, int]], List[Dict[int, str]], int]:\n metadata = []\n class_to_index: List[Dict[str, int]] = [{}] * len(class_cols)\n index_to_class: List[Dict[int, str]] = [{}] * len(class_cols)\n next_indices = [0] * len(class_cols)\n with open(path, 'r', newline='', encoding='utf8') as metadata_file:\n reader = csv.reader(metadata_file, **reader_args)\n headers = next(reader)\n for row in reader:\n if len(row) != 0:\n metadatum = [row[c] for c in cols]\n for c, class_col in enumerate(class_cols):\n if not row[class_col] in class_to_index[c]:\n class_to_index[c][row[class_col]] = next_indices[c]\n index_to_class[c][next_indices[c]] = row[class_col]\n next_indices[c] += 1\n if valid_only and '' in metadatum:\n continue\n metadata.append(metadatum)\n len_metadata = len(metadata)\n num_classes = 0 if len(next_indices) == 0 else next_indices[-1]\n return (metadata, len_metadata, headers, class_to_index, index_to_class,\n num_classes)\n",
"step-4": "import os\nimport requests\nfrom PIL import Image\nfrom io import BytesIO\nimport csv\nfrom typing import Iterable, List, Tuple, Dict, Callable, Union, Collection\n\n\ndef get_img_from_file_or_url(img_format: str='JPEG') ->Callable[[str, str],\n Image.Image]:\n\n def _apply(filepath: str, url: str) ->Image.Image:\n img = from_file(filepath)\n if img is None:\n img = from_url(url)\n img.save(filepath, img_format)\n return img.convert('RGB')\n return _apply\n\n\ndef from_url(url: str) ->Image.Image:\n api_response = requests.get(url).content\n response_bytes = BytesIO(api_response)\n return Image.open(response_bytes)\n\n\ndef from_file(path: str) ->Union[Image.Image, None]:\n if os.path.exists(path):\n return Image.open(path)\n else:\n return None\n\n\ndef load_metadata(path: str, cols: Iterable[int], class_cols: Collection[\n int]=tuple(), valid_only: bool=True, **reader_args) ->Tuple[List, int,\n List, List[Dict[str, int]], List[Dict[int, str]], int]:\n metadata = []\n class_to_index: List[Dict[str, int]] = [{}] * len(class_cols)\n index_to_class: List[Dict[int, str]] = [{}] * len(class_cols)\n next_indices = [0] * len(class_cols)\n with open(path, 'r', newline='', encoding='utf8') as metadata_file:\n reader = csv.reader(metadata_file, **reader_args)\n headers = next(reader)\n for row in reader:\n if len(row) != 0:\n metadatum = [row[c] for c in cols]\n for c, class_col in enumerate(class_cols):\n if not row[class_col] in class_to_index[c]:\n class_to_index[c][row[class_col]] = next_indices[c]\n index_to_class[c][next_indices[c]] = row[class_col]\n next_indices[c] += 1\n if valid_only and '' in metadatum:\n continue\n metadata.append(metadatum)\n len_metadata = len(metadata)\n num_classes = 0 if len(next_indices) == 0 else next_indices[-1]\n return (metadata, len_metadata, headers, class_to_index, index_to_class,\n num_classes)\n",
"step-5": "import os\nimport requests\nfrom PIL import Image\nfrom io import BytesIO\nimport csv\nfrom typing import Iterable, List, Tuple, Dict, Callable, Union, Collection\n\n\n# pull the image from the api endpoint and save it if we don't have it, else load it from disk\ndef get_img_from_file_or_url(img_format: str = 'JPEG') -> Callable[[str, str], Image.Image]:\n def _apply(filepath: str, url: str) -> Image.Image:\n img = from_file(filepath)\n if img is None:\n img = from_url(url)\n img.save(filepath, img_format)\n return img.convert('RGB') # convert to rgb if not already (eg if grayscale)\n return _apply\n\n\ndef from_url(url: str) -> Image.Image:\n api_response = requests.get(url).content\n response_bytes = BytesIO(api_response)\n return Image.open(response_bytes)\n\n\ndef from_file(path: str) -> Union[Image.Image, None]:\n if os.path.exists(path):\n return Image.open(path)\n else:\n return None\n\n\ndef load_metadata(path: str, cols: Iterable[int], class_cols: Collection[int] = tuple(), valid_only: bool = True, **reader_args)\\\n -> Tuple[List, int, List, List[Dict[str, int]], List[Dict[int, str]], int]:\n metadata = []\n # one dict for each class col\n class_to_index: List[Dict[str, int]] = [{}] * len(class_cols)\n index_to_class: List[Dict[int, str]] = [{}] * len(class_cols)\n next_indices = [0] * len(class_cols) # next index for a new class value\n with open(path, 'r', newline='', encoding=\"utf8\") as metadata_file:\n reader = csv.reader(metadata_file, **reader_args)\n headers = next(reader)\n for row in reader:\n if len(row) != 0:\n metadatum = [row[c] for c in cols]\n # for all class cols, add their vals to the class_to_index and index_to_class dicts if not there already\n for c, class_col in enumerate(class_cols):\n if not row[class_col] in class_to_index[c]:\n class_to_index[c][row[class_col]] = next_indices[c]\n index_to_class[c][next_indices[c]] = row[class_col]\n next_indices[c] += 1\n if valid_only and '' in metadatum:\n continue\n 
metadata.append(metadatum)\n len_metadata = len(metadata)\n num_classes = 0 if len(next_indices) == 0 else next_indices[-1]\n # split off the headers\n return metadata, len_metadata, headers, class_to_index, index_to_class, num_classes\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import numpy as np
from .metrics import r2_score
class LinearRegression:
    def __init__(self):
        """Initialize an untrained linear regression model."""
        self.coef_ = None  # coefficients (weights), one per feature
        self.interception_ = None  # intercept (bias) term
        self._theta = None  # full parameter vector [intercept, coefficients]
def fit_normal(self, X_train, y_train):
assert X_train.shape[0] == y_train.shape[0], ""
#!!!important
X_b = np.hstack([np.ones((len(X_train), 1)), X_train])
self._theta = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y_train)
self.interception_ = self._theta[0]
self.coef_ = self._theta[1:]
return self
def fit_gd(self, X_train, y_train, eta=0.01, n_iter=1e4):
assert X_train.shape[0] == y_train.shape[0], ""
def J(theta, X_b, y):
try:
return np.sum((y - X_b.dot(theta)) ** 2) / len(X_b)
except:
return float('inf')
def dJ(theta, X_b, y):
# 向量化实现
return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(X_b)
def gradient_descent(X_b, y, initial_theta, eta, n_iter, epsilon=1e-8):
theta = initial_theta
i_iter = 0
while i_iter < n_iter:
gradient = dJ(theta, X_b, y)
last_theta = theta
theta = theta - eta * gradient
if (abs(J(theta, X_b, y) - J(last_theta, X_b, y)) < epsilon):
break
i_iter += 1
return theta
X_b = np.hstack([np.ones((len(X_train), 1)), X_train])
initial_theta = np.zeros(X_b.shape[1])
self._theta = gradient_descent(X_b, y_train, initial_theta, eta, n_iter)
self.interception_ = self._theta[0]
self.coef_ = self._theta[1:]
return self
# n_iter 代表观测所有数据几次
def fit_sgd(self, X_train, y_train, n_iter=5, t0=5, t1=50):
assert X_train.shape[0] == y_train.shape[0], ""
def dJ_sgd(theta, X_b_i, y_i):
return X_b_i.T.dot(X_b_i.dot(theta) - y_i) * 2
# Stochastic gradient descent
def sgd(X_b, y, initial_theta, n_iter, t0=5, t1=50):
def learning_rate(t):
return t0 / (t + t1)
theta = initial_theta
m = len(X_b)
for curr_iter in range(n_iter):
indexes = np.random.permutation(m)
X_b_new = X_b[indexes]
y_new = y[indexes]
for i in range(m):
gradient = dJ_sgd(theta, X_b_new[i], y_new[i])
theta = theta - learning_rate(curr_iter * m + i) * gradient
return theta
X_b = np.hstack([np.ones([len(X_train), 1]), X_train])
initial_theta = np.zeros(X_b.shape[1])
self._theta = sgd(X_b, y_train, initial_theta, n_iter)
self.interception_ = self._theta[0]
self.coef_ = self._theta[1:]
def predict(self,X_predict):
assert self.interception_ is not None and self.coef_ is not None,\
"must fit before predict"
assert X_predict.shape[1] == len(self.coef_),\
"the feature number of X_predict must be equal to X_train"
X_b = np.hstack([np.ones((len(X_predict), 1)), X_predict])
y_predict = X_b.dot(self._theta)
return y_predict
def score(self,X_test,y_test):
y_predict = self.predict(X_test)
return r2_score(y_test,y_predict)
def __repr__(self):
return "LinearRegression()"
|
normal
|
{
"blob_id": "e47e614c88c78fb6e8ff4098ea2b89d21bfa9684",
"index": 6935,
"step-1": "<mask token>\n\n\nclass LinearRegression:\n\n def __init__(self):\n self.coef_ = None\n self.interception_ = None\n self._theta = None\n <mask token>\n\n def fit_gd(self, X_train, y_train, eta=0.01, n_iter=10000.0):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def J(theta, X_b, y):\n try:\n return np.sum((y - X_b.dot(theta)) ** 2) / len(X_b)\n except:\n return float('inf')\n\n def dJ(theta, X_b, y):\n return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(X_b)\n\n def gradient_descent(X_b, y, initial_theta, eta, n_iter, epsilon=1e-08\n ):\n theta = initial_theta\n i_iter = 0\n while i_iter < n_iter:\n gradient = dJ(theta, X_b, y)\n last_theta = theta\n theta = theta - eta * gradient\n if abs(J(theta, X_b, y) - J(last_theta, X_b, y)) < epsilon:\n break\n i_iter += 1\n return theta\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = gradient_descent(X_b, y_train, initial_theta, eta, n_iter\n )\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n return self\n\n def fit_sgd(self, X_train, y_train, n_iter=5, t0=5, t1=50):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def dJ_sgd(theta, X_b_i, y_i):\n return X_b_i.T.dot(X_b_i.dot(theta) - y_i) * 2\n\n def sgd(X_b, y, initial_theta, n_iter, t0=5, t1=50):\n\n def learning_rate(t):\n return t0 / (t + t1)\n theta = initial_theta\n m = len(X_b)\n for curr_iter in range(n_iter):\n indexes = np.random.permutation(m)\n X_b_new = X_b[indexes]\n y_new = y[indexes]\n for i in range(m):\n gradient = dJ_sgd(theta, X_b_new[i], y_new[i])\n theta = theta - learning_rate(curr_iter * m + i) * gradient\n return theta\n X_b = np.hstack([np.ones([len(X_train), 1]), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = sgd(X_b, y_train, initial_theta, n_iter)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n\n def predict(self, X_predict):\n assert self.interception_ is not None and self.coef_ is not None, 
'must fit before predict'\n assert X_predict.shape[1] == len(self.coef_\n ), 'the feature number of X_predict must be equal to X_train'\n X_b = np.hstack([np.ones((len(X_predict), 1)), X_predict])\n y_predict = X_b.dot(self._theta)\n return y_predict\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass LinearRegression:\n\n def __init__(self):\n self.coef_ = None\n self.interception_ = None\n self._theta = None\n <mask token>\n\n def fit_gd(self, X_train, y_train, eta=0.01, n_iter=10000.0):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def J(theta, X_b, y):\n try:\n return np.sum((y - X_b.dot(theta)) ** 2) / len(X_b)\n except:\n return float('inf')\n\n def dJ(theta, X_b, y):\n return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(X_b)\n\n def gradient_descent(X_b, y, initial_theta, eta, n_iter, epsilon=1e-08\n ):\n theta = initial_theta\n i_iter = 0\n while i_iter < n_iter:\n gradient = dJ(theta, X_b, y)\n last_theta = theta\n theta = theta - eta * gradient\n if abs(J(theta, X_b, y) - J(last_theta, X_b, y)) < epsilon:\n break\n i_iter += 1\n return theta\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = gradient_descent(X_b, y_train, initial_theta, eta, n_iter\n )\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n return self\n\n def fit_sgd(self, X_train, y_train, n_iter=5, t0=5, t1=50):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def dJ_sgd(theta, X_b_i, y_i):\n return X_b_i.T.dot(X_b_i.dot(theta) - y_i) * 2\n\n def sgd(X_b, y, initial_theta, n_iter, t0=5, t1=50):\n\n def learning_rate(t):\n return t0 / (t + t1)\n theta = initial_theta\n m = len(X_b)\n for curr_iter in range(n_iter):\n indexes = np.random.permutation(m)\n X_b_new = X_b[indexes]\n y_new = y[indexes]\n for i in range(m):\n gradient = dJ_sgd(theta, X_b_new[i], y_new[i])\n theta = theta - learning_rate(curr_iter * m + i) * gradient\n return theta\n X_b = np.hstack([np.ones([len(X_train), 1]), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = sgd(X_b, y_train, initial_theta, n_iter)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n\n def predict(self, X_predict):\n assert self.interception_ is not None and self.coef_ is not None, 
'must fit before predict'\n assert X_predict.shape[1] == len(self.coef_\n ), 'the feature number of X_predict must be equal to X_train'\n X_b = np.hstack([np.ones((len(X_predict), 1)), X_predict])\n y_predict = X_b.dot(self._theta)\n return y_predict\n\n def score(self, X_test, y_test):\n y_predict = self.predict(X_test)\n return r2_score(y_test, y_predict)\n\n def __repr__(self):\n return 'LinearRegression()'\n",
"step-3": "<mask token>\n\n\nclass LinearRegression:\n\n def __init__(self):\n self.coef_ = None\n self.interception_ = None\n self._theta = None\n\n def fit_normal(self, X_train, y_train):\n assert X_train.shape[0] == y_train.shape[0], ''\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n self._theta = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y_train)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n return self\n\n def fit_gd(self, X_train, y_train, eta=0.01, n_iter=10000.0):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def J(theta, X_b, y):\n try:\n return np.sum((y - X_b.dot(theta)) ** 2) / len(X_b)\n except:\n return float('inf')\n\n def dJ(theta, X_b, y):\n return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(X_b)\n\n def gradient_descent(X_b, y, initial_theta, eta, n_iter, epsilon=1e-08\n ):\n theta = initial_theta\n i_iter = 0\n while i_iter < n_iter:\n gradient = dJ(theta, X_b, y)\n last_theta = theta\n theta = theta - eta * gradient\n if abs(J(theta, X_b, y) - J(last_theta, X_b, y)) < epsilon:\n break\n i_iter += 1\n return theta\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = gradient_descent(X_b, y_train, initial_theta, eta, n_iter\n )\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n return self\n\n def fit_sgd(self, X_train, y_train, n_iter=5, t0=5, t1=50):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def dJ_sgd(theta, X_b_i, y_i):\n return X_b_i.T.dot(X_b_i.dot(theta) - y_i) * 2\n\n def sgd(X_b, y, initial_theta, n_iter, t0=5, t1=50):\n\n def learning_rate(t):\n return t0 / (t + t1)\n theta = initial_theta\n m = len(X_b)\n for curr_iter in range(n_iter):\n indexes = np.random.permutation(m)\n X_b_new = X_b[indexes]\n y_new = y[indexes]\n for i in range(m):\n gradient = dJ_sgd(theta, X_b_new[i], y_new[i])\n theta = theta - learning_rate(curr_iter * m + i) * gradient\n return theta\n X_b = np.hstack([np.ones([len(X_train), 
1]), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = sgd(X_b, y_train, initial_theta, n_iter)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n\n def predict(self, X_predict):\n assert self.interception_ is not None and self.coef_ is not None, 'must fit before predict'\n assert X_predict.shape[1] == len(self.coef_\n ), 'the feature number of X_predict must be equal to X_train'\n X_b = np.hstack([np.ones((len(X_predict), 1)), X_predict])\n y_predict = X_b.dot(self._theta)\n return y_predict\n\n def score(self, X_test, y_test):\n y_predict = self.predict(X_test)\n return r2_score(y_test, y_predict)\n\n def __repr__(self):\n return 'LinearRegression()'\n",
"step-4": "import numpy as np\nfrom .metrics import r2_score\n\n\nclass LinearRegression:\n\n def __init__(self):\n self.coef_ = None\n self.interception_ = None\n self._theta = None\n\n def fit_normal(self, X_train, y_train):\n assert X_train.shape[0] == y_train.shape[0], ''\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n self._theta = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y_train)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n return self\n\n def fit_gd(self, X_train, y_train, eta=0.01, n_iter=10000.0):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def J(theta, X_b, y):\n try:\n return np.sum((y - X_b.dot(theta)) ** 2) / len(X_b)\n except:\n return float('inf')\n\n def dJ(theta, X_b, y):\n return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(X_b)\n\n def gradient_descent(X_b, y, initial_theta, eta, n_iter, epsilon=1e-08\n ):\n theta = initial_theta\n i_iter = 0\n while i_iter < n_iter:\n gradient = dJ(theta, X_b, y)\n last_theta = theta\n theta = theta - eta * gradient\n if abs(J(theta, X_b, y) - J(last_theta, X_b, y)) < epsilon:\n break\n i_iter += 1\n return theta\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = gradient_descent(X_b, y_train, initial_theta, eta, n_iter\n )\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n return self\n\n def fit_sgd(self, X_train, y_train, n_iter=5, t0=5, t1=50):\n assert X_train.shape[0] == y_train.shape[0], ''\n\n def dJ_sgd(theta, X_b_i, y_i):\n return X_b_i.T.dot(X_b_i.dot(theta) - y_i) * 2\n\n def sgd(X_b, y, initial_theta, n_iter, t0=5, t1=50):\n\n def learning_rate(t):\n return t0 / (t + t1)\n theta = initial_theta\n m = len(X_b)\n for curr_iter in range(n_iter):\n indexes = np.random.permutation(m)\n X_b_new = X_b[indexes]\n y_new = y[indexes]\n for i in range(m):\n gradient = dJ_sgd(theta, X_b_new[i], y_new[i])\n theta = theta - learning_rate(curr_iter * m + i) * gradient\n return theta\n 
X_b = np.hstack([np.ones([len(X_train), 1]), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = sgd(X_b, y_train, initial_theta, n_iter)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n\n def predict(self, X_predict):\n assert self.interception_ is not None and self.coef_ is not None, 'must fit before predict'\n assert X_predict.shape[1] == len(self.coef_\n ), 'the feature number of X_predict must be equal to X_train'\n X_b = np.hstack([np.ones((len(X_predict), 1)), X_predict])\n y_predict = X_b.dot(self._theta)\n return y_predict\n\n def score(self, X_test, y_test):\n y_predict = self.predict(X_test)\n return r2_score(y_test, y_predict)\n\n def __repr__(self):\n return 'LinearRegression()'\n",
"step-5": "import numpy as np\nfrom .metrics import r2_score\n\nclass LinearRegression:\n\n def __init__(self):\n self.coef_ = None # 系数\n self.interception_ = None # 截距\n self._theta = None\n\n def fit_normal(self, X_train, y_train):\n assert X_train.shape[0] == y_train.shape[0], \"\"\n\n #!!!important\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n self._theta = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y_train)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n\n return self\n\n def fit_gd(self, X_train, y_train, eta=0.01, n_iter=1e4):\n\n assert X_train.shape[0] == y_train.shape[0], \"\"\n def J(theta, X_b, y):\n try:\n return np.sum((y - X_b.dot(theta)) ** 2) / len(X_b)\n except:\n return float('inf')\n\n def dJ(theta, X_b, y):\n # 向量化实现\n return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(X_b)\n\n def gradient_descent(X_b, y, initial_theta, eta, n_iter, epsilon=1e-8):\n theta = initial_theta\n i_iter = 0\n while i_iter < n_iter:\n gradient = dJ(theta, X_b, y)\n last_theta = theta\n theta = theta - eta * gradient\n if (abs(J(theta, X_b, y) - J(last_theta, X_b, y)) < epsilon):\n break\n i_iter += 1\n return theta\n\n X_b = np.hstack([np.ones((len(X_train), 1)), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = gradient_descent(X_b, y_train, initial_theta, eta, n_iter)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n\n return self\n\n # n_iter 代表观测所有数据几次\n def fit_sgd(self, X_train, y_train, n_iter=5, t0=5, t1=50):\n\n assert X_train.shape[0] == y_train.shape[0], \"\"\n def dJ_sgd(theta, X_b_i, y_i):\n return X_b_i.T.dot(X_b_i.dot(theta) - y_i) * 2\n\n # Stochastic gradient descent\n def sgd(X_b, y, initial_theta, n_iter, t0=5, t1=50):\n\n def learning_rate(t):\n return t0 / (t + t1)\n\n theta = initial_theta\n m = len(X_b)\n for curr_iter in range(n_iter):\n indexes = np.random.permutation(m)\n X_b_new = X_b[indexes]\n y_new = y[indexes]\n for i in range(m):\n gradient = dJ_sgd(theta, 
X_b_new[i], y_new[i])\n theta = theta - learning_rate(curr_iter * m + i) * gradient\n\n return theta\n\n X_b = np.hstack([np.ones([len(X_train), 1]), X_train])\n initial_theta = np.zeros(X_b.shape[1])\n self._theta = sgd(X_b, y_train, initial_theta, n_iter)\n self.interception_ = self._theta[0]\n self.coef_ = self._theta[1:]\n\n def predict(self,X_predict):\n assert self.interception_ is not None and self.coef_ is not None,\\\n \"must fit before predict\"\n assert X_predict.shape[1] == len(self.coef_),\\\n \"the feature number of X_predict must be equal to X_train\"\n X_b = np.hstack([np.ones((len(X_predict), 1)), X_predict])\n y_predict = X_b.dot(self._theta)\n return y_predict\n\n def score(self,X_test,y_test):\n y_predict = self.predict(X_test)\n return r2_score(y_test,y_predict)\n\n def __repr__(self):\n return \"LinearRegression()\"",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = []
operations = [migrations.CreateModel(name='Author', fields=[('id',
models.AutoField(auto_created=True, serialize=False, primary_key=
True, verbose_name='ID')), ('name', models.CharField(max_length=60)
), ('email', models.EmailField(max_length=100)), ('telephone',
models.CharField(max_length=12)), ('cellphone', models.CharField(
max_length=12)), ('img', models.ImageField(upload_to='')), ('role',
models.IntegerField(default=5, choices=[(0, 'Pastor titular'), (1,
'Pastor/a'), (2, 'Diacono/a'), (3, 'Editor/a')]))]), migrations.
CreateModel(name='Preach', fields=[('id', models.AutoField(
auto_created=True, serialize=False, primary_key=True, verbose_name=
'ID')), ('title', models.CharField(max_length=60)), ('summary',
models.CharField(blank=True, max_length=500)), ('date', models.
DateField()), ('published_date', models.DateField(default=datetime.
datetime(2017, 5, 7, 2, 3, 52, 71419))), ('url', models.URLField()),
('img', models.ImageField(verbose_name='Imagen', upload_to='images'
)), ('author', models.ForeignKey(to='preaches.Author'))]),
migrations.CreateModel(name='Social_media', fields=[('id', models.
AutoField(auto_created=True, serialize=False, primary_key=True,
verbose_name='ID')), ('name', models.IntegerField(default=0,
verbose_name='Nombre de la red social', choices=[(0, 'Facebook'), (
1, 'Instagram'), (2, 'Twitter')])), ('url', models.URLField())]),
migrations.CreateModel(name='Tags', fields=[('id', models.AutoField
(auto_created=True, serialize=False, primary_key=True, verbose_name
='ID')), ('name', models.CharField(verbose_name='Categoria',
max_length=80))]), migrations.AddField(model_name='preach', name=
'tags', field=models.ManyToManyField(to='preaches.Tags')),
migrations.AddField(model_name='author', name='social_media', field
=models.ManyToManyField(to='preaches.Social_media'))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
class Migration(migrations.Migration):
dependencies = []
operations = [migrations.CreateModel(name='Author', fields=[('id',
models.AutoField(auto_created=True, serialize=False, primary_key=
True, verbose_name='ID')), ('name', models.CharField(max_length=60)
), ('email', models.EmailField(max_length=100)), ('telephone',
models.CharField(max_length=12)), ('cellphone', models.CharField(
max_length=12)), ('img', models.ImageField(upload_to='')), ('role',
models.IntegerField(default=5, choices=[(0, 'Pastor titular'), (1,
'Pastor/a'), (2, 'Diacono/a'), (3, 'Editor/a')]))]), migrations.
CreateModel(name='Preach', fields=[('id', models.AutoField(
auto_created=True, serialize=False, primary_key=True, verbose_name=
'ID')), ('title', models.CharField(max_length=60)), ('summary',
models.CharField(blank=True, max_length=500)), ('date', models.
DateField()), ('published_date', models.DateField(default=datetime.
datetime(2017, 5, 7, 2, 3, 52, 71419))), ('url', models.URLField()),
('img', models.ImageField(verbose_name='Imagen', upload_to='images'
)), ('author', models.ForeignKey(to='preaches.Author'))]),
migrations.CreateModel(name='Social_media', fields=[('id', models.
AutoField(auto_created=True, serialize=False, primary_key=True,
verbose_name='ID')), ('name', models.IntegerField(default=0,
verbose_name='Nombre de la red social', choices=[(0, 'Facebook'), (
1, 'Instagram'), (2, 'Twitter')])), ('url', models.URLField())]),
migrations.CreateModel(name='Tags', fields=[('id', models.AutoField
(auto_created=True, serialize=False, primary_key=True, verbose_name
='ID')), ('name', models.CharField(verbose_name='Categoria',
max_length=80))]), migrations.AddField(model_name='preach', name=
'tags', field=models.ManyToManyField(to='preaches.Tags')),
migrations.AddField(model_name='author', name='social_media', field
=models.ManyToManyField(to='preaches.Social_media'))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Author',
fields=[
('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
('name', models.CharField(max_length=60)),
('email', models.EmailField(max_length=100)),
('telephone', models.CharField(max_length=12)),
('cellphone', models.CharField(max_length=12)),
('img', models.ImageField(upload_to='')),
('role', models.IntegerField(default=5, choices=[(0, 'Pastor titular'), (1, 'Pastor/a'), (2, 'Diacono/a'), (3, 'Editor/a')])),
],
),
migrations.CreateModel(
name='Preach',
fields=[
('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
('title', models.CharField(max_length=60)),
('summary', models.CharField(blank=True, max_length=500)),
('date', models.DateField()),
('published_date', models.DateField(default=datetime.datetime(2017, 5, 7, 2, 3, 52, 71419))),
('url', models.URLField()),
('img', models.ImageField(verbose_name='Imagen', upload_to='images')),
('author', models.ForeignKey(to='preaches.Author')),
],
),
migrations.CreateModel(
name='Social_media',
fields=[
('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
('name', models.IntegerField(default=0, verbose_name='Nombre de la red social', choices=[(0, 'Facebook'), (1, 'Instagram'), (2, 'Twitter')])),
('url', models.URLField()),
],
),
migrations.CreateModel(
name='Tags',
fields=[
('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
('name', models.CharField(verbose_name='Categoria', max_length=80)),
],
),
migrations.AddField(
model_name='preach',
name='tags',
field=models.ManyToManyField(to='preaches.Tags'),
),
migrations.AddField(
model_name='author',
name='social_media',
field=models.ManyToManyField(to='preaches.Social_media'),
),
]
|
flexible
|
{
"blob_id": "4a118f9081a8b3baf0b074c8dc14eaeef4559c08",
"index": 6684,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = []\n operations = [migrations.CreateModel(name='Author', fields=[('id',\n models.AutoField(auto_created=True, serialize=False, primary_key=\n True, verbose_name='ID')), ('name', models.CharField(max_length=60)\n ), ('email', models.EmailField(max_length=100)), ('telephone',\n models.CharField(max_length=12)), ('cellphone', models.CharField(\n max_length=12)), ('img', models.ImageField(upload_to='')), ('role',\n models.IntegerField(default=5, choices=[(0, 'Pastor titular'), (1,\n 'Pastor/a'), (2, 'Diacono/a'), (3, 'Editor/a')]))]), migrations.\n CreateModel(name='Preach', fields=[('id', models.AutoField(\n auto_created=True, serialize=False, primary_key=True, verbose_name=\n 'ID')), ('title', models.CharField(max_length=60)), ('summary',\n models.CharField(blank=True, max_length=500)), ('date', models.\n DateField()), ('published_date', models.DateField(default=datetime.\n datetime(2017, 5, 7, 2, 3, 52, 71419))), ('url', models.URLField()),\n ('img', models.ImageField(verbose_name='Imagen', upload_to='images'\n )), ('author', models.ForeignKey(to='preaches.Author'))]),\n migrations.CreateModel(name='Social_media', fields=[('id', models.\n AutoField(auto_created=True, serialize=False, primary_key=True,\n verbose_name='ID')), ('name', models.IntegerField(default=0,\n verbose_name='Nombre de la red social', choices=[(0, 'Facebook'), (\n 1, 'Instagram'), (2, 'Twitter')])), ('url', models.URLField())]),\n migrations.CreateModel(name='Tags', fields=[('id', models.AutoField\n (auto_created=True, serialize=False, primary_key=True, verbose_name\n ='ID')), ('name', models.CharField(verbose_name='Categoria',\n max_length=80))]), migrations.AddField(model_name='preach', name=\n 'tags', field=models.ManyToManyField(to='preaches.Tags')),\n migrations.AddField(model_name='author', name='social_media', field\n =models.ManyToManyField(to='preaches.Social_media'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport datetime\n\n\nclass Migration(migrations.Migration):\n dependencies = []\n operations = [migrations.CreateModel(name='Author', fields=[('id',\n models.AutoField(auto_created=True, serialize=False, primary_key=\n True, verbose_name='ID')), ('name', models.CharField(max_length=60)\n ), ('email', models.EmailField(max_length=100)), ('telephone',\n models.CharField(max_length=12)), ('cellphone', models.CharField(\n max_length=12)), ('img', models.ImageField(upload_to='')), ('role',\n models.IntegerField(default=5, choices=[(0, 'Pastor titular'), (1,\n 'Pastor/a'), (2, 'Diacono/a'), (3, 'Editor/a')]))]), migrations.\n CreateModel(name='Preach', fields=[('id', models.AutoField(\n auto_created=True, serialize=False, primary_key=True, verbose_name=\n 'ID')), ('title', models.CharField(max_length=60)), ('summary',\n models.CharField(blank=True, max_length=500)), ('date', models.\n DateField()), ('published_date', models.DateField(default=datetime.\n datetime(2017, 5, 7, 2, 3, 52, 71419))), ('url', models.URLField()),\n ('img', models.ImageField(verbose_name='Imagen', upload_to='images'\n )), ('author', models.ForeignKey(to='preaches.Author'))]),\n migrations.CreateModel(name='Social_media', fields=[('id', models.\n AutoField(auto_created=True, serialize=False, primary_key=True,\n verbose_name='ID')), ('name', models.IntegerField(default=0,\n verbose_name='Nombre de la red social', choices=[(0, 'Facebook'), (\n 1, 'Instagram'), (2, 'Twitter')])), ('url', models.URLField())]),\n migrations.CreateModel(name='Tags', fields=[('id', models.AutoField\n (auto_created=True, serialize=False, primary_key=True, verbose_name\n ='ID')), ('name', models.CharField(verbose_name='Categoria',\n max_length=80))]), migrations.AddField(model_name='preach', name=\n 'tags', field=models.ManyToManyField(to='preaches.Tags')),\n migrations.AddField(model_name='author', name='social_media', field\n 
=models.ManyToManyField(to='preaches.Social_media'))]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport datetime\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Author',\n fields=[\n ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),\n ('name', models.CharField(max_length=60)),\n ('email', models.EmailField(max_length=100)),\n ('telephone', models.CharField(max_length=12)),\n ('cellphone', models.CharField(max_length=12)),\n ('img', models.ImageField(upload_to='')),\n ('role', models.IntegerField(default=5, choices=[(0, 'Pastor titular'), (1, 'Pastor/a'), (2, 'Diacono/a'), (3, 'Editor/a')])),\n ],\n ),\n migrations.CreateModel(\n name='Preach',\n fields=[\n ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),\n ('title', models.CharField(max_length=60)),\n ('summary', models.CharField(blank=True, max_length=500)),\n ('date', models.DateField()),\n ('published_date', models.DateField(default=datetime.datetime(2017, 5, 7, 2, 3, 52, 71419))),\n ('url', models.URLField()),\n ('img', models.ImageField(verbose_name='Imagen', upload_to='images')),\n ('author', models.ForeignKey(to='preaches.Author')),\n ],\n ),\n migrations.CreateModel(\n name='Social_media',\n fields=[\n ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),\n ('name', models.IntegerField(default=0, verbose_name='Nombre de la red social', choices=[(0, 'Facebook'), (1, 'Instagram'), (2, 'Twitter')])),\n ('url', models.URLField()),\n ],\n ),\n migrations.CreateModel(\n name='Tags',\n fields=[\n ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),\n ('name', models.CharField(verbose_name='Categoria', max_length=80)),\n ],\n ),\n migrations.AddField(\n model_name='preach',\n name='tags',\n 
field=models.ManyToManyField(to='preaches.Tags'),\n ),\n migrations.AddField(\n model_name='author',\n name='social_media',\n field=models.ManyToManyField(to='preaches.Social_media'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(response.text)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
response = requests.get(
'https://any-api.com:8443/https://rbaskets.in/api/version')
print(response.text)
<|reserved_special_token_1|>
import requests
response = requests.get(
'https://any-api.com:8443/https://rbaskets.in/api/version')
print(response.text)
|
flexible
|
{
"blob_id": "ab36b3d418be67080e2efaba15edc1354386e191",
"index": 6888,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(response.text)\n",
"step-3": "<mask token>\nresponse = requests.get(\n 'https://any-api.com:8443/https://rbaskets.in/api/version')\nprint(response.text)\n",
"step-4": "import requests\nresponse = requests.get(\n 'https://any-api.com:8443/https://rbaskets.in/api/version')\nprint(response.text)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def get_diabetes_data(target='progression'):
"""Get the SKLearn Diabetes regression dataset, formatted as a DataFrame
Parameters
----------
target: String, default='progression'
What to name the column in `df` that contains the target output values
Returns
-------
df: `pandas.DataFrame`
The diabetes dataset, with friendly column names"""
data = load_diabetes()
df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in
data.feature_names])
df[target] = data.target
return df
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_diabetes_data(target='progression'):
"""Get the SKLearn Diabetes regression dataset, formatted as a DataFrame
Parameters
----------
target: String, default='progression'
What to name the column in `df` that contains the target output values
Returns
-------
df: `pandas.DataFrame`
The diabetes dataset, with friendly column names"""
data = load_diabetes()
df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in
data.feature_names])
df[target] = data.target
return df
def get_toy_classification_data(target='target', n_samples=300, n_classes=2,
shuffle=True, random_state=32, **kwargs):
"""Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`"""
x, y = make_classification(n_samples=n_samples, n_classes=n_classes,
shuffle=shuffle, random_state=random_state, **kwargs)
train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))
train_df[target] = y
return train_df
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_breast_cancer_data(target='diagnosis'):
"""Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame
Parameters
----------
target: String, default='diagnosis'
What to name the column in `df` that contains the target output values
Returns
-------
df: `pandas.DataFrame`
The breast cancer dataset, with friendly column names"""
data = load_breast_cancer()
df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in
data.feature_names])
df[target] = data.target
return df
def get_diabetes_data(target='progression'):
"""Get the SKLearn Diabetes regression dataset, formatted as a DataFrame
Parameters
----------
target: String, default='progression'
What to name the column in `df` that contains the target output values
Returns
-------
df: `pandas.DataFrame`
The diabetes dataset, with friendly column names"""
data = load_diabetes()
df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in
data.feature_names])
df[target] = data.target
return df
def get_toy_classification_data(target='target', n_samples=300, n_classes=2,
shuffle=True, random_state=32, **kwargs):
"""Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`"""
x, y = make_classification(n_samples=n_samples, n_classes=n_classes,
shuffle=shuffle, random_state=random_state, **kwargs)
train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))
train_df[target] = y
return train_df
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pandas as pd
from sklearn.datasets import load_breast_cancer, make_classification, load_diabetes
def get_breast_cancer_data(target='diagnosis'):
"""Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame
Parameters
----------
target: String, default='diagnosis'
What to name the column in `df` that contains the target output values
Returns
-------
df: `pandas.DataFrame`
The breast cancer dataset, with friendly column names"""
data = load_breast_cancer()
df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in
data.feature_names])
df[target] = data.target
return df
def get_diabetes_data(target='progression'):
"""Get the SKLearn Diabetes regression dataset, formatted as a DataFrame
Parameters
----------
target: String, default='progression'
What to name the column in `df` that contains the target output values
Returns
-------
df: `pandas.DataFrame`
The diabetes dataset, with friendly column names"""
data = load_diabetes()
df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in
data.feature_names])
df[target] = data.target
return df
def get_toy_classification_data(target='target', n_samples=300, n_classes=2,
shuffle=True, random_state=32, **kwargs):
"""Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`"""
x, y = make_classification(n_samples=n_samples, n_classes=n_classes,
shuffle=shuffle, random_state=random_state, **kwargs)
train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))
train_df[target] = y
return train_df
<|reserved_special_token_1|>
"""This module defines simple utilities for making toy datasets to be used in testing/examples"""
##################################################
# Import Miscellaneous Assets
##################################################
import pandas as pd
###############################################
# Import Learning Assets
###############################################
from sklearn.datasets import load_breast_cancer, make_classification, load_diabetes
##################################################
# Dataset Utilities
##################################################
def get_breast_cancer_data(target="diagnosis"):
    """Load the Wisconsin Breast Cancer classification dataset as a DataFrame.

    Parameters
    ----------
    target: String, default='diagnosis'
        Name for the column in the result that holds the target output values

    Returns
    -------
    df: `pandas.DataFrame`
        The breast cancer dataset, with spaces in feature names replaced by
        underscores"""
    bunch = load_breast_cancer()
    # Feature names contain spaces; normalize them to underscore-separated.
    friendly_names = ["_".join(name.split(" ")) for name in bunch.feature_names]
    df = pd.DataFrame(data=bunch.data, columns=friendly_names)
    df[target] = bunch.target
    return df
def get_diabetes_data(target="progression"):
    """Load the SKLearn Diabetes regression dataset as a DataFrame.

    Parameters
    ----------
    target: String, default='progression'
        Name for the column in the result that holds the target output values

    Returns
    -------
    df: `pandas.DataFrame`
        The diabetes dataset, with spaces in feature names replaced by
        underscores"""
    bunch = load_diabetes()
    # Feature names contain spaces; normalize them to underscore-separated.
    friendly_names = ["_".join(name.split(" ")) for name in bunch.feature_names]
    df = pd.DataFrame(data=bunch.data, columns=friendly_names)
    df[target] = bunch.target
    return df
def get_toy_classification_data(
    target="target", n_samples=300, n_classes=2, shuffle=True, random_state=32, **kwargs
):
    """Build a toy classification dataset as a `pandas.DataFrame`.

    Thin wrapper around `sklearn.datasets.make_classification`; any extra
    keyword arguments are forwarded to it unchanged. Features occupy
    integer-labeled columns and the labels are stored in the `target` column."""
    features, labels = make_classification(
        n_samples=n_samples, n_classes=n_classes, shuffle=shuffle,
        random_state=random_state, **kwargs
    )
    df = pd.DataFrame(data=features, columns=range(features.shape[1]))
    df[target] = labels
    return df
|
flexible
|
{
"blob_id": "285ca945696b32160175f15c4e89b3938f41ebf4",
"index": 2172,
"step-1": "<mask token>\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(target='target', n_samples=300, n_classes=2,\n shuffle=True, random_state=32, **kwargs):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(n_samples=n_samples, n_classes=n_classes,\n shuffle=shuffle, random_state=random_state, **kwargs)\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n",
"step-3": "<mask token>\n\n\ndef get_breast_cancer_data(target='diagnosis'):\n \"\"\"Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='diagnosis'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The breast cancer dataset, with friendly column names\"\"\"\n data = load_breast_cancer()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(target='target', n_samples=300, n_classes=2,\n shuffle=True, random_state=32, **kwargs):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(n_samples=n_samples, n_classes=n_classes,\n shuffle=shuffle, random_state=random_state, **kwargs)\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n",
"step-4": "<mask token>\nimport pandas as pd\nfrom sklearn.datasets import load_breast_cancer, make_classification, load_diabetes\n\n\ndef get_breast_cancer_data(target='diagnosis'):\n \"\"\"Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='diagnosis'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The breast cancer dataset, with friendly column names\"\"\"\n data = load_breast_cancer()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_diabetes_data(target='progression'):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(' ', '_') for _ in\n data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(target='target', n_samples=300, n_classes=2,\n shuffle=True, random_state=32, **kwargs):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(n_samples=n_samples, n_classes=n_classes,\n shuffle=shuffle, random_state=random_state, **kwargs)\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n",
"step-5": "\"\"\"This module defines simple utilities for making toy datasets to be used in testing/examples\"\"\"\n##################################################\n# Import Miscellaneous Assets\n##################################################\nimport pandas as pd\n\n###############################################\n# Import Learning Assets\n###############################################\nfrom sklearn.datasets import load_breast_cancer, make_classification, load_diabetes\n\n\n##################################################\n# Dataset Utilities\n##################################################\ndef get_breast_cancer_data(target=\"diagnosis\"):\n \"\"\"Get the Wisconsin Breast Cancer classification dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='diagnosis'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The breast cancer dataset, with friendly column names\"\"\"\n data = load_breast_cancer()\n df = pd.DataFrame(data=data.data, columns=[_.replace(\" \", \"_\") for _ in data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_diabetes_data(target=\"progression\"):\n \"\"\"Get the SKLearn Diabetes regression dataset, formatted as a DataFrame\n\n Parameters\n ----------\n target: String, default='progression'\n What to name the column in `df` that contains the target output values\n\n Returns\n -------\n df: `pandas.DataFrame`\n The diabetes dataset, with friendly column names\"\"\"\n data = load_diabetes()\n df = pd.DataFrame(data=data.data, columns=[_.replace(\" \", \"_\") for _ in data.feature_names])\n df[target] = data.target\n return df\n\n\ndef get_toy_classification_data(\n target=\"target\", n_samples=300, n_classes=2, shuffle=True, random_state=32, **kwargs\n):\n \"\"\"Wrapper around `sklearn.datasets.make_classification` to produce a `pandas.DataFrame`\"\"\"\n x, y = make_classification(\n n_samples=n_samples,\n 
n_classes=n_classes,\n shuffle=shuffle,\n random_state=random_state,\n **kwargs\n )\n train_df = pd.DataFrame(data=x, columns=range(x.shape[1]))\n train_df[target] = y\n return train_df\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from pirates.teleport.AreaTeleportActor import AreaTeleportActor
class DoorTeleportActor(AreaTeleportActor):
    # Door-specific teleport actor; currently inherits all behavior from
    # AreaTeleportActor unchanged (no overrides yet).
    pass
|
normal
|
{
"blob_id": "b679444fde7cd8eb819443922f37ee54c0f29de4",
"index": 424,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass DoorTeleportActor(AreaTeleportActor):\n pass\n",
"step-3": "from pirates.teleport.AreaTeleportActor import AreaTeleportActor\n\n\nclass DoorTeleportActor(AreaTeleportActor):\n pass\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class SessionRun:
def __init__(self, sessionId, cypher, params):
self.sessionId = sessionId
self.cypher = cypher
self.params = params
class SessionReadTransaction:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryablePositive:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryableNegative:
def __init__(self, sessionId, errorId=''):
self.sessionId = sessionId
self.errorId = errorId
class TransactionRun:
def __init__(self, txId, cypher, params):
self.txId = txId
self.cypher = cypher
self.params = params
<|reserved_special_token_0|>
class ResultNext:
def __init__(self, resultId):
self.resultId = resultId
class AuthorizationToken:
def __init__(self, scheme='none', principal='', credentials='', realm=
'', ticket=''):
self.scheme = scheme
self.principal = principal
self.credentials = credentials
self.realm = realm
self.ticket = ticket
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SessionClose:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class SessionRun:
def __init__(self, sessionId, cypher, params):
self.sessionId = sessionId
self.cypher = cypher
self.params = params
class SessionReadTransaction:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryablePositive:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryableNegative:
def __init__(self, sessionId, errorId=''):
self.sessionId = sessionId
self.errorId = errorId
class TransactionRun:
def __init__(self, txId, cypher, params):
self.txId = txId
self.cypher = cypher
self.params = params
<|reserved_special_token_0|>
class ResultNext:
def __init__(self, resultId):
self.resultId = resultId
class AuthorizationToken:
def __init__(self, scheme='none', principal='', credentials='', realm=
'', ticket=''):
self.scheme = scheme
self.principal = principal
self.credentials = credentials
self.realm = realm
self.ticket = ticket
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NewSession:
<|reserved_special_token_0|>
class SessionClose:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class SessionRun:
def __init__(self, sessionId, cypher, params):
self.sessionId = sessionId
self.cypher = cypher
self.params = params
class SessionReadTransaction:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryablePositive:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryableNegative:
def __init__(self, sessionId, errorId=''):
self.sessionId = sessionId
self.errorId = errorId
class TransactionRun:
def __init__(self, txId, cypher, params):
self.txId = txId
self.cypher = cypher
self.params = params
<|reserved_special_token_0|>
class ResultNext:
def __init__(self, resultId):
self.resultId = resultId
class AuthorizationToken:
def __init__(self, scheme='none', principal='', credentials='', realm=
'', ticket=''):
self.scheme = scheme
self.principal = principal
self.credentials = credentials
self.realm = realm
self.ticket = ticket
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DriverClose:
<|reserved_special_token_0|>
class NewSession:
def __init__(self, driverId, accessMode, bookmarks):
self.driverId = driverId
self.accessMode = accessMode
self.bookmarks = bookmarks
class SessionClose:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class SessionRun:
def __init__(self, sessionId, cypher, params):
self.sessionId = sessionId
self.cypher = cypher
self.params = params
class SessionReadTransaction:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryablePositive:
def __init__(self, sessionId):
self.sessionId = sessionId
<|reserved_special_token_0|>
class RetryableNegative:
def __init__(self, sessionId, errorId=''):
self.sessionId = sessionId
self.errorId = errorId
class TransactionRun:
def __init__(self, txId, cypher, params):
self.txId = txId
self.cypher = cypher
self.params = params
<|reserved_special_token_0|>
class ResultNext:
def __init__(self, resultId):
self.resultId = resultId
class AuthorizationToken:
def __init__(self, scheme='none', principal='', credentials='', realm=
'', ticket=''):
self.scheme = scheme
self.principal = principal
self.credentials = credentials
self.realm = realm
self.ticket = ticket
<|reserved_special_token_1|>
"""
All requests will be sent to backend as:
{
name: <class name>,
data: {
<all instance variables>
}
}
"""
class NewDriver:
    """Request to create a new driver from a URI and an authorization token.

    Serialized for the backend as ``{name: "NewDriver", data: {...}}`` per
    the request protocol described in the module docstring.
    """

    def __init__(self, uri, authToken):
        self.authorizationToken = authToken
        self.uri = uri
class DriverClose:
    """Request to close the driver identified by `driverId` on the backend."""

    def __init__(self, driverId):
        self.driverId = driverId
class NewSession:
    """Request a new session on an existing driver.

    Carries the driver id, the requested access mode, and the bookmarks
    to open the session with.
    """

    def __init__(self, driverId, accessMode, bookmarks):
        self.bookmarks = bookmarks
        self.accessMode = accessMode
        self.driverId = driverId
class SessionClose:
    """Request to close the session identified by `sessionId`."""

    def __init__(self, sessionId):
        self.sessionId = sessionId
"""
Response should be Result model or raised Error model
"""
class SessionRun:
    """Request to run `cypher` with `params` on the given session.

    The expected response is a Result model, or a raised Error model on
    failure.
    """

    def __init__(self, sessionId, cypher, params):
        self.params = params
        self.cypher = cypher
        self.sessionId = sessionId
class SessionReadTransaction:
    """Read-transaction request for the session identified by `sessionId`."""

    def __init__(self, sessionId):
        self.sessionId = sessionId
"""
Indicates a positive intent from the client application to commit the retryable transaction
"""
class RetryablePositive:
    """Signals the client's positive intent to commit the retryable transaction."""

    def __init__(self, sessionId):
        self.sessionId = sessionId
"""
Indicates a negative intent from the client application to commit the retryable transaction
"""
class RetryableNegative:
    """Signals the client's negative intent to commit the retryable transaction.

    `errorId` optionally identifies the error that triggered the rollback;
    it defaults to an empty string.
    """

    def __init__(self, sessionId, errorId=""):
        self.errorId = errorId
        self.sessionId = sessionId
class TransactionRun:
    """Request to run `cypher` with `params` inside the transaction `txId`."""

    def __init__(self, txId, cypher, params):
        self.params = params
        self.cypher = cypher
        self.txId = txId
"""
Response should be Record model, NullRecord to indicate last record or raised Error model if record
couldn't be retrieved.
"""
class ResultNext:
    """Request the next record of the result identified by `resultId`.

    Per the note in the original module, the response should be a Record
    model, a NullRecord to indicate the last record, or a raised Error model.
    """

    def __init__(self, resultId):
        self.resultId = resultId
class AuthorizationToken:
    """Authentication token payload for `NewDriver`.

    Every field defaults to an empty string except `scheme`, which defaults
    to "none".
    """

    def __init__(self, scheme="none", principal="", credentials="", realm="", ticket=""):
        self.ticket = ticket
        self.realm = realm
        self.credentials = credentials
        self.principal = principal
        self.scheme = scheme
|
flexible
|
{
"blob_id": "dfcb095b26a21ba0c8ccc2a2c664bcfab29b8351",
"index": 8214,
"step-1": "<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-2": "<mask token>\n\n\nclass SessionClose:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-3": "<mask token>\n\n\nclass NewSession:\n <mask token>\n\n\nclass SessionClose:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-4": "<mask token>\n\n\nclass DriverClose:\n <mask token>\n\n\nclass NewSession:\n\n def __init__(self, driverId, accessMode, bookmarks):\n self.driverId = driverId\n self.accessMode = accessMode\n self.bookmarks = bookmarks\n\n\nclass SessionClose:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass SessionRun:\n\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryablePositive:\n\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n<mask token>\n\n\nclass RetryableNegative:\n\n def __init__(self, sessionId, errorId=''):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n<mask token>\n\n\nclass ResultNext:\n\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n\n def __init__(self, scheme='none', principal='', credentials='', realm=\n '', ticket=''):\n self.scheme = scheme\n self.principal = principal\n self.credentials = credentials\n self.realm = realm\n self.ticket = ticket\n",
"step-5": "\n\"\"\"\nAll requests will be sent to backend as:\n {\n name: <class name>,\n data: {\n <all instance variables>\n }\n }\n\"\"\"\n\nclass NewDriver:\n def __init__(self, uri, authToken):\n self.uri = uri\n self.authorizationToken = authToken\n\n\nclass DriverClose:\n def __init__(self, driverId):\n self.driverId = driverId\n\n\nclass NewSession:\n def __init__(self, driverId, accessMode, bookmarks):\n self.driverId = driverId\n self.accessMode = accessMode\n self.bookmarks = bookmarks\n\n\nclass SessionClose:\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n\"\"\"\nResponse should be Result model or raised Error model\n\"\"\"\nclass SessionRun:\n def __init__(self, sessionId, cypher, params):\n self.sessionId = sessionId\n self.cypher = cypher\n self.params = params\n\n\nclass SessionReadTransaction:\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n\"\"\"\nIndicates a positive intent from the client application to commit the retryable transaction\n\"\"\"\nclass RetryablePositive:\n def __init__(self, sessionId):\n self.sessionId = sessionId\n\n\n\"\"\"\nIndicates a negative intent from the client application to commit the retryable transaction\n\"\"\"\nclass RetryableNegative:\n def __init__(self, sessionId, errorId=\"\"):\n self.sessionId = sessionId\n self.errorId = errorId\n\n\nclass TransactionRun:\n def __init__(self, txId, cypher, params):\n self.txId = txId\n self.cypher = cypher\n self.params = params\n\n\n\"\"\"\nResponse should be Record model, NullRecord to indicate last record or raised Error model if record\ncouldn't be retrieved.\n\"\"\"\nclass ResultNext:\n def __init__(self, resultId):\n self.resultId = resultId\n\n\nclass AuthorizationToken:\n def __init__(self, scheme=\"none\", principal=\"\", credentials=\"\", realm=\"\", ticket=\"\"):\n self.scheme=scheme\n self.principal=principal\n self.credentials=credentials\n self.realm=realm\n self.ticket=ticket\n\n",
"step-ids": [
14,
16,
17,
19,
23
]
}
|
[
14,
16,
17,
19,
23
] |
# -*- coding: utf-8 -*-
"""Classify every image in the current directory with a trained CIFAR-10 model
and write the predictions to a CSV submission file.

Created on Wed Aug 19 05:29:19 2020

@author: Gaurav
"""
from tensorflow.keras.models import load_model
import cv2
import os
from tensorflow.keras.preprocessing.image import img_to_array
import numpy as np
import pandas as pd  # hoisted from mid-script: imports belong at the top

# Trained CIFAR-10 classifier (path is hard-coded to the author's machine).
model = load_model('E:/AI Application Implementation/trained_model/Classification/Cifar-10/cifar-2.h5')

# CIFAR-10 label names, indexed by the argmax of the model's prediction vector.
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']

filenames = os.listdir()
kept_files = []
labels = []
for name in filenames:
    img = cv2.imread(name)
    if img is None:
        # Not a readable image (directory, CSV, model file, ...) -- skip it
        # instead of crashing inside cv2.cvtColor on a None input.
        continue
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # OpenCV loads BGR; convert to RGB
    img = cv2.resize(img, (32, 32))             # CIFAR-10 input resolution
    img = img_to_array(img)
    img = np.expand_dims(img, axis=0)           # add batch dimension
    pred = model.predict(img)[0]
    labels.append(class_names[np.argmax(pred)])
    kept_files.append(name)
    print(name)

# NOTE: renamed from `dict`, which shadowed the builtin; only files that were
# actually classified are written, keeping the two columns the same length.
records = {"filename": kept_files, 'label': labels}
df = pd.DataFrame(records)
df.to_csv(r"E:\AI Application Implementation\trained_model\Classification\Cifar-10\sub.csv", index=False)
|
normal
|
{
"blob_id": "c3e2bd635a7ff558ed56e7fb35e8b10e1c660c88",
"index": 6804,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in arr:\n img = cv2.imread(i)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img = cv2.resize(img, (32, 32))\n img = img_to_array(img)\n img = np.expand_dims(img, axis=0)\n k = model.predict(img)[0]\n k = np.argmax(k)\n result.append(class_names[k])\n print(i)\n<mask token>\ndf.to_csv(\n 'E:\\\\AI Application Implementation\\\\trained_model\\\\Classification\\\\Cifar-10\\\\sub.csv'\n , index=False)\n",
"step-3": "<mask token>\nmodel = load_model(\n 'E:/AI Application Implementation/trained_model/Classification/Cifar-10/cifar-2.h5'\n )\nclass_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog',\n 'frog', 'horse', 'ship', 'truck']\narr = os.listdir()\nresult = []\nfor i in arr:\n img = cv2.imread(i)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img = cv2.resize(img, (32, 32))\n img = img_to_array(img)\n img = np.expand_dims(img, axis=0)\n k = model.predict(img)[0]\n k = np.argmax(k)\n result.append(class_names[k])\n print(i)\ndict = {'filename': arr, 'label': result}\n<mask token>\ndf = pd.DataFrame(dict)\ndf.to_csv(\n 'E:\\\\AI Application Implementation\\\\trained_model\\\\Classification\\\\Cifar-10\\\\sub.csv'\n , index=False)\n",
"step-4": "<mask token>\nfrom tensorflow.keras.models import load_model\nimport cv2\nimport os\nfrom tensorflow.keras.preprocessing.image import img_to_array\nimport numpy as np\nmodel = load_model(\n 'E:/AI Application Implementation/trained_model/Classification/Cifar-10/cifar-2.h5'\n )\nclass_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog',\n 'frog', 'horse', 'ship', 'truck']\narr = os.listdir()\nresult = []\nfor i in arr:\n img = cv2.imread(i)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img = cv2.resize(img, (32, 32))\n img = img_to_array(img)\n img = np.expand_dims(img, axis=0)\n k = model.predict(img)[0]\n k = np.argmax(k)\n result.append(class_names[k])\n print(i)\ndict = {'filename': arr, 'label': result}\nimport pandas as pd\ndf = pd.DataFrame(dict)\ndf.to_csv(\n 'E:\\\\AI Application Implementation\\\\trained_model\\\\Classification\\\\Cifar-10\\\\sub.csv'\n , index=False)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Aug 19 05:29:19 2020\r\n\r\n@author: Gaurav\r\n\"\"\"\r\nfrom tensorflow.keras.models import load_model\r\nimport cv2\r\nimport os\r\nfrom tensorflow.keras.preprocessing.image import img_to_array\r\nimport numpy as np\r\n\r\nmodel=load_model('E:/AI Application Implementation/trained_model/Classification/Cifar-10/cifar-2.h5')\r\n\r\nclass_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',\r\n 'dog', 'frog', 'horse', 'ship', 'truck']\r\n\r\n# img = cv2.imread(\"00004_test.png\")\r\n# img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\r\n# img = cv2.resize(img, (32, 32))\r\n# img = img_to_array(img)\r\n# img = np.expand_dims(img, axis=0)\r\n# k = model.predict(img)[0]\r\n# k=np.argmax(k)\r\n# print(class_names[k])\r\n\r\narr = os.listdir()\r\nresult=[]\r\nfor i in arr:\r\n img = cv2.imread(i)\r\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\r\n img = cv2.resize(img, (32, 32))\r\n img = img_to_array(img)\r\n img = np.expand_dims(img, axis=0)\r\n k = model.predict(img)[0]\r\n k=np.argmax(k)\r\n result.append(class_names[k])\r\n print(i)\r\n \r\n \r\ndict={\"filename\":arr,'label':result}\r\nimport pandas as pd\r\ndf=pd.DataFrame(dict)\r\ndf.to_csv(r\"E:\\AI Application Implementation\\trained_model\\Classification\\Cifar-10\\sub.csv\",index=False)\r\n\r\n# df=pd.read_csv(\"E:/AI Application Implementation/trained_model/Classification/Cifar-10/sub.csv\")\r\n# df.to_csv(r\"E:\\AI Application Implementation\\trained_model\\Classification\\Cifar-10\\sub.csv\",index=False)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def sketch(img, threshold=15):
"""
素描画生成
param img: Image实例
param threshold: 介于0到100
:return:
"""
if threshold < 0:
threshold = 0
if threshold > 100:
threshold = 100
if len(img.shape) == 3:
img = rgb2grey(img)
m, n = img.shape
diff = np.abs(img[:m - 1, :n - 1] - img[1:, 1:])
img = np.zeros((m - 1, n - 1))
img[diff < threshold / 255] = 1
return img
<|reserved_special_token_1|>
from skimage.color import rgb2grey
import numpy as np
def sketch(img, threshold=15):
"""
素描画生成
param img: Image实例
param threshold: 介于0到100
:return:
"""
if threshold < 0:
threshold = 0
if threshold > 100:
threshold = 100
if len(img.shape) == 3:
img = rgb2grey(img)
m, n = img.shape
diff = np.abs(img[:m - 1, :n - 1] - img[1:, 1:])
img = np.zeros((m - 1, n - 1))
img[diff < threshold / 255] = 1
return img
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : 河北雪域网络科技有限公司 A.Star
# @contact: [email protected]
# @site:
# @file: img_to_sketch.py
# @time: 2018/8/6 1:15
# @Software: PyCharm
from skimage.color import rgb2grey
import numpy as np
def sketch(img, threshold=15):
    """Generate a pencil-sketch style binary image.

    param img: image array; greyscale, or RGB (converted to grey first)
    param threshold: edge sensitivity, clamped into the range 0..100
    :return: (m-1, n-1) array holding 1 where each pixel is close to its
        lower-right diagonal neighbour, 0 where the jump is large (an edge)
    """
    # Clamp the sensitivity into [0, 100].
    threshold = min(max(threshold, 0), 100)
    if img.ndim == 3:
        img = rgb2grey(img)
    rows, cols = img.shape
    # Absolute difference between each pixel and its diagonal neighbour.
    diagonal_diff = np.abs(img[:rows - 1, :cols - 1] - img[1:, 1:])
    out = np.zeros((rows - 1, cols - 1))
    out[diagonal_diff < threshold / 255] = 1
    return out
|
flexible
|
{
"blob_id": "065354d2a8fd8a75e16bf85f624b12641377029a",
"index": 8568,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef sketch(img, threshold=15):\n \"\"\"\n 素描画生成\n param img: Image实例\n param threshold: 介于0到100\n :return:\n \"\"\"\n if threshold < 0:\n threshold = 0\n if threshold > 100:\n threshold = 100\n if len(img.shape) == 3:\n img = rgb2grey(img)\n m, n = img.shape\n diff = np.abs(img[:m - 1, :n - 1] - img[1:, 1:])\n img = np.zeros((m - 1, n - 1))\n img[diff < threshold / 255] = 1\n return img\n",
"step-3": "from skimage.color import rgb2grey\nimport numpy as np\n\n\ndef sketch(img, threshold=15):\n \"\"\"\n 素描画生成\n param img: Image实例\n param threshold: 介于0到100\n :return:\n \"\"\"\n if threshold < 0:\n threshold = 0\n if threshold > 100:\n threshold = 100\n if len(img.shape) == 3:\n img = rgb2grey(img)\n m, n = img.shape\n diff = np.abs(img[:m - 1, :n - 1] - img[1:, 1:])\n img = np.zeros((m - 1, n - 1))\n img[diff < threshold / 255] = 1\n return img\n",
"step-4": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# @Author : 河北雪域网络科技有限公司 A.Star\n# @contact: [email protected]\n# @site: \n# @file: img_to_sketch.py\n# @time: 2018/8/6 1:15\n# @Software: PyCharm\n\nfrom skimage.color import rgb2grey\nimport numpy as np\n\n\ndef sketch(img, threshold=15):\n \"\"\"\n 素描画生成\n param img: Image实例\n param threshold: 介于0到100\n :return:\n \"\"\"\n if threshold < 0:\n threshold = 0\n if threshold > 100:\n threshold = 100\n if len(img.shape) == 3:\n img = rgb2grey(img)\n m, n = img.shape\n diff = np.abs(img[:m - 1, :n - 1] - img[1:, 1:])\n img = np.zeros((m - 1, n - 1))\n img[diff < threshold/255] = 1\n return img\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sys
import os
arcpy_path = [r'D:\software\ArcGIS\python 27\ArcGIS10.2\Lib\site-packages',
r'D:\software\ArcGIS\Desktop 10.2\Desktop10.2\arcpy',
r'D:\software\ArcGIS\Desktop 10.2\Desktop10.2\bin',
r'D:\software\ArcGIS\Desktop 10.2\Desktop10.2\ArcToolbox\Scripts']
sys.path.extend(arcpy_path)
import arcpy
arcpy.gp.overweiteOutput = 1
def writePrj(shpPath, test):
prj = open(shpPath.split('.')[0] + '.prj', 'w')
prj.write(test)
prj.close()
def CreateCGCS2000prj(shpPath):
body = 'GEOGCS["CGCS_2000",DATUM["D_2000",SPHEROID["S_2000",6378137.0,298.2572221010041]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]'
writePrj(shpPath, body)
def CreateWGS84(shpPath):
body = 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]'
writePrj(shpPath, body)
def CreateBeijing54(shpPath):
body = 'GEOGCS["GCS_Beijing_1954",DATUM["D_Beijing_1954",SPHEROID["Krasovsky_1940",6378245.0,298.3]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]'
writePrj(shpPath, body)
def CreateXian54(shpPath):
body = 'GEOGCS["GCS_Xian_1980",DATUM["D_Xian_1980",SPHEROID["Xian_1980",6378140.0,298.257]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]'
writePrj(shpPath, body)
def CreatePoint(shpPath, pointList):
point = arcpy.Point()
pointGeoms = []
for pt in pointList:
point.X = pt[0]
point.Y = pt[1]
pointGeoms.append(arcpy.PointGeometry(point))
arcpy.CopyFeatures_management(pointGeoms, shpPath)
ptList =[[20.000,43.000],[25.500, 45.085],[26.574, 46.025], [28.131, 48.124]]
shpPath = r'D:\geodata\test\point.shp'
CreatePoint(shpPath, ptList)
CreateCGCS2000prj(shpPath)
|
normal
|
{
"blob_id": "eab2cdd92d3be5760f13e747b05ca902eaf9aca8",
"index": 8287,
"step-1": "<mask token>\n\n\ndef CreateCGCS2000prj(shpPath):\n body = (\n 'GEOGCS[\"CGCS_2000\",DATUM[\"D_2000\",SPHEROID[\"S_2000\",6378137.0,298.2572221010041]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef writePrj(shpPath, test):\n prj = open(shpPath.split('.')[0] + '.prj', 'w')\n prj.write(test)\n prj.close()\n\n\ndef CreateCGCS2000prj(shpPath):\n body = (\n 'GEOGCS[\"CGCS_2000\",DATUM[\"D_2000\",SPHEROID[\"S_2000\",6378137.0,298.2572221010041]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreateWGS84(shpPath):\n body = (\n 'GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137.0,298.257223563]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef writePrj(shpPath, test):\n prj = open(shpPath.split('.')[0] + '.prj', 'w')\n prj.write(test)\n prj.close()\n\n\ndef CreateCGCS2000prj(shpPath):\n body = (\n 'GEOGCS[\"CGCS_2000\",DATUM[\"D_2000\",SPHEROID[\"S_2000\",6378137.0,298.2572221010041]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreateWGS84(shpPath):\n body = (\n 'GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137.0,298.257223563]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreateBeijing54(shpPath):\n body = (\n 'GEOGCS[\"GCS_Beijing_1954\",DATUM[\"D_Beijing_1954\",SPHEROID[\"Krasovsky_1940\",6378245.0,298.3]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreateXian54(shpPath):\n body = (\n 'GEOGCS[\"GCS_Xian_1980\",DATUM[\"D_Xian_1980\",SPHEROID[\"Xian_1980\",6378140.0,298.257]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreatePoint(shpPath, pointList):\n point = arcpy.Point()\n pointGeoms = []\n for pt in pointList:\n point.X = pt[0]\n point.Y = pt[1]\n pointGeoms.append(arcpy.PointGeometry(point))\n arcpy.CopyFeatures_management(pointGeoms, shpPath)\n\n\n<mask token>\n",
"step-4": "import sys\nimport os\narcpy_path = ['D:\\\\software\\\\ArcGIS\\\\python 27\\\\ArcGIS10.2\\\\Lib\\\\site-packages'\n , 'D:\\\\software\\\\ArcGIS\\\\Desktop 10.2\\\\Desktop10.2\\\\arcpy',\n 'D:\\\\software\\\\ArcGIS\\\\Desktop 10.2\\\\Desktop10.2\\\\bin',\n 'D:\\\\software\\\\ArcGIS\\\\Desktop 10.2\\\\Desktop10.2\\\\ArcToolbox\\\\Scripts']\nsys.path.extend(arcpy_path)\nimport arcpy\narcpy.gp.overweiteOutput = 1\n\n\ndef writePrj(shpPath, test):\n prj = open(shpPath.split('.')[0] + '.prj', 'w')\n prj.write(test)\n prj.close()\n\n\ndef CreateCGCS2000prj(shpPath):\n body = (\n 'GEOGCS[\"CGCS_2000\",DATUM[\"D_2000\",SPHEROID[\"S_2000\",6378137.0,298.2572221010041]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreateWGS84(shpPath):\n body = (\n 'GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137.0,298.257223563]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreateBeijing54(shpPath):\n body = (\n 'GEOGCS[\"GCS_Beijing_1954\",DATUM[\"D_Beijing_1954\",SPHEROID[\"Krasovsky_1940\",6378245.0,298.3]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreateXian54(shpPath):\n body = (\n 'GEOGCS[\"GCS_Xian_1980\",DATUM[\"D_Xian_1980\",SPHEROID[\"Xian_1980\",6378140.0,298.257]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n )\n writePrj(shpPath, body)\n\n\ndef CreatePoint(shpPath, pointList):\n point = arcpy.Point()\n pointGeoms = []\n for pt in pointList:\n point.X = pt[0]\n point.Y = pt[1]\n pointGeoms.append(arcpy.PointGeometry(point))\n arcpy.CopyFeatures_management(pointGeoms, shpPath)\n\n\nptList = [[20.0, 43.0], [25.5, 45.085], [26.574, 46.025], [28.131, 48.124]]\nshpPath = 'D:\\\\geodata\\\\test\\\\point.shp'\nCreatePoint(shpPath, ptList)\nCreateCGCS2000prj(shpPath)\n",
"step-5": "import sys\nimport os\n\narcpy_path = [r'D:\\software\\ArcGIS\\python 27\\ArcGIS10.2\\Lib\\site-packages',\n r'D:\\software\\ArcGIS\\Desktop 10.2\\Desktop10.2\\arcpy',\n r'D:\\software\\ArcGIS\\Desktop 10.2\\Desktop10.2\\bin',\n r'D:\\software\\ArcGIS\\Desktop 10.2\\Desktop10.2\\ArcToolbox\\Scripts']\n\nsys.path.extend(arcpy_path)\n\nimport arcpy\narcpy.gp.overweiteOutput = 1\n\ndef writePrj(shpPath, test):\n prj = open(shpPath.split('.')[0] + '.prj', 'w')\n prj.write(test)\n prj.close()\n\ndef CreateCGCS2000prj(shpPath):\n body = 'GEOGCS[\"CGCS_2000\",DATUM[\"D_2000\",SPHEROID[\"S_2000\",6378137.0,298.2572221010041]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n writePrj(shpPath, body)\ndef CreateWGS84(shpPath):\n body = 'GEOGCS[\"GCS_WGS_1984\",DATUM[\"D_WGS_1984\",SPHEROID[\"WGS_1984\",6378137.0,298.257223563]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n writePrj(shpPath, body)\ndef CreateBeijing54(shpPath):\n body = 'GEOGCS[\"GCS_Beijing_1954\",DATUM[\"D_Beijing_1954\",SPHEROID[\"Krasovsky_1940\",6378245.0,298.3]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n writePrj(shpPath, body)\ndef CreateXian54(shpPath):\n body = 'GEOGCS[\"GCS_Xian_1980\",DATUM[\"D_Xian_1980\",SPHEROID[\"Xian_1980\",6378140.0,298.257]],PRIMEM[\"Greenwich\",0.0],UNIT[\"Degree\",0.0174532925199433]]'\n writePrj(shpPath, body)\n \n \ndef CreatePoint(shpPath, pointList):\n point = arcpy.Point()\n pointGeoms = []\n for pt in pointList:\n point.X = pt[0]\n point.Y = pt[1]\n pointGeoms.append(arcpy.PointGeometry(point))\n arcpy.CopyFeatures_management(pointGeoms, shpPath)\n\nptList =[[20.000,43.000],[25.500, 45.085],[26.574, 46.025], [28.131, 48.124]]\nshpPath = r'D:\\geodata\\test\\point.shp'\nCreatePoint(shpPath, ptList)\nCreateCGCS2000prj(shpPath)",
"step-ids": [
1,
3,
6,
9,
10
]
}
|
[
1,
3,
6,
9,
10
] |
<|reserved_special_token_0|>
def corr2d(X, K):
"""
定义二维互相关运算函数
:param X:输入数组
:param K: 核数组
:return:二维互相关的运算结果
"""
h, w = K.shape
Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))
for i in range(Y.shape[0]):
for j in range(Y.shape[1]):
Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),
dtype=tf.float32))
return Y
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def corr2d(X, K):
"""
定义二维互相关运算函数
:param X:输入数组
:param K: 核数组
:return:二维互相关的运算结果
"""
h, w = K.shape
Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))
for i in range(Y.shape[0]):
for j in range(Y.shape[1]):
Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),
dtype=tf.float32))
return Y
print('----------验证二维互相关运算的结果--------------')
<|reserved_special_token_0|>
print(corr2d(X, K))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def corr2d(X, K):
"""
定义二维互相关运算函数
:param X:输入数组
:param K: 核数组
:return:二维互相关的运算结果
"""
h, w = K.shape
Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))
for i in range(Y.shape[0]):
for j in range(Y.shape[1]):
Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),
dtype=tf.float32))
return Y
print('----------验证二维互相关运算的结果--------------')
X = tf.constant([[0, 1, 2], [3, 4, 5], [6, 7, 8]])
K = tf.constant([[0, 1], [2, 3]])
<|reserved_special_token_0|>
print(corr2d(X, K))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import tensorflow as tf
def corr2d(X, K):
"""
定义二维互相关运算函数
:param X:输入数组
:param K: 核数组
:return:二维互相关的运算结果
"""
h, w = K.shape
Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))
for i in range(Y.shape[0]):
for j in range(Y.shape[1]):
Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),
dtype=tf.float32))
return Y
print('----------验证二维互相关运算的结果--------------')
X = tf.constant([[0, 1, 2], [3, 4, 5], [6, 7, 8]])
K = tf.constant([[0, 1], [2, 3]])
<|reserved_special_token_0|>
print(corr2d(X, K))
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@File : corr2d.py
@Author : jeffsheng
@Date : 2020/1/3
@Desc : 卷积层中的互相关(cross-correlation)运算
卷积层需要学习的参数是:卷积核和偏置大小
"""
import tensorflow as tf
def corr2d(X, K):
"""
定义二维互相关运算函数
:param X:输入数组
:param K: 核数组
:return:二维互相关的运算结果
"""
h, w = K.shape
Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w +1)))
for i in range(Y.shape[0]):
for j in range(Y.shape[1]):
Y[i,j].assign(tf.cast(tf.reduce_sum(X[i:i+h, j:j+w] * K), dtype=tf.float32))
return Y
print("----------验证二维互相关运算的结果--------------")
X = tf.constant([[0,1,2], [3,4,5], [6,7,8]])
K = tf.constant([[0,1], [2,3]])
"""
<tf.Variable 'Variable:0' shape=(2, 2) dtype=float32, numpy=
array([[19., 25.],
[37., 43.]], dtype=float32)>
"""
print(corr2d(X, K))
|
flexible
|
{
"blob_id": "3f473701b186b5287258ba74e478cccdad0f29bf",
"index": 2463,
"step-1": "<mask token>\n\n\ndef corr2d(X, K):\n \"\"\"\n 定义二维互相关运算函数\n :param X:输入数组\n :param K: 核数组\n :return:二维互相关的运算结果\n \"\"\"\n h, w = K.shape\n Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))\n for i in range(Y.shape[0]):\n for j in range(Y.shape[1]):\n Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),\n dtype=tf.float32))\n return Y\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef corr2d(X, K):\n \"\"\"\n 定义二维互相关运算函数\n :param X:输入数组\n :param K: 核数组\n :return:二维互相关的运算结果\n \"\"\"\n h, w = K.shape\n Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))\n for i in range(Y.shape[0]):\n for j in range(Y.shape[1]):\n Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),\n dtype=tf.float32))\n return Y\n\n\nprint('----------验证二维互相关运算的结果--------------')\n<mask token>\nprint(corr2d(X, K))\n",
"step-3": "<mask token>\n\n\ndef corr2d(X, K):\n \"\"\"\n 定义二维互相关运算函数\n :param X:输入数组\n :param K: 核数组\n :return:二维互相关的运算结果\n \"\"\"\n h, w = K.shape\n Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))\n for i in range(Y.shape[0]):\n for j in range(Y.shape[1]):\n Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),\n dtype=tf.float32))\n return Y\n\n\nprint('----------验证二维互相关运算的结果--------------')\nX = tf.constant([[0, 1, 2], [3, 4, 5], [6, 7, 8]])\nK = tf.constant([[0, 1], [2, 3]])\n<mask token>\nprint(corr2d(X, K))\n",
"step-4": "<mask token>\nimport tensorflow as tf\n\n\ndef corr2d(X, K):\n \"\"\"\n 定义二维互相关运算函数\n :param X:输入数组\n :param K: 核数组\n :return:二维互相关的运算结果\n \"\"\"\n h, w = K.shape\n Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1)))\n for i in range(Y.shape[0]):\n for j in range(Y.shape[1]):\n Y[i, j].assign(tf.cast(tf.reduce_sum(X[i:i + h, j:j + w] * K),\n dtype=tf.float32))\n return Y\n\n\nprint('----------验证二维互相关运算的结果--------------')\nX = tf.constant([[0, 1, 2], [3, 4, 5], [6, 7, 8]])\nK = tf.constant([[0, 1], [2, 3]])\n<mask token>\nprint(corr2d(X, K))\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n\"\"\"\n@File : corr2d.py\n@Author : jeffsheng\n@Date : 2020/1/3\n@Desc : 卷积层中的互相关(cross-correlation)运算\n卷积层需要学习的参数是:卷积核和偏置大小\n\"\"\"\nimport tensorflow as tf\n\n\ndef corr2d(X, K):\n \"\"\"\n 定义二维互相关运算函数\n :param X:输入数组\n :param K: 核数组\n :return:二维互相关的运算结果\n \"\"\"\n h, w = K.shape\n Y = tf.Variable(tf.zeros((X.shape[0] - h + 1, X.shape[1] - w +1)))\n for i in range(Y.shape[0]):\n for j in range(Y.shape[1]):\n Y[i,j].assign(tf.cast(tf.reduce_sum(X[i:i+h, j:j+w] * K), dtype=tf.float32))\n return Y\n\n\nprint(\"----------验证二维互相关运算的结果--------------\")\nX = tf.constant([[0,1,2], [3,4,5], [6,7,8]])\nK = tf.constant([[0,1], [2,3]])\n\"\"\"\n<tf.Variable 'Variable:0' shape=(2, 2) dtype=float32, numpy=\narray([[19., 25.],\n [37., 43.]], dtype=float32)>\n\"\"\"\nprint(corr2d(X, K))\n\n\n\n\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
def _get_single_variable(self, name, shape=None, dtype=dtypes.float32, initializer=None, regularizer=None, partition_info=None, reuse=None, trainable=True, collections=None, caching_device=None, validate_shape=True, use_resource=None):
'Get or create a single Variable (e.g. a shard or entire variable).\n\n See the documentation of get_variable above (ignore partitioning components)\n for details.\n\n Args:\n name: see get_variable.\n shape: see get_variable.\n dtype: see get_variable.\n initializer: see get_variable.\n regularizer: see get_variable.\n partition_info: _PartitionInfo object.\n reuse: see get_variable.\n trainable: see get_variable.\n collections: see get_variable.\n caching_device: see get_variable.\n validate_shape: see get_variable.\n use_resource: see get_variable.\n\n Returns:\n A Variable. See documentation of get_variable above.\n\n Raises:\n ValueError: See documentation of get_variable above.\n '
initializing_from_value = False
if ((initializer is not None) and (not callable(initializer))):
initializing_from_value = True
if ((shape is not None) and initializing_from_value):
raise ValueError('If initializer is a constant, do not specify shape.')
should_check = (reuse is not None)
dtype = dtypes.as_dtype(dtype)
shape = tensor_shape.as_shape(shape)
if (name in self._vars):
if (should_check and (not reuse)):
tb = self._vars[name].op.traceback[::(- 1)]
tb = [x for x in tb if ('tensorflow/python' not in x[0])][:3]
raise ValueError(('Variable %s already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:\n\n%s' % (name, ''.join(traceback.format_list(tb)))))
found_var = self._vars[name]
if (not shape.is_compatible_with(found_var.get_shape())):
raise ValueError(('Trying to share variable %s, but specified shape %s and found shape %s.' % (name, shape, found_var.get_shape())))
if (not dtype.is_compatible_with(found_var.dtype)):
dtype_str = dtype.name
found_type_str = found_var.dtype.name
raise ValueError(('Trying to share variable %s, but specified dtype %s and found dtype %s.' % (name, dtype_str, found_type_str)))
return found_var
if (should_check and reuse):
raise ValueError(('Variable %s does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?' % name))
if ((not shape.is_fully_defined()) and (not initializing_from_value)):
raise ValueError(('Shape of a new variable (%s) must be fully defined, but instead was %s.' % (name, shape)))
if (initializer is None):
(initializer, initializing_from_value) = self._get_default_initializer(name=name, shape=shape, dtype=dtype)
with ops.control_dependencies(None):
if initializing_from_value:
init_val = initializer
variable_dtype = None
else:
if isinstance(initializer, type(init_ops.Initializer)):
initializer = initializer(dtype=dtype)
init_val = (lambda : initializer(shape.as_list(), dtype=dtype, partition_info=partition_info))
variable_dtype = dtype.base_dtype
if (use_resource is None):
use_resource = False
if use_resource:
v = resource_variable_ops.ResourceVariable(initial_value=init_val, name=name, trainable=trainable, collections=collections, caching_device=caching_device, dtype=variable_dtype, validate_shape=validate_shape)
else:
v = variables.Variable(initial_value=init_val, name=name, trainable=trainable, collections=collections, caching_device=caching_device, dtype=variable_dtype, validate_shape=validate_shape)
self._vars[name] = v
logging.vlog(1, 'Created variable %s with shape %s and init %s', v.name, format(shape), initializer)
if regularizer:
with ops.colocate_with(v.op):
with ops.name_scope((name + '/Regularizer/')):
loss = regularizer(v)
if (loss is not None):
logging.vlog(1, 'Applied regularizer to %s and added the result %s to REGULARIZATION_LOSSES.', v.name, loss.name)
ops.add_to_collection(ops.GraphKeys.REGULARIZATION_LOSSES, loss)
return v
|
normal
|
{
"blob_id": "51ef1c0f6a17e12b2324a80f962b2ce47cc05bcc",
"index": 1348,
"step-1": "<mask token>\n",
"step-2": "def _get_single_variable(self, name, shape=None, dtype=dtypes.float32,\n initializer=None, regularizer=None, partition_info=None, reuse=None,\n trainable=True, collections=None, caching_device=None, validate_shape=\n True, use_resource=None):\n \"\"\"Get or create a single Variable (e.g. a shard or entire variable).\n\n See the documentation of get_variable above (ignore partitioning components)\n for details.\n\n Args:\n name: see get_variable.\n shape: see get_variable.\n dtype: see get_variable.\n initializer: see get_variable.\n regularizer: see get_variable.\n partition_info: _PartitionInfo object.\n reuse: see get_variable.\n trainable: see get_variable.\n collections: see get_variable.\n caching_device: see get_variable.\n validate_shape: see get_variable.\n use_resource: see get_variable.\n\n Returns:\n A Variable. See documentation of get_variable above.\n\n Raises:\n ValueError: See documentation of get_variable above.\n \"\"\"\n initializing_from_value = False\n if initializer is not None and not callable(initializer):\n initializing_from_value = True\n if shape is not None and initializing_from_value:\n raise ValueError('If initializer is a constant, do not specify shape.')\n should_check = reuse is not None\n dtype = dtypes.as_dtype(dtype)\n shape = tensor_shape.as_shape(shape)\n if name in self._vars:\n if should_check and not reuse:\n tb = self._vars[name].op.traceback[::-1]\n tb = [x for x in tb if 'tensorflow/python' not in x[0]][:3]\n raise ValueError(\n \"\"\"Variable %s already exists, disallowed. Did you mean to set reuse=True in VarScope? 
Originally defined at:\n\n%s\"\"\"\n % (name, ''.join(traceback.format_list(tb))))\n found_var = self._vars[name]\n if not shape.is_compatible_with(found_var.get_shape()):\n raise ValueError(\n 'Trying to share variable %s, but specified shape %s and found shape %s.'\n % (name, shape, found_var.get_shape()))\n if not dtype.is_compatible_with(found_var.dtype):\n dtype_str = dtype.name\n found_type_str = found_var.dtype.name\n raise ValueError(\n 'Trying to share variable %s, but specified dtype %s and found dtype %s.'\n % (name, dtype_str, found_type_str))\n return found_var\n if should_check and reuse:\n raise ValueError(\n 'Variable %s does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?'\n % name)\n if not shape.is_fully_defined() and not initializing_from_value:\n raise ValueError(\n 'Shape of a new variable (%s) must be fully defined, but instead was %s.'\n % (name, shape))\n if initializer is None:\n initializer, initializing_from_value = self._get_default_initializer(\n name=name, shape=shape, dtype=dtype)\n with ops.control_dependencies(None):\n if initializing_from_value:\n init_val = initializer\n variable_dtype = None\n else:\n if isinstance(initializer, type(init_ops.Initializer)):\n initializer = initializer(dtype=dtype)\n init_val = lambda : initializer(shape.as_list(), dtype=dtype,\n partition_info=partition_info)\n variable_dtype = dtype.base_dtype\n if use_resource is None:\n use_resource = False\n if use_resource:\n v = resource_variable_ops.ResourceVariable(initial_value=init_val,\n name=name, trainable=trainable, collections=collections,\n caching_device=caching_device, dtype=variable_dtype,\n validate_shape=validate_shape)\n else:\n v = variables.Variable(initial_value=init_val, name=name, trainable\n =trainable, collections=collections, caching_device=\n caching_device, dtype=variable_dtype, validate_shape=validate_shape\n )\n self._vars[name] = v\n logging.vlog(1, 'Created variable %s with 
shape %s and init %s', v.name,\n format(shape), initializer)\n if regularizer:\n with ops.colocate_with(v.op):\n with ops.name_scope(name + '/Regularizer/'):\n loss = regularizer(v)\n if loss is not None:\n logging.vlog(1,\n 'Applied regularizer to %s and added the result %s to REGULARIZATION_LOSSES.'\n , v.name, loss.name)\n ops.add_to_collection(ops.GraphKeys.REGULARIZATION_LOSSES, loss\n )\n return v\n",
"step-3": "def _get_single_variable(self, name, shape=None, dtype=dtypes.float32, initializer=None, regularizer=None, partition_info=None, reuse=None, trainable=True, collections=None, caching_device=None, validate_shape=True, use_resource=None):\n 'Get or create a single Variable (e.g. a shard or entire variable).\\n\\n See the documentation of get_variable above (ignore partitioning components)\\n for details.\\n\\n Args:\\n name: see get_variable.\\n shape: see get_variable.\\n dtype: see get_variable.\\n initializer: see get_variable.\\n regularizer: see get_variable.\\n partition_info: _PartitionInfo object.\\n reuse: see get_variable.\\n trainable: see get_variable.\\n collections: see get_variable.\\n caching_device: see get_variable.\\n validate_shape: see get_variable.\\n use_resource: see get_variable.\\n\\n Returns:\\n A Variable. See documentation of get_variable above.\\n\\n Raises:\\n ValueError: See documentation of get_variable above.\\n '\n initializing_from_value = False\n if ((initializer is not None) and (not callable(initializer))):\n initializing_from_value = True\n if ((shape is not None) and initializing_from_value):\n raise ValueError('If initializer is a constant, do not specify shape.')\n should_check = (reuse is not None)\n dtype = dtypes.as_dtype(dtype)\n shape = tensor_shape.as_shape(shape)\n if (name in self._vars):\n if (should_check and (not reuse)):\n tb = self._vars[name].op.traceback[::(- 1)]\n tb = [x for x in tb if ('tensorflow/python' not in x[0])][:3]\n raise ValueError(('Variable %s already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:\\n\\n%s' % (name, ''.join(traceback.format_list(tb)))))\n found_var = self._vars[name]\n if (not shape.is_compatible_with(found_var.get_shape())):\n raise ValueError(('Trying to share variable %s, but specified shape %s and found shape %s.' 
% (name, shape, found_var.get_shape())))\n if (not dtype.is_compatible_with(found_var.dtype)):\n dtype_str = dtype.name\n found_type_str = found_var.dtype.name\n raise ValueError(('Trying to share variable %s, but specified dtype %s and found dtype %s.' % (name, dtype_str, found_type_str)))\n return found_var\n if (should_check and reuse):\n raise ValueError(('Variable %s does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=None in VarScope?' % name))\n if ((not shape.is_fully_defined()) and (not initializing_from_value)):\n raise ValueError(('Shape of a new variable (%s) must be fully defined, but instead was %s.' % (name, shape)))\n if (initializer is None):\n (initializer, initializing_from_value) = self._get_default_initializer(name=name, shape=shape, dtype=dtype)\n with ops.control_dependencies(None):\n if initializing_from_value:\n init_val = initializer\n variable_dtype = None\n else:\n if isinstance(initializer, type(init_ops.Initializer)):\n initializer = initializer(dtype=dtype)\n init_val = (lambda : initializer(shape.as_list(), dtype=dtype, partition_info=partition_info))\n variable_dtype = dtype.base_dtype\n if (use_resource is None):\n use_resource = False\n if use_resource:\n v = resource_variable_ops.ResourceVariable(initial_value=init_val, name=name, trainable=trainable, collections=collections, caching_device=caching_device, dtype=variable_dtype, validate_shape=validate_shape)\n else:\n v = variables.Variable(initial_value=init_val, name=name, trainable=trainable, collections=collections, caching_device=caching_device, dtype=variable_dtype, validate_shape=validate_shape)\n self._vars[name] = v\n logging.vlog(1, 'Created variable %s with shape %s and init %s', v.name, format(shape), initializer)\n if regularizer:\n with ops.colocate_with(v.op):\n with ops.name_scope((name + '/Regularizer/')):\n loss = regularizer(v)\n if (loss is not None):\n logging.vlog(1, 'Applied regularizer to %s and added the result %s to 
REGULARIZATION_LOSSES.', v.name, loss.name)\n ops.add_to_collection(ops.GraphKeys.REGULARIZATION_LOSSES, loss)\n return v",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def suck(f):
hamdevall = spamdevall = 0.0, 0.0
cost = 0.0
bestcost = 0.0
fp = 0
fn = 0
un = 0
fpp = 0.0
fnp = 0.0
unp = 0.0
htest = 0
stest = 0
get = f.readline
while 1:
line = get()
if line.startswith('-> <stat> tested'):
print(line, end=' ')
elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:
vals = line.split(';')
mean = float(vals[1].split()[-1])
sdev = float(vals[2].split()[-1])
val = mean, sdev
ntested = int(vals[0].split()[-2])
typ = vals[0].split()[2]
if line.find('for all runs') != -1:
if typ == 'Ham':
hamdevall = val
htest = ntested
else:
spamdevall = val
stest = ntested
elif line.startswith('-> best cost for all runs: $'):
bestcost = float(line.split('$')[-1])
elif line.startswith('-> <stat> all runs false positives: '):
fp = int(line.split()[-1])
elif line.startswith('-> <stat> all runs false negatives: '):
fn = int(line.split()[-1])
elif line.startswith('-> <stat> all runs unsure: '):
un = int(line.split()[-1])
elif line.startswith('-> <stat> all runs false positive %: '):
fpp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs false negative %: '):
fnp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs unsure %: '):
unp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs cost: '):
cost = float(line.split('$')[-1])
break
return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
hamdevall, spamdevall)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def suck(f):
hamdevall = spamdevall = 0.0, 0.0
cost = 0.0
bestcost = 0.0
fp = 0
fn = 0
un = 0
fpp = 0.0
fnp = 0.0
unp = 0.0
htest = 0
stest = 0
get = f.readline
while 1:
line = get()
if line.startswith('-> <stat> tested'):
print(line, end=' ')
elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:
vals = line.split(';')
mean = float(vals[1].split()[-1])
sdev = float(vals[2].split()[-1])
val = mean, sdev
ntested = int(vals[0].split()[-2])
typ = vals[0].split()[2]
if line.find('for all runs') != -1:
if typ == 'Ham':
hamdevall = val
htest = ntested
else:
spamdevall = val
stest = ntested
elif line.startswith('-> best cost for all runs: $'):
bestcost = float(line.split('$')[-1])
elif line.startswith('-> <stat> all runs false positives: '):
fp = int(line.split()[-1])
elif line.startswith('-> <stat> all runs false negatives: '):
fn = int(line.split()[-1])
elif line.startswith('-> <stat> all runs unsure: '):
un = int(line.split()[-1])
elif line.startswith('-> <stat> all runs false positive %: '):
fpp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs false negative %: '):
fnp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs unsure %: '):
unp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs cost: '):
cost = float(line.split('$')[-1])
break
return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
hamdevall, spamdevall)
def windowsfy(fn):
import os
if os.path.exists(fn + '.txt'):
return fn + '.txt'
else:
return fn
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def suck(f):
hamdevall = spamdevall = 0.0, 0.0
cost = 0.0
bestcost = 0.0
fp = 0
fn = 0
un = 0
fpp = 0.0
fnp = 0.0
unp = 0.0
htest = 0
stest = 0
get = f.readline
while 1:
line = get()
if line.startswith('-> <stat> tested'):
print(line, end=' ')
elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:
vals = line.split(';')
mean = float(vals[1].split()[-1])
sdev = float(vals[2].split()[-1])
val = mean, sdev
ntested = int(vals[0].split()[-2])
typ = vals[0].split()[2]
if line.find('for all runs') != -1:
if typ == 'Ham':
hamdevall = val
htest = ntested
else:
spamdevall = val
stest = ntested
elif line.startswith('-> best cost for all runs: $'):
bestcost = float(line.split('$')[-1])
elif line.startswith('-> <stat> all runs false positives: '):
fp = int(line.split()[-1])
elif line.startswith('-> <stat> all runs false negatives: '):
fn = int(line.split()[-1])
elif line.startswith('-> <stat> all runs unsure: '):
un = int(line.split()[-1])
elif line.startswith('-> <stat> all runs false positive %: '):
fpp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs false negative %: '):
fnp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs unsure %: '):
unp = float(line.split()[-1])
elif line.startswith('-> <stat> all runs cost: '):
cost = float(line.split('$')[-1])
break
return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
hamdevall, spamdevall)
def windowsfy(fn):
import os
if os.path.exists(fn + '.txt'):
return fn + '.txt'
else:
return fn
def table():
import getopt, sys
showMean = 0
fname = 'filename: '
fnam2 = ' '
ratio = 'ham:spam: '
rat2 = ' '
fptot = 'fp total: '
fpper = 'fp %: '
fntot = 'fn total: '
fnper = 'fn %: '
untot = 'unsure t: '
unper = 'unsure %: '
rcost = 'real cost:'
bcost = 'best cost:'
hmean = 'h mean: '
hsdev = 'h sdev: '
smean = 's mean: '
ssdev = 's sdev: '
meand = 'mean diff:'
kval = 'k: '
(tfptot) = (tfpper) = (tfntot) = (tfnper) = (tuntot) = (tunper) = (trcost
) = (tbcost) = (thmean) = (thsdev) = (tsmean) = (tssdev) = (tmeand) = (
tkval) = 0
args, fileargs = getopt.getopt(sys.argv[1:], 'm')
for arg, val in args:
if arg == '-m':
showMean = 1
for filename in fileargs:
filename = windowsfy(filename)
(htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost, hamdevall,
spamdevall) = suck(file(filename))
if filename.endswith('.txt'):
filename = filename[:-4]
filename = filename[filename.rfind('/') + 1:]
filename = filename[filename.rfind('\\') + 1:]
if len(fname) > len(fnam2):
fname += ' '
fname = fname[0:len(fnam2) + 12]
fnam2 += ' %11s' % filename
else:
fnam2 += ' '
fnam2 = fnam2[0:len(fname) + 12]
fname += ' %11s' % filename
if len(ratio) > len(rat2):
ratio += ' '
ratio = ratio[0:len(rat2) + 12]
rat2 += ' %11s' % ('%d:%d' % (htest, stest))
else:
rat2 += ' '
rat2 = rat2[0:len(ratio) + 12]
ratio += ' %11s' % ('%d:%d' % (htest, stest))
fptot += '%12d' % fp
tfptot += fp
fpper += '%12.2f' % fpp
tfpper += fpp
fntot += '%12d' % fn
tfntot += fn
fnper += '%12.2f' % fnp
tfnper += fnp
untot += '%12d' % un
tuntot += un
unper += '%12.2f' % unp
tunper += unp
rcost += '%12s' % ('$%.2f' % cost)
trcost += cost
bcost += '%12s' % ('$%.2f' % bestcost)
tbcost += bestcost
hmean += '%12.2f' % hamdevall[0]
thmean += hamdevall[0]
hsdev += '%12.2f' % hamdevall[1]
thsdev += hamdevall[1]
smean += '%12.2f' % spamdevall[0]
tsmean += spamdevall[0]
ssdev += '%12.2f' % spamdevall[1]
tssdev += spamdevall[1]
meand += '%12.2f' % (spamdevall[0] - hamdevall[0])
tmeand += spamdevall[0] - hamdevall[0]
k = (spamdevall[0] - hamdevall[0]) / (spamdevall[1] + hamdevall[1])
kval += '%12.2f' % k
tkval += k
nfiles = len(fileargs)
if nfiles and showMean:
fptot += '%12d' % (tfptot / nfiles)
fpper += '%12.2f' % (tfpper / nfiles)
fntot += '%12d' % (tfntot / nfiles)
fnper += '%12.2f' % (tfnper / nfiles)
untot += '%12d' % (tuntot / nfiles)
unper += '%12.2f' % (tunper / nfiles)
rcost += '%12s' % ('$%.2f' % (trcost / nfiles))
bcost += '%12s' % ('$%.2f' % (tbcost / nfiles))
hmean += '%12.2f' % (thmean / nfiles)
hsdev += '%12.2f' % (thsdev / nfiles)
smean += '%12.2f' % (tsmean / nfiles)
ssdev += '%12.2f' % (tssdev / nfiles)
meand += '%12.2f' % (tmeand / nfiles)
kval += '%12.2f' % (tkval / nfiles)
print(fname)
if len(fnam2.strip()) > 0:
print(fnam2)
print(ratio)
if len(rat2.strip()) > 0:
print(rat2)
print(fptot)
print(fpper)
print(fntot)
print(fnper)
print(untot)
print(unper)
print(rcost)
print(bcost)
print(hmean)
print(hsdev)
print(smean)
print(ssdev)
print(meand)
print(kval)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def suck(f):
    """Parse one TestDriver result stream and extract summary statistics.

    Reads *f* line by line until the "all runs cost" line (or EOF) and
    returns the tuple::

        (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
         hamdevall, spamdevall)

    where htest/stest are the ham/spam message counts, fp/fn/un the
    false-positive / false-negative / unsure totals, fpp/fnp/unp the
    matching percentages, cost/bestcost the dollar costs, and
    hamdevall/spamdevall are (mean, sdev) pairs of the score
    distributions.
    """
    hamdevall = spamdevall = (0.0, 0.0)
    cost = 0.0
    bestcost = 0.0
    fp = fn = un = 0
    fpp = fnp = unp = 0.0
    htest = stest = 0
    get = f.readline
    while 1:
        line = get()
        if not line:
            # BUG FIX: readline() returns '' at EOF; without this check
            # a stream lacking the terminating cost line looped forever.
            break
        if line.startswith('-> <stat> tested'):
            print(line, end=' ')
        elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:
            # e.g. "-> <stat> Ham distribution for all runs: 400 items;
            #       mean 1.23; sdev 4.56"
            vals = line.split(';')
            mean = float(vals[1].split()[-1])
            sdev = float(vals[2].split()[-1])
            val = (mean, sdev)
            ntested = int(vals[0].split()[-2])
            typ = vals[0].split()[2]
            # (The old inner "for all runs" re-check was redundant: the
            # elif condition above already guarantees it.)
            if typ == 'Ham':
                hamdevall = val
                htest = ntested
            else:
                spamdevall = val
                stest = ntested
        elif line.startswith('-> best cost for all runs: $'):
            bestcost = float(line.split('$')[-1])
        elif line.startswith('-> <stat> all runs false positives: '):
            fp = int(line.split()[-1])
        elif line.startswith('-> <stat> all runs false negatives: '):
            fn = int(line.split()[-1])
        elif line.startswith('-> <stat> all runs unsure: '):
            un = int(line.split()[-1])
        elif line.startswith('-> <stat> all runs false positive %: '):
            fpp = float(line.split()[-1])
        elif line.startswith('-> <stat> all runs false negative %: '):
            fnp = float(line.split()[-1])
        elif line.startswith('-> <stat> all runs unsure %: '):
            unp = float(line.split()[-1])
        elif line.startswith('-> <stat> all runs cost: '):
            cost = float(line.split('$')[-1])
            break
    return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
            hamdevall, spamdevall)
def windowsfy(fn):
    """Return *fn* with a '.txt' suffix when that variant exists on disk.

    Result files saved on Windows often end up named "base.txt" rather
    than "base"; prefer the suffixed file when it is present.
    """
    import os
    candidate = fn + '.txt'
    return candidate if os.path.exists(candidate) else fn
def table():
    """Print a side-by-side comparison table of all result files named
    in sys.argv.

    Each file becomes one 12-character-wide column; option -m appends a
    final column holding the mean of every statistic.
    """
    import getopt
    import sys
    showMean = 0
    # BUG FIX: the row labels had lost their padding; they must be
    # exactly 10 characters wide so the 12-character data cells line up.
    fname = 'filename: '
    fnam2 = '          '
    ratio = 'ham:spam: '
    rat2 = '          '
    fptot = 'fp total: '
    fpper = 'fp %:     '
    fntot = 'fn total: '
    fnper = 'fn %:     '
    untot = 'unsure t: '
    unper = 'unsure %: '
    rcost = 'real cost:'
    bcost = 'best cost:'
    hmean = 'h mean:   '
    hsdev = 'h sdev:   '
    smean = 's mean:   '
    ssdev = 's sdev:   '
    meand = 'mean diff:'
    kval = 'k:        '
    # Running totals feeding the optional mean column.
    tfptot = tfpper = tfntot = tfnper = tuntot = tunper = trcost = \
        tbcost = thmean = thsdev = tsmean = tssdev = tmeand = tkval = 0
    args, fileargs = getopt.getopt(sys.argv[1:], 'm')
    for arg, val in args:
        if arg == '-m':
            showMean = 1
    for filename in fileargs:
        filename = windowsfy(filename)
        # BUG FIX: file() is Python 2 only; use open() in a with-block
        # so the handle is closed deterministically.
        with open(filename) as results:
            (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
             hamdevall, spamdevall) = suck(results)
        if filename.endswith('.txt'):
            filename = filename[:-4]
        # Strip any directory part (either separator style).
        filename = filename[filename.rfind('/') + 1:]
        filename = filename[filename.rfind('\\') + 1:]
        # File names (and ham:spam ratios) are staggered across two
        # header rows so adjacent long names do not overlap.
        if len(fname) > len(fnam2):
            fname += ' '
            fname = fname[0:len(fnam2) + 12]
            fnam2 += ' %11s' % filename
        else:
            fnam2 += ' '
            fnam2 = fnam2[0:len(fname) + 12]
            fname += ' %11s' % filename
        if len(ratio) > len(rat2):
            ratio += ' '
            ratio = ratio[0:len(rat2) + 12]
            rat2 += ' %11s' % ('%d:%d' % (htest, stest))
        else:
            rat2 += ' '
            rat2 = rat2[0:len(ratio) + 12]
            ratio += ' %11s' % ('%d:%d' % (htest, stest))
        # Append this file's cell to every data row and accumulate totals.
        fptot += '%12d' % fp
        tfptot += fp
        fpper += '%12.2f' % fpp
        tfpper += fpp
        fntot += '%12d' % fn
        tfntot += fn
        fnper += '%12.2f' % fnp
        tfnper += fnp
        untot += '%12d' % un
        tuntot += un
        unper += '%12.2f' % unp
        tunper += unp
        rcost += '%12s' % ('$%.2f' % cost)
        trcost += cost
        bcost += '%12s' % ('$%.2f' % bestcost)
        tbcost += bestcost
        hmean += '%12.2f' % hamdevall[0]
        thmean += hamdevall[0]
        hsdev += '%12.2f' % hamdevall[1]
        thsdev += hamdevall[1]
        smean += '%12.2f' % spamdevall[0]
        tsmean += spamdevall[0]
        ssdev += '%12.2f' % spamdevall[1]
        tssdev += spamdevall[1]
        meand += '%12.2f' % (spamdevall[0] - hamdevall[0])
        tmeand += spamdevall[0] - hamdevall[0]
        # Separation index: mean difference scaled by the summed sdevs.
        # NOTE(review): divides by zero when both sdevs are 0 -- confirm
        # whether such result files can occur before hardening.
        k = (spamdevall[0] - hamdevall[0]) / (spamdevall[1] + hamdevall[1])
        kval += '%12.2f' % k
        tkval += k
    nfiles = len(fileargs)
    if nfiles and showMean:
        fptot += '%12d' % (tfptot / nfiles)
        fpper += '%12.2f' % (tfpper / nfiles)
        fntot += '%12d' % (tfntot / nfiles)
        fnper += '%12.2f' % (tfnper / nfiles)
        untot += '%12d' % (tuntot / nfiles)
        unper += '%12.2f' % (tunper / nfiles)
        rcost += '%12s' % ('$%.2f' % (trcost / nfiles))
        bcost += '%12s' % ('$%.2f' % (tbcost / nfiles))
        hmean += '%12.2f' % (thmean / nfiles)
        hsdev += '%12.2f' % (thsdev / nfiles)
        smean += '%12.2f' % (tsmean / nfiles)
        ssdev += '%12.2f' % (tssdev / nfiles)
        meand += '%12.2f' % (tmeand / nfiles)
        kval += '%12.2f' % (tkval / nfiles)
    # The secondary header rows are printed only when they carry data.
    print(fname)
    if len(fnam2.strip()) > 0:
        print(fnam2)
    print(ratio)
    if len(rat2.strip()) > 0:
        print(rat2)
    print(fptot)
    print(fpper)
    print(fntot)
    print(fnper)
    print(untot)
    print(unper)
    print(rcost)
    print(bcost)
    print(hmean)
    print(hsdev)
    print(smean)
    print(ssdev)
    print(meand)
    print(kval)
# Entry point: run as a command-line tool (see module usage docstring).
if __name__ == '__main__':
    table()
<|reserved_special_token_1|>
"""
table.py [-m] base1 base2 ... baseN
Combines output from base1.txt, base2.txt, etc., which are created by
the TestDriver (such as timcv.py) output, and displays tabulated
comparison statistics to stdout. Each input file is represented by
one column in the table.
Optional argument -m shows a final column with the mean value of each
statistic.
"""
def suck(f):
    """Extract the summary statistics from one TestDriver output stream.

    Consumes *f* until the terminating "all runs cost" line (or EOF) and
    returns::

        (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
         hamdevall, spamdevall)

    htest/stest: ham/spam message counts; fp/fn/un: false-positive,
    false-negative and unsure totals; fpp/fnp/unp: the percentages;
    cost/bestcost: dollar costs; hamdevall/spamdevall: (mean, sdev)
    pairs of the score distributions.
    """
    hamdevall = spamdevall = (0.0, 0.0)
    cost = 0.0
    bestcost = 0.0
    fp = fn = un = 0
    fpp = fnp = unp = 0.0
    htest = stest = 0
    get = f.readline
    while 1:
        line = get()
        if not line:
            # BUG FIX: bail out at EOF; previously a stream without the
            # final cost line made this loop spin forever.
            break
        if line.startswith('-> <stat> tested'):
            print(line, end=' ')
        elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:
            # Distribution summary, e.g.
            # "-> <stat> Ham distribution for all runs: 400 items; mean 1.23; sdev 4.56"
            vals = line.split(';')
            mean = float(vals[1].split()[-1])
            sdev = float(vals[2].split()[-1])
            val = (mean, sdev)
            ntested = int(vals[0].split()[-2])
            typ = vals[0].split()[2]
            # The elif condition already guarantees "for all runs", so
            # no further check is needed here.
            if typ == 'Ham':
                hamdevall = val
                htest = ntested
            else:
                spamdevall = val
                stest = ntested
        elif line.startswith('-> best cost for all runs: $'):
            bestcost = float(line.split('$')[-1])
        elif line.startswith('-> <stat> all runs false positives: '):
            fp = int(line.split()[-1])
        elif line.startswith('-> <stat> all runs false negatives: '):
            fn = int(line.split()[-1])
        elif line.startswith('-> <stat> all runs unsure: '):
            un = int(line.split()[-1])
        elif line.startswith('-> <stat> all runs false positive %: '):
            fpp = float(line.split()[-1])
        elif line.startswith('-> <stat> all runs false negative %: '):
            fnp = float(line.split()[-1])
        elif line.startswith('-> <stat> all runs unsure %: '):
            unp = float(line.split()[-1])
        elif line.startswith('-> <stat> all runs cost: '):
            cost = float(line.split('$')[-1])
            break
    return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
            hamdevall, spamdevall)
def windowsfy(fn):
    """Map a base result name to the file name actually on disk.

    If "<fn>.txt" exists, return that name; otherwise return *fn*
    unchanged.
    """
    import os
    txt_name = '%s.txt' % fn
    if os.path.exists(txt_name):
        return txt_name
    return fn
def table():
    """Combine the per-file statistics into one comparison table and
    print it to stdout.

    Each file named in sys.argv becomes one 12-character-wide column;
    option -m appends a final column with the mean of every statistic.
    """
    import getopt
    import sys
    showMean = 0
    # Row labels are exactly 10 characters wide so the 12-character data
    # cells line up beneath each other.
    fname = "filename: "
    fnam2 = "          "
    ratio = "ham:spam: "
    rat2 = "          "
    fptot = "fp total: "
    fpper = "fp %:     "
    fntot = "fn total: "
    fnper = "fn %:     "
    untot = "unsure t: "
    unper = "unsure %: "
    rcost = "real cost:"
    bcost = "best cost:"
    hmean = "h mean:   "
    hsdev = "h sdev:   "
    smean = "s mean:   "
    ssdev = "s sdev:   "
    meand = "mean diff:"
    kval = "k:        "
    # Running totals feeding the optional mean column.
    tfptot = tfpper = tfntot = tfnper = tuntot = tunper = trcost = tbcost = \
        thmean = thsdev = tsmean = tssdev = tmeand = tkval = 0
    args, fileargs = getopt.getopt(sys.argv[1:], 'm')
    for arg, val in args:
        if arg == "-m":
            showMean = 1
    for filename in fileargs:
        filename = windowsfy(filename)
        # BUG FIX: file() does not exist in Python 3 -- use open() in a
        # with-block so the handle is closed deterministically.
        with open(filename) as results:
            (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,
             hamdevall, spamdevall) = suck(results)
        if filename.endswith('.txt'):
            filename = filename[:-4]
        # Strip any directory part (either separator style).
        filename = filename[filename.rfind('/')+1:]
        filename = filename[filename.rfind("\\")+1:]
        # File names (and ham:spam ratios) are staggered over two header
        # rows so adjacent long names do not overlap.
        if len(fname) > len(fnam2):
            fname += " "
            fname = fname[0:(len(fnam2) + 12)]
            fnam2 += " %11s" % filename
        else:
            fnam2 += " "
            fnam2 = fnam2[0:(len(fname) + 12)]
            fname += " %11s" % filename
        if len(ratio) > len(rat2):
            ratio += " "
            ratio = ratio[0:(len(rat2) + 12)]
            rat2 += " %11s" % ("%d:%d" % (htest, stest))
        else:
            rat2 += " "
            rat2 = rat2[0:(len(ratio) + 12)]
            ratio += " %11s" % ("%d:%d" % (htest, stest))
        # Append this file's cell to every data row and accumulate totals.
        fptot += "%12d" % fp
        tfptot += fp
        fpper += "%12.2f" % fpp
        tfpper += fpp
        fntot += "%12d" % fn
        tfntot += fn
        fnper += "%12.2f" % fnp
        tfnper += fnp
        untot += "%12d" % un
        tuntot += un
        unper += "%12.2f" % unp
        tunper += unp
        rcost += "%12s" % ("$%.2f" % cost)
        trcost += cost
        bcost += "%12s" % ("$%.2f" % bestcost)
        tbcost += bestcost
        hmean += "%12.2f" % hamdevall[0]
        thmean += hamdevall[0]
        hsdev += "%12.2f" % hamdevall[1]
        thsdev += hamdevall[1]
        smean += "%12.2f" % spamdevall[0]
        tsmean += spamdevall[0]
        ssdev += "%12.2f" % spamdevall[1]
        tssdev += spamdevall[1]
        meand += "%12.2f" % (spamdevall[0] - hamdevall[0])
        tmeand += (spamdevall[0] - hamdevall[0])
        # Separation index: mean difference scaled by the summed sdevs.
        # NOTE(review): divides by zero when both sdevs are 0 -- confirm
        # whether such result files can occur before hardening.
        k = (spamdevall[0] - hamdevall[0]) / (spamdevall[1] + hamdevall[1])
        kval += "%12.2f" % k
        tkval += k
    nfiles = len(fileargs)
    if nfiles and showMean:
        fptot += "%12d" % (tfptot/nfiles)
        fpper += "%12.2f" % (tfpper/nfiles)
        fntot += "%12d" % (tfntot/nfiles)
        fnper += "%12.2f" % (tfnper/nfiles)
        untot += "%12d" % (tuntot/nfiles)
        unper += "%12.2f" % (tunper/nfiles)
        rcost += "%12s" % ("$%.2f" % (trcost/nfiles))
        bcost += "%12s" % ("$%.2f" % (tbcost/nfiles))
        hmean += "%12.2f" % (thmean/nfiles)
        hsdev += "%12.2f" % (thsdev/nfiles)
        smean += "%12.2f" % (tsmean/nfiles)
        ssdev += "%12.2f" % (tssdev/nfiles)
        meand += "%12.2f" % (tmeand/nfiles)
        kval += "%12.2f" % (tkval/nfiles)
    # The secondary header rows are printed only when they carry data.
    print(fname)
    if len(fnam2.strip()) > 0:
        print(fnam2)
    print(ratio)
    if len(rat2.strip()) > 0:
        print(rat2)
    print(fptot)
    print(fpper)
    print(fntot)
    print(fnper)
    print(untot)
    print(unper)
    print(rcost)
    print(bcost)
    print(hmean)
    print(hsdev)
    print(smean)
    print(ssdev)
    print(meand)
    print(kval)
# Entry point: run as a command-line tool (see module usage docstring).
if __name__ == "__main__":
    table()
|
flexible
|
{
"blob_id": "4e94e9e2b45d3786aa86be800be882cc3d5a80b5",
"index": 8328,
"step-1": "<mask token>\n\n\ndef suck(f):\n hamdevall = spamdevall = 0.0, 0.0\n cost = 0.0\n bestcost = 0.0\n fp = 0\n fn = 0\n un = 0\n fpp = 0.0\n fnp = 0.0\n unp = 0.0\n htest = 0\n stest = 0\n get = f.readline\n while 1:\n line = get()\n if line.startswith('-> <stat> tested'):\n print(line, end=' ')\n elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:\n vals = line.split(';')\n mean = float(vals[1].split()[-1])\n sdev = float(vals[2].split()[-1])\n val = mean, sdev\n ntested = int(vals[0].split()[-2])\n typ = vals[0].split()[2]\n if line.find('for all runs') != -1:\n if typ == 'Ham':\n hamdevall = val\n htest = ntested\n else:\n spamdevall = val\n stest = ntested\n elif line.startswith('-> best cost for all runs: $'):\n bestcost = float(line.split('$')[-1])\n elif line.startswith('-> <stat> all runs false positives: '):\n fp = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negatives: '):\n fn = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure: '):\n un = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false positive %: '):\n fpp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negative %: '):\n fnp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure %: '):\n unp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs cost: '):\n cost = float(line.split('$')[-1])\n break\n return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,\n hamdevall, spamdevall)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef suck(f):\n hamdevall = spamdevall = 0.0, 0.0\n cost = 0.0\n bestcost = 0.0\n fp = 0\n fn = 0\n un = 0\n fpp = 0.0\n fnp = 0.0\n unp = 0.0\n htest = 0\n stest = 0\n get = f.readline\n while 1:\n line = get()\n if line.startswith('-> <stat> tested'):\n print(line, end=' ')\n elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:\n vals = line.split(';')\n mean = float(vals[1].split()[-1])\n sdev = float(vals[2].split()[-1])\n val = mean, sdev\n ntested = int(vals[0].split()[-2])\n typ = vals[0].split()[2]\n if line.find('for all runs') != -1:\n if typ == 'Ham':\n hamdevall = val\n htest = ntested\n else:\n spamdevall = val\n stest = ntested\n elif line.startswith('-> best cost for all runs: $'):\n bestcost = float(line.split('$')[-1])\n elif line.startswith('-> <stat> all runs false positives: '):\n fp = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negatives: '):\n fn = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure: '):\n un = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false positive %: '):\n fpp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negative %: '):\n fnp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure %: '):\n unp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs cost: '):\n cost = float(line.split('$')[-1])\n break\n return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,\n hamdevall, spamdevall)\n\n\ndef windowsfy(fn):\n import os\n if os.path.exists(fn + '.txt'):\n return fn + '.txt'\n else:\n return fn\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef suck(f):\n hamdevall = spamdevall = 0.0, 0.0\n cost = 0.0\n bestcost = 0.0\n fp = 0\n fn = 0\n un = 0\n fpp = 0.0\n fnp = 0.0\n unp = 0.0\n htest = 0\n stest = 0\n get = f.readline\n while 1:\n line = get()\n if line.startswith('-> <stat> tested'):\n print(line, end=' ')\n elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:\n vals = line.split(';')\n mean = float(vals[1].split()[-1])\n sdev = float(vals[2].split()[-1])\n val = mean, sdev\n ntested = int(vals[0].split()[-2])\n typ = vals[0].split()[2]\n if line.find('for all runs') != -1:\n if typ == 'Ham':\n hamdevall = val\n htest = ntested\n else:\n spamdevall = val\n stest = ntested\n elif line.startswith('-> best cost for all runs: $'):\n bestcost = float(line.split('$')[-1])\n elif line.startswith('-> <stat> all runs false positives: '):\n fp = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negatives: '):\n fn = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure: '):\n un = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false positive %: '):\n fpp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negative %: '):\n fnp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure %: '):\n unp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs cost: '):\n cost = float(line.split('$')[-1])\n break\n return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,\n hamdevall, spamdevall)\n\n\ndef windowsfy(fn):\n import os\n if os.path.exists(fn + '.txt'):\n return fn + '.txt'\n else:\n return fn\n\n\ndef table():\n import getopt, sys\n showMean = 0\n fname = 'filename: '\n fnam2 = ' '\n ratio = 'ham:spam: '\n rat2 = ' '\n fptot = 'fp total: '\n fpper = 'fp %: '\n fntot = 'fn total: '\n fnper = 'fn %: '\n untot = 'unsure t: '\n unper = 'unsure %: '\n rcost = 'real cost:'\n bcost = 'best cost:'\n hmean = 'h mean: '\n hsdev = 'h sdev: '\n smean = 
's mean: '\n ssdev = 's sdev: '\n meand = 'mean diff:'\n kval = 'k: '\n (tfptot) = (tfpper) = (tfntot) = (tfnper) = (tuntot) = (tunper) = (trcost\n ) = (tbcost) = (thmean) = (thsdev) = (tsmean) = (tssdev) = (tmeand) = (\n tkval) = 0\n args, fileargs = getopt.getopt(sys.argv[1:], 'm')\n for arg, val in args:\n if arg == '-m':\n showMean = 1\n for filename in fileargs:\n filename = windowsfy(filename)\n (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost, hamdevall,\n spamdevall) = suck(file(filename))\n if filename.endswith('.txt'):\n filename = filename[:-4]\n filename = filename[filename.rfind('/') + 1:]\n filename = filename[filename.rfind('\\\\') + 1:]\n if len(fname) > len(fnam2):\n fname += ' '\n fname = fname[0:len(fnam2) + 12]\n fnam2 += ' %11s' % filename\n else:\n fnam2 += ' '\n fnam2 = fnam2[0:len(fname) + 12]\n fname += ' %11s' % filename\n if len(ratio) > len(rat2):\n ratio += ' '\n ratio = ratio[0:len(rat2) + 12]\n rat2 += ' %11s' % ('%d:%d' % (htest, stest))\n else:\n rat2 += ' '\n rat2 = rat2[0:len(ratio) + 12]\n ratio += ' %11s' % ('%d:%d' % (htest, stest))\n fptot += '%12d' % fp\n tfptot += fp\n fpper += '%12.2f' % fpp\n tfpper += fpp\n fntot += '%12d' % fn\n tfntot += fn\n fnper += '%12.2f' % fnp\n tfnper += fnp\n untot += '%12d' % un\n tuntot += un\n unper += '%12.2f' % unp\n tunper += unp\n rcost += '%12s' % ('$%.2f' % cost)\n trcost += cost\n bcost += '%12s' % ('$%.2f' % bestcost)\n tbcost += bestcost\n hmean += '%12.2f' % hamdevall[0]\n thmean += hamdevall[0]\n hsdev += '%12.2f' % hamdevall[1]\n thsdev += hamdevall[1]\n smean += '%12.2f' % spamdevall[0]\n tsmean += spamdevall[0]\n ssdev += '%12.2f' % spamdevall[1]\n tssdev += spamdevall[1]\n meand += '%12.2f' % (spamdevall[0] - hamdevall[0])\n tmeand += spamdevall[0] - hamdevall[0]\n k = (spamdevall[0] - hamdevall[0]) / (spamdevall[1] + hamdevall[1])\n kval += '%12.2f' % k\n tkval += k\n nfiles = len(fileargs)\n if nfiles and showMean:\n fptot += '%12d' % (tfptot / nfiles)\n fpper += 
'%12.2f' % (tfpper / nfiles)\n fntot += '%12d' % (tfntot / nfiles)\n fnper += '%12.2f' % (tfnper / nfiles)\n untot += '%12d' % (tuntot / nfiles)\n unper += '%12.2f' % (tunper / nfiles)\n rcost += '%12s' % ('$%.2f' % (trcost / nfiles))\n bcost += '%12s' % ('$%.2f' % (tbcost / nfiles))\n hmean += '%12.2f' % (thmean / nfiles)\n hsdev += '%12.2f' % (thsdev / nfiles)\n smean += '%12.2f' % (tsmean / nfiles)\n ssdev += '%12.2f' % (tssdev / nfiles)\n meand += '%12.2f' % (tmeand / nfiles)\n kval += '%12.2f' % (tkval / nfiles)\n print(fname)\n if len(fnam2.strip()) > 0:\n print(fnam2)\n print(ratio)\n if len(rat2.strip()) > 0:\n print(rat2)\n print(fptot)\n print(fpper)\n print(fntot)\n print(fnper)\n print(untot)\n print(unper)\n print(rcost)\n print(bcost)\n print(hmean)\n print(hsdev)\n print(smean)\n print(ssdev)\n print(meand)\n print(kval)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef suck(f):\n hamdevall = spamdevall = 0.0, 0.0\n cost = 0.0\n bestcost = 0.0\n fp = 0\n fn = 0\n un = 0\n fpp = 0.0\n fnp = 0.0\n unp = 0.0\n htest = 0\n stest = 0\n get = f.readline\n while 1:\n line = get()\n if line.startswith('-> <stat> tested'):\n print(line, end=' ')\n elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:\n vals = line.split(';')\n mean = float(vals[1].split()[-1])\n sdev = float(vals[2].split()[-1])\n val = mean, sdev\n ntested = int(vals[0].split()[-2])\n typ = vals[0].split()[2]\n if line.find('for all runs') != -1:\n if typ == 'Ham':\n hamdevall = val\n htest = ntested\n else:\n spamdevall = val\n stest = ntested\n elif line.startswith('-> best cost for all runs: $'):\n bestcost = float(line.split('$')[-1])\n elif line.startswith('-> <stat> all runs false positives: '):\n fp = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negatives: '):\n fn = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure: '):\n un = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false positive %: '):\n fpp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negative %: '):\n fnp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure %: '):\n unp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs cost: '):\n cost = float(line.split('$')[-1])\n break\n return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,\n hamdevall, spamdevall)\n\n\ndef windowsfy(fn):\n import os\n if os.path.exists(fn + '.txt'):\n return fn + '.txt'\n else:\n return fn\n\n\ndef table():\n import getopt, sys\n showMean = 0\n fname = 'filename: '\n fnam2 = ' '\n ratio = 'ham:spam: '\n rat2 = ' '\n fptot = 'fp total: '\n fpper = 'fp %: '\n fntot = 'fn total: '\n fnper = 'fn %: '\n untot = 'unsure t: '\n unper = 'unsure %: '\n rcost = 'real cost:'\n bcost = 'best cost:'\n hmean = 'h mean: '\n hsdev = 'h sdev: '\n smean = 
's mean: '\n ssdev = 's sdev: '\n meand = 'mean diff:'\n kval = 'k: '\n (tfptot) = (tfpper) = (tfntot) = (tfnper) = (tuntot) = (tunper) = (trcost\n ) = (tbcost) = (thmean) = (thsdev) = (tsmean) = (tssdev) = (tmeand) = (\n tkval) = 0\n args, fileargs = getopt.getopt(sys.argv[1:], 'm')\n for arg, val in args:\n if arg == '-m':\n showMean = 1\n for filename in fileargs:\n filename = windowsfy(filename)\n (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost, hamdevall,\n spamdevall) = suck(file(filename))\n if filename.endswith('.txt'):\n filename = filename[:-4]\n filename = filename[filename.rfind('/') + 1:]\n filename = filename[filename.rfind('\\\\') + 1:]\n if len(fname) > len(fnam2):\n fname += ' '\n fname = fname[0:len(fnam2) + 12]\n fnam2 += ' %11s' % filename\n else:\n fnam2 += ' '\n fnam2 = fnam2[0:len(fname) + 12]\n fname += ' %11s' % filename\n if len(ratio) > len(rat2):\n ratio += ' '\n ratio = ratio[0:len(rat2) + 12]\n rat2 += ' %11s' % ('%d:%d' % (htest, stest))\n else:\n rat2 += ' '\n rat2 = rat2[0:len(ratio) + 12]\n ratio += ' %11s' % ('%d:%d' % (htest, stest))\n fptot += '%12d' % fp\n tfptot += fp\n fpper += '%12.2f' % fpp\n tfpper += fpp\n fntot += '%12d' % fn\n tfntot += fn\n fnper += '%12.2f' % fnp\n tfnper += fnp\n untot += '%12d' % un\n tuntot += un\n unper += '%12.2f' % unp\n tunper += unp\n rcost += '%12s' % ('$%.2f' % cost)\n trcost += cost\n bcost += '%12s' % ('$%.2f' % bestcost)\n tbcost += bestcost\n hmean += '%12.2f' % hamdevall[0]\n thmean += hamdevall[0]\n hsdev += '%12.2f' % hamdevall[1]\n thsdev += hamdevall[1]\n smean += '%12.2f' % spamdevall[0]\n tsmean += spamdevall[0]\n ssdev += '%12.2f' % spamdevall[1]\n tssdev += spamdevall[1]\n meand += '%12.2f' % (spamdevall[0] - hamdevall[0])\n tmeand += spamdevall[0] - hamdevall[0]\n k = (spamdevall[0] - hamdevall[0]) / (spamdevall[1] + hamdevall[1])\n kval += '%12.2f' % k\n tkval += k\n nfiles = len(fileargs)\n if nfiles and showMean:\n fptot += '%12d' % (tfptot / nfiles)\n fpper += 
'%12.2f' % (tfpper / nfiles)\n fntot += '%12d' % (tfntot / nfiles)\n fnper += '%12.2f' % (tfnper / nfiles)\n untot += '%12d' % (tuntot / nfiles)\n unper += '%12.2f' % (tunper / nfiles)\n rcost += '%12s' % ('$%.2f' % (trcost / nfiles))\n bcost += '%12s' % ('$%.2f' % (tbcost / nfiles))\n hmean += '%12.2f' % (thmean / nfiles)\n hsdev += '%12.2f' % (thsdev / nfiles)\n smean += '%12.2f' % (tsmean / nfiles)\n ssdev += '%12.2f' % (tssdev / nfiles)\n meand += '%12.2f' % (tmeand / nfiles)\n kval += '%12.2f' % (tkval / nfiles)\n print(fname)\n if len(fnam2.strip()) > 0:\n print(fnam2)\n print(ratio)\n if len(rat2.strip()) > 0:\n print(rat2)\n print(fptot)\n print(fpper)\n print(fntot)\n print(fnper)\n print(untot)\n print(unper)\n print(rcost)\n print(bcost)\n print(hmean)\n print(hsdev)\n print(smean)\n print(ssdev)\n print(meand)\n print(kval)\n\n\nif __name__ == '__main__':\n table()\n",
"step-5": "\"\"\"\ntable.py [-m] base1 base2 ... baseN\nCombines output from base1.txt, base2.txt, etc., which are created by\nthe TestDriver (such as timcv.py) output, and displays tabulated\ncomparison statistics to stdout. Each input file is represented by\none column in the table.\nOptional argument -m shows a final column with the mean value of each\nstatistic.\n\"\"\"\ndef suck(f):\n hamdevall = spamdevall = (0.0, 0.0)\n cost = 0.0\n bestcost = 0.0\n fp = 0\n fn = 0\n un = 0\n fpp = 0.0\n fnp = 0.0\n unp = 0.0\n htest = 0\n stest = 0\n get = f.readline\n while 1:\n line = get()\n if line.startswith('-> <stat> tested'):\n print(line, end=' ')\n elif line.find(' items; mean ') > 0 and line.find('for all runs') > 0:\n vals = line.split(';')\n mean = float(vals[1].split()[-1])\n sdev = float(vals[2].split()[-1])\n val = (mean, sdev)\n ntested = int(vals[0].split()[-2])\n typ = vals[0].split()[2]\n if line.find('for all runs') != -1:\n if typ == 'Ham':\n hamdevall = val\n htest = ntested\n else:\n spamdevall = val\n stest = ntested\n elif line.startswith('-> best cost for all runs: $'):\n bestcost = float(line.split('$')[-1])\n elif line.startswith('-> <stat> all runs false positives: '):\n fp = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negatives: '):\n fn = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure: '):\n un = int(line.split()[-1])\n elif line.startswith('-> <stat> all runs false positive %: '):\n fpp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs false negative %: '):\n fnp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs unsure %: '):\n unp = float(line.split()[-1])\n elif line.startswith('-> <stat> all runs cost: '):\n cost = float(line.split('$')[-1])\n break\n return (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,\n hamdevall, spamdevall)\ndef windowsfy(fn):\n import os\n if os.path.exists(fn + '.txt'):\n return fn + '.txt'\n else:\n return fn\ndef 
table():\n import getopt, sys\n showMean = 0\n fname = \"filename: \"\n fnam2 = \" \"\n ratio = \"ham:spam: \"\n rat2 = \" \"\n fptot = \"fp total: \"\n fpper = \"fp %: \"\n fntot = \"fn total: \"\n fnper = \"fn %: \"\n untot = \"unsure t: \"\n unper = \"unsure %: \"\n rcost = \"real cost:\"\n bcost = \"best cost:\"\n hmean = \"h mean: \"\n hsdev = \"h sdev: \"\n smean = \"s mean: \"\n ssdev = \"s sdev: \"\n meand = \"mean diff:\"\n kval = \"k: \"\n tfptot = tfpper = tfntot = tfnper = tuntot = tunper = trcost = tbcost = \\\n thmean = thsdev = tsmean = tssdev = tmeand = tkval = 0\n args, fileargs = getopt.getopt(sys.argv[1:], 'm')\n for arg, val in args:\n if arg == \"-m\":\n showMean = 1\n for filename in fileargs:\n filename = windowsfy(filename)\n (htest, stest, fp, fn, un, fpp, fnp, unp, cost, bestcost,\n hamdevall, spamdevall) = suck(file(filename))\n if filename.endswith('.txt'):\n filename = filename[:-4]\n filename = filename[filename.rfind('/')+1:]\n filename = filename[filename.rfind(\"\\\\\")+1:]\n if len(fname) > len(fnam2):\n fname += \" \"\n fname = fname[0:(len(fnam2) + 12)]\n fnam2 += \" %11s\" % filename\n else:\n fnam2 += \" \"\n fnam2 = fnam2[0:(len(fname) + 12)]\n fname += \" %11s\" % filename\n if len(ratio) > len(rat2):\n ratio += \" \"\n ratio = ratio[0:(len(rat2) + 12)]\n rat2 += \" %11s\" % (\"%d:%d\" % (htest, stest))\n else:\n rat2 += \" \"\n rat2 = rat2[0:(len(ratio) + 12)]\n ratio += \" %11s\" % (\"%d:%d\" % (htest, stest))\n fptot += \"%12d\" % fp\n tfptot += fp\n fpper += \"%12.2f\" % fpp\n tfpper += fpp\n fntot += \"%12d\" % fn\n tfntot += fn\n fnper += \"%12.2f\" % fnp\n tfnper += fnp\n untot += \"%12d\" % un\n tuntot += un\n unper += \"%12.2f\" % unp\n tunper += unp\n rcost += \"%12s\" % (\"$%.2f\" % cost)\n trcost += cost\n bcost += \"%12s\" % (\"$%.2f\" % bestcost)\n tbcost += bestcost\n hmean += \"%12.2f\" % hamdevall[0]\n thmean += hamdevall[0]\n hsdev += \"%12.2f\" % hamdevall[1]\n thsdev += hamdevall[1]\n smean += \"%12.2f\" % 
spamdevall[0]\n tsmean += spamdevall[0]\n ssdev += \"%12.2f\" % spamdevall[1]\n tssdev += spamdevall[1]\n meand += \"%12.2f\" % (spamdevall[0] - hamdevall[0])\n tmeand += (spamdevall[0] - hamdevall[0])\n k = (spamdevall[0] - hamdevall[0]) / (spamdevall[1] + hamdevall[1])\n kval += \"%12.2f\" % k\n tkval += k\n nfiles = len(fileargs)\n if nfiles and showMean:\n fptot += \"%12d\" % (tfptot/nfiles)\n fpper += \"%12.2f\" % (tfpper/nfiles)\n fntot += \"%12d\" % (tfntot/nfiles)\n fnper += \"%12.2f\" % (tfnper/nfiles)\n untot += \"%12d\" % (tuntot/nfiles)\n unper += \"%12.2f\" % (tunper/nfiles)\n rcost += \"%12s\" % (\"$%.2f\" % (trcost/nfiles))\n bcost += \"%12s\" % (\"$%.2f\" % (tbcost/nfiles))\n hmean += \"%12.2f\" % (thmean/nfiles)\n hsdev += \"%12.2f\" % (thsdev/nfiles)\n smean += \"%12.2f\" % (tsmean/nfiles)\n ssdev += \"%12.2f\" % (tssdev/nfiles)\n meand += \"%12.2f\" % (tmeand/nfiles)\n kval += \"%12.2f\" % (tkval/nfiles)\n print(fname)\n if len(fnam2.strip()) > 0:\n print(fnam2)\n print(ratio)\n if len(rat2.strip()) > 0:\n print(rat2)\n print(fptot)\n print(fpper)\n print(fntot)\n print(fnper)\n print(untot)\n print(unper)\n print(rcost)\n print(bcost)\n print(hmean)\n print(hsdev)\n print(smean)\n print(ssdev)\n print(meand)\n print(kval)\nif __name__ == \"__main__\":\n table()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import cv2
import numpy as np
import pandas as pd
import tkinter as tk
import random
from tkinter import *
from tkinter import ttk
from tkinter import messagebox
from tkinter import Scale,Tk
from tkinter.ttk import Notebook
# --- Shared module-level state for the colour-analysis GUI ------------
# BUG FIX: refPt and PtBGR were each initialised twice; the duplicate
# assignments are removed so every name is defined exactly once.
refPt = []        # clicked reference points on the image
PtBGR = []        # BGR values sampled at those points
r = []
g = []
b = []
Serial = []
r1 = []           # NOTE(review): r1..r9 / rate..rate3 / add..add3 look
r2 = []           # like per-formula component lists read from the
r3 = []           # recipe database -- confirm against the code that
r4 = []           # fills them (not fully visible here).
rate = []
rate2 = []
rate3 = []
r6 = []
r7 = []
r8 = []
r9 = []
add = []
add2 = []
add3 = []
color_name = []
locate = []
brand = []
boolean = False   # NOTE(review): set here but its use is not visible in this chunk.

# Main input window: asks for the image and database file names.
root = tk.Tk()
root.geometry("400x200")
root.configure(background='white')
def quitScreen():
    """Notify the user that analysis starts on the next window click,
    then tear down the input window."""
    messagebox.showinfo("collecting data", "點擊視窗開始分析")
    root.destroy()
    # Create and immediately destroy a throwaway Tk root.
    # NOTE(review): looks like a workaround so a fresh default root can
    # be created later -- confirm it is still required.
    temp_root = Tk()
    temp_root.destroy()
def getTextInput():
    """Copy the image file name and database name typed into the two
    text widgets into the module-level `result` / `result2` strings."""
    global result, result2
    # "-1c" drops the trailing newline Tk always appends to Text contents.
    result = text.get(1.0, tk.END + "-1c")
    result2 = text2.get(1.0, tk.END + "-1c")
# --- Input form layout -------------------------------------------------
# Banner image across the top of the window.
img = PhotoImage(file="buttons/QJsmall.png")
panel = tk.Label(root, image=img)
panel.grid(row=0, column=0, columnspan=3)

# Prompt + entry box for the image file name (pre-filled with ".jpg").
labelmode = tk.Label(root, text="請輸入圖片完整名稱\n ex:104432 w7.jpg", bg="white")
labelmode.configure(font=("微軟正黑體", 10))
labelmode.grid(row=1)
text = tk.Text(root, width=20, height=1)
text.insert("insert", ".jpg")
text.configure(font=("微軟正黑體", 10))
text.grid(row=1, column=2)

# Prompt + entry box for the recipe-database file name.
labelmode2 = tk.Label(root, text="請輸入讀取資料庫名稱\n ex:PureColorBig.csv", bg="white")
labelmode2.configure(font=("微軟正黑體", 10))
labelmode2.grid(row=2)
text2 = tk.Text(root, width=20, height=1)
text2.insert("insert", "PureColorBig.csv")
text2.configure(font=("微軟正黑體", 10))
text2.grid(row=2, column=2)

# Confirm stores the typed values; start closes this window.
img_confirm = PhotoImage(file="buttons/confirm.png")
img_start = PhotoImage(file="buttons/start.png")
btnRead = tk.Button(root, image=img_confirm, text=" ", relief='flat',
                    command=getTextInput)
btnRead.grid(row=5, column=1)
btnRead2 = tk.Button(root, image=img_start, text=" ", relief='flat',
                     command=quitScreen)
btnRead2.grid(row=5, column=2)

root.mainloop()
def Result_Print():
window=Tk()
window.title("分析結果")
window.geometry("600x900")
frame2=Frame(window)
frame2.pack(fill="both")
tablayout=Notebook(frame2)
tablayout2=Notebook(frame2)
#交叉配對
ntab1=Frame(tablayout2)
ntab1.pack(fill="both")
for row in range(len(name_n)):
for column in range(1):
label=Label(ntab1,width=25,height=2,text=name_n[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=column,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
for row in range(len(name_n)):
for column in range(1):
label=Label(ntab1,width=5,height=2,text="%s" %rate_n[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
for row in range(len(name_n)):
for column in range(1):
label=Label(ntab1,width=12,height=2,text="% 相似程度",bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=2,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
tablayout2.add(ntab1,text="交叉配對結果")
ntab2=Frame(tablayout2)
ntab2.pack(fill="both")
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab2,width=22,height=1,text=ncol[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab2,width=22,height=1,text=row_nf3[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
tablayout2.add(ntab2,text="配方1")
ntab3=Frame(tablayout2)
ntab3.pack(fill="both")
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab3,width=22,height=1,text=ncol[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab3,width=22,height=1,text=row_nf32[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
tablayout2.add(ntab3,text="配方2")
ntab4=Frame(tablayout2)
ntab4.pack(fill="both")
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab4,width=22,height=1,text=ncol[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab4,width=22,height=1,text=row_nf33[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
tablayout2.add(ntab4,text="配方3")
ntab5=Frame(tablayout2)
ntab5.pack(fill="both")
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab5,width=22,height=1,text=ncol[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
for row in range(len(ncol)):
for column in range(1):
label=Label(ntab5,width=22,height=1,text=row_nf3[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
ntab1.grid_columnconfigure(column,weight=1)
tablayout2.add(ntab5,text="最接近配方")
#顏色分類
tab1=Frame(tablayout)
tab1.pack(fill="both")
for row in range(len(name)):
for column in range(1):
label=Label(tab1,width=25,height=2,text=name[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=column,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
for row in range(len(name)):
for column in range(1):
label=Label(tab1,width=5,height=2,text="%s" %rate[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
for row in range(len(name)):
for column in range(1):
label=Label(tab1,width=12,height=2,text="% 相似程度",bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=2,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
tablayout.add(tab1,text="顏色分類結果")
tab2=Frame(tablayout)
tab2.pack(fill="both")
for row in range(len(col)):
for column in range(1):
label=Label(tab2,width=22,height=1,text=col[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
for row in range(len(col)):
for column in range(1):
label=Label(tab2,width=22,height=1,text=row_df3[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
tablayout.add(tab2,text="配方1")
tab3=Frame(tablayout)
tab3.pack(fill="both")
for row in range(len(col)):
for column in range(1):
label=Label(tab3,width=22,height=1,text=col[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
for row in range(len(col)):
for column in range(1):
label=Label(tab3,width=22,height=1,text=row_df32[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
tablayout.add(tab3,text="配方2")
tab4=Frame(tablayout)
tab4.pack(fill="both")
for row in range(len(col)):
for column in range(1):
label=Label(tab4,width=22,height=1,text=col[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
for row in range(len(col)):
for column in range(1):
label=Label(tab4,width=22,height=1,text=row_df33[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
tablayout.add(tab4,text="配方3")
tab5=Frame(tablayout)
tab5.pack(fill="both")
for row in range(len(col)):
for column in range(1):
label=Label(tab5,width=22,height=1,text=col[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=0,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
for row in range(len(col)):
for column in range(1):
label=Label(tab5,width=22,height=1,text=row_text[row],bg="black",fg="white",padx=1,pady=1)
label.grid(row=row,column=1,sticky="nsew",padx=1,pady=1)
tab1.grid_columnconfigure(column,weight=1)
tablayout.add(tab5,text="最接近配方")
tablayout.pack()
tablayout2.pack()
window.mainloop()
def CircleCallback(event,x,y,flags,param):
    """OpenCV mouse callback for the 'mouse_callback' window.

    On a left click it samples n random pixels from the global image
    ``img``, averages their BGR values, classifies the color via
    ``color_def``, and searches the CSV database (named by the global
    ``result2``) for the closest recipes, showing them with
    ``Result_Print()``.

    NOTE(review): source indentation was reconstructed from syntax;
    nesting of the ``len(refPt)==n`` section assumed to follow the
    completed sampling pass — confirm against the original file.
    """
    n=8
    # Results are exchanged with the rest of the program via globals.
    global refPt,PtBGR,w,h,Serial,r1,r2,r3,r4,rate,rate2,rate3,r6,r7,r8,r9,add,add2,add3,color,b,g,r,df3,name,rate,col,row_text
    global row_df3,row_df32,row_df33,row_text2,row_nf3,row_nf32,nf3,row_nf33,name_n,rate_n,ncol
    if event == cv2.EVENT_LBUTTONDOWN:
        # n is the number of sample points; larger n is more accurate,
        # values above 1000 are generally not recommended (original note).
        n=500
        for c in range(0,n):
            c+=1
            # If n changes, change 499 below to n-1 (original note).
            # NOTE(review): sampling assumes the image is at least
            # 500x500, and the pair is used as img[row, col] — so despite
            # the names, ranx indexes rows. Confirm against image size.
            ranx=(random.randint(0,499))
            rany=(random.randint(0,499))
            refPt.append((ranx,rany))
            b, g, r = img[ranx,rany]
            PtBGR.append((b,g,r))
            #print(PtBGR[0:n])
            # Rebuild per-channel lists from every point sampled so far.
            b=[x[0] for x in PtBGR]
            g=[x[1] for x in PtBGR]
            r=[x[2] for x in PtBGR]
        # refPt is global and keeps growing across clicks, so this branch
        # only fires when exactly n points have been collected (first click).
        if len(refPt)==n:
            # Per-channel averages plus their sum and mean over n samples.
            BAvr=(round(sum(b[0:n])/n))
            GAvr=(round(sum(g[0:n])/n))
            RAvr=(round(sum(r[0:n])/n))
            SumRGB=(BAvr+GAvr+RAvr)
            SumAvr=(round(SumRGB/3))
            # color_def assigns the coarse color name to the global `color`.
            color_def(BAvr,GAvr,RAvr)
            color_name.append(color)
            AvrRGB={'R':RAvr,'G':GAvr,'B':BAvr,'Sum':SumRGB,'Avr':SumAvr,'color':color_name}
            df_test = pd.DataFrame(AvrRGB,index=[0])
            # NOTE(review): the relative folder name ".data base" (with a
            # space) looks unusual — confirm it matches the on-disk layout.
            dfread = pd.read_csv(".data base\\%s" %(result2))
            dfread['A']= round((dfread['R'] + dfread['G'] + dfread['B'])/3)
            dfread['S'] = dfread['R'] + dfread['G'] + dfread['B']
            # --- cross-matching method (original note) ---
            # Join the sampled RGB triples against database rows and count,
            # per 'Serial no', how many samples matched exactly.
            nf=pd.DataFrame(list(zip(r,g,b)),columns=['R','G','B'])
            nfread=dfread[['Serial no','R','G','B']]
            loan=pd.merge(nf,nfread)
            group=loan.groupby('Serial no')
            Newnf=group.count()
            # P = percentage of matched samples contributed by each serial.
            Newnf['P']=round((Newnf['R']/Newnf['R'].sum())* 100)
            Newnf=Newnf.sort_values(by=['R'],ascending=False)
            Rate=Newnf['P'].tolist()
            Newnf.columns = [' '.join(col).strip() for col in Newnf.columns.values]
            nf2=pd.DataFrame(Newnf.to_records())
            nf2=nf2.head(5)
            print(nf2)
            # Pick up to three best-matching serial numbers (0 = missing).
            if(len(nf2['Serial no'])==0):
                i=0
                j=0
                k=0
            elif(len(nf2['Serial no'])==1):
                i=nf2.at[0,'Serial no']
                j=0
                k=0
            elif(len(nf2['Serial no'])==2):
                i=nf2.at[0,'Serial no']
                j=nf2.at[1,'Serial no']
                k=0
            else:
                i=nf2.at[0,'Serial no']
                j=nf2.at[1,'Serial no']
                k=nf2.at[2,'Serial no']
            print(k)
            # One representative row per matched serial; drop the color
            # columns so only the recipe fields remain.
            nf3=dfread.loc[(dfread['Serial no']==i)].head(1)
            nf4=dfread.loc[(dfread['Serial no']==j)].head(1)
            nf5=dfread.loc[(dfread['Serial no']==k)].head(1)
            nf3=nf3.drop(['R','G','B','color','A','S'],axis=1)
            nf4=nf4.drop(['R','G','B','color','A','S'],axis=1)
            nf5=nf5.drop(['R','G','B','color','A','S'],axis=1)
            nf=pd.concat([nf3, nf4,nf5])
            nf.to_csv(".data base\\test_result2.csv",index=False,encoding="utf_8_sig")
            print(nf)
            ncol=list(nf.columns)
            # Rows shown by Result_Print; missing matches padded with 'x'.
            if(len(nf2['Serial no'])==0):
                root=tk.Tk()
                root.withdraw()
                messagebox.showinfo("失敗", "未找到符合資料")
            elif(len(nf2['Serial no'])==1):
                row_nf3=nf3.iloc[0].tolist()
                row_nf32=['x','x','x','x','x','x','x','x','x','x','x','x','x','x','x','x']
                row_nf33=['x','x','x','x','x','x','x','x','x','x','x','x','x','x','x','x']
            elif(len(nf2['Serial no'])==2):
                row_nf3=nf3.iloc[0].tolist()
                row_nf32=nf4.iloc[0].tolist()
                row_nf33=['x','x','x','x','x','x','x','x','x','x','x','x','x','x','x','x']
            else:
                row_nf3=nf3.iloc[0].tolist()
                row_nf32=nf4.iloc[0].tolist()
                print(row_nf32)
                row_nf33=nf5.iloc[0].tolist()
            name_n=nf['Serial no'].tolist()
            rate_n=Rate
            # --- color classification method (original note) ---
            # (adjustable) for a looser comparison, remove the ''' pair
            # around the block below (original note).
            '''
            newdf1=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)]
            newdf2=dfread.loc[(dfread['S']<=(SumRGB+2))&(dfread['S']>=(SumRGB-2))]
            newdf=pd.concat([newdf1, newdf2])
            '''
            # (adjustable) for a stricter comparison, remove the ''' pair
            # around the block below (original note) ...
            '''
            newdf=dfread.loc[(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]
            newdf=newdf.loc[(newdf['color']==color)]
            '''
            # ... and add a # at the start of the line below (original note).
            newdf=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]
            # Per-channel and aggregate differences vs the sampled average.
            newdf.insert(1,'Rdif',newdf[['R']].add(-RAvr))
            newdf.insert(2,'Gdif',newdf[['G']].add(-GAvr))
            newdf.insert(3,'Bdif',newdf[['B']].add(-BAvr))
            newdf.insert(4,'Adif',abs(newdf[['A']].add(-SumAvr)))
            newdf.insert(5,'Sdif',abs(newdf[['S']].add(-SumRGB)))
            df=newdf.sort_values(by=['Sdif', 'Adif'], ascending=True).head(100)
            df.insert(1,'dalta',abs(df['Rdif']+df['Gdif']+df['Bdif']))
            df=df.sort_values(by=['dalta'],ascending=True)
            # Rank serials by how many candidate rows each contributes
            # (the merge-with-count yields the color_x/color_y columns).
            data=df[['Serial no','color']]
            group=data.groupby('Serial no')
            datacount=group.count()
            df=df.merge(datacount,left_on='Serial no',right_index=True)
            df=df.sort_values(by=['color_y'],ascending=False)
            df3=df.drop_duplicates('Serial no', keep='first', inplace=False).head()
            print(df3)
            df3.to_csv(".data base\\test_result.csv",index=False,encoding="utf_8_sig")
            if df3.empty ==True:
                root=tk.Tk()
                root.withdraw()
                messagebox.showinfo("失敗", "未找到符合資料")
            elif len(df3)<=2:
                root=tk.Tk()
                root.withdraw()
                messagebox.showinfo("失敗", "只找到少數資料\n 已存在test_result")
            else:
                # Exact matches (zero difference on every channel) win.
                Zero=df3.loc[(df3['Rdif']==0)&(df3['Gdif']==0)&(df3['Bdif']==0)]
                Zero=Zero.head(3)
                if Zero.empty==False:
                    Zero=Zero.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)
                    name=df3['Serial no'].tolist()
                    rate=df3['color_y'].tolist()
                    col=list(Zero.columns)
                    row_text=Zero.iloc[0].tolist()
                    df3=df3.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)
                    row_df3=df3.iloc[0].tolist()
                    row_df32=df3.iloc[1].tolist()
                    row_df33=df3.iloc[2].tolist()
                    Result_Print()
                    print('0')
                    print(Zero)
                else:
                    # Otherwise split candidates into brighter/darker than
                    # the sample mean and take the closest by channel diffs.
                    filtdf=df3.loc[(df3['A']>=SumAvr)]
                    filtdf=filtdf.sort_values(by=['Rdif','Gdif','Bdif']).head()
                    Neg_filtdf=df3.loc[(df3['A']<SumAvr)]
                    Neg_filtdf=Neg_filtdf.sort_values(by=['Rdif','Gdif','Bdif']).head()
                    if Neg_filtdf.empty==True and filtdf.empty ==True:
                        root=tk.Tk()
                        root.withdraw()
                        messagebox.showinfo("失敗", "未找到符合資料")
                    else:
                        # NOTE(review): if filtdf is empty while Neg_filtdf
                        # is not, filtdf.iloc[0] below raises — confirm
                        # whether that case can occur in practice.
                        filtdf=filtdf.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)
                        name=df3['Serial no'].tolist()
                        rate=df3['color_y'].tolist()
                        col=list(filtdf.columns)
                        row_text=filtdf.iloc[0].tolist()
                        df3=df3.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)
                        row_df3=df3.iloc[0].tolist()
                        row_df32=df3.iloc[1].tolist()
                        row_df33=df3.iloc[2].tolist()
                        Result_Print()
                        print("最接近的為1",filtdf.head(1))
def color_def(BAvr,GAvr,RAvr):
    """Classify an averaged BGR triple into a coarse color name.

    Assigns the result to the module-level global ``color`` (read by
    ``CircleCallback``) and also returns it.

    Args:
        BAvr: averaged blue-channel value.
        GAvr: averaged green-channel value.
        RAvr: averaged red-channel value.

    Returns:
        One of 'White', 'Blue', 'Cyan', 'Purple', 'Green', 'Yellow', 'Red'.
    """
    global color
    # Channels within 1 of each other count as white/grey.
    if abs(int(BAvr)-int(GAvr))<=1 and abs(int(BAvr)-int(RAvr))<=1:
        color='White'
    elif BAvr>=GAvr and BAvr>=RAvr:
        # Blue channel dominates.
        if BAvr-GAvr>3 and BAvr-RAvr>=3:
            color='Blue'
        elif BAvr-GAvr<3:
            color='Cyan'
        else:
            color='Purple'
    elif GAvr>=RAvr and GAvr>=BAvr:
        # Green channel dominates.
        if GAvr-RAvr>3 or GAvr-BAvr>3:
            color='Green'
        elif GAvr-RAvr<3:
            color='Yellow'
        else:
            color='Cyan'
    elif RAvr>=GAvr and RAvr>=BAvr:
        # Red channel dominates.
        if RAvr-GAvr>=3 and RAvr-BAvr>=3:
            color='Red'
        elif RAvr-GAvr<3:
            color='Yellow'
        else:
            color='Purple'
    else:
        # Unreachable (the max channel always satisfies one branch above);
        # kept as a defensive default.
        color='White'
    # BUG FIX: the original set the global in the final branch but fell
    # through without a return (yielding None); return from a single exit
    # point so every path reports the classification.
    return color
#img=cv2.imdecode(np.fromfile(r"D:\桌面\JA Material\JA-material\pure\%s" % (result),dtype=np.uint8),-1)
# Load the user-selected image (filename from the global `result`) via
# np.fromfile + cv2.imdecode so non-ASCII paths work on Windows; the -1
# flag keeps the file's original channels unchanged.
img=cv2.imdecode(np.fromfile(r".pure\%s" % (result),dtype=np.uint8),-1)
cv2.namedWindow('mouse_callback')
# bind the callback function to window
cv2.setMouseCallback('mouse_callback',CircleCallback)
def main():
    """Show the analysis window until the user presses Esc, then clean up."""
    ESC_KEY = 27
    running = True
    while running:
        cv2.imshow('mouse_callback', img)
        # Poll keyboard events every 20 ms; Esc ends the loop.
        if cv2.waitKey(20) == ESC_KEY:
            running = False
    cv2.destroyAllWindows()
# Start the interactive viewer only when run as a script, not on import.
if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "a126b1775ffe1ba1aebc288ce17fac8ada0b0756",
"index": 312,
"step-1": "<mask token>\n\n\ndef quitScreen():\n messagebox.showinfo('collecting data', '點擊視窗開始分析')\n root.destroy()\n root2 = Tk()\n root2.destroy()\n\n\ndef getTextInput():\n global result, result2\n result = text.get(1.0, tk.END + '-1c')\n result2 = text2.get(1.0, tk.END + '-1c')\n\n\n<mask token>\n\n\ndef Result_Print():\n window = Tk()\n window.title('分析結果')\n window.geometry('600x900')\n frame2 = Frame(window)\n frame2.pack(fill='both')\n tablayout = Notebook(frame2)\n tablayout2 = Notebook(frame2)\n ntab1 = Frame(tablayout2)\n ntab1.pack(fill='both')\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=25, height=2, text=name_n[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=5, height=2, text='%s' % rate_n[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab1, text='交叉配對結果')\n ntab2 = Frame(tablayout2)\n ntab2.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n 
ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab2, text='配方1')\n ntab3 = Frame(tablayout2)\n ntab3.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=row_nf32[row], bg\n ='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab3, text='配方2')\n ntab4 = Frame(tablayout2)\n ntab4.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=row_nf33[row], bg\n ='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab4, text='配方3')\n ntab5 = Frame(tablayout2)\n ntab5.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab5, text='最接近配方')\n tab1 = 
Frame(tablayout)\n tab1.pack(fill='both')\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=25, height=2, text=name[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=5, height=2, text='%s' % rate[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab1, text='顏色分類結果')\n tab2 = Frame(tablayout)\n tab2.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=row_df3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab2, text='配方1')\n tab3 = Frame(tablayout)\n tab3.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=row_df32[row], bg=\n 'black', fg='white', padx=1, pady=1)\n 
label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab3, text='配方2')\n tab4 = Frame(tablayout)\n tab4.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, text=row_df33[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab4, text='配方3')\n tab5 = Frame(tablayout)\n tab5.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=row_text[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab5, text='最接近配方')\n tablayout.pack()\n tablayout2.pack()\n window.mainloop()\n\n\ndef CircleCallback(event, x, y, flags, param):\n n = 8\n global refPt, PtBGR, w, h, Serial, r1, r2, r3, r4, rate, rate2, rate3, r6, r7, r8, r9, add, add2, add3, color, b, g, r, df3, name, rate, col, row_text\n global row_df3, row_df32, row_df33, row_text2, row_nf3, row_nf32, nf3, row_nf33, name_n, rate_n, ncol\n if event == cv2.EVENT_LBUTTONDOWN:\n n = 500\n for c in range(0, n):\n c += 1\n ranx = random.randint(0, 499)\n rany = random.randint(0, 499)\n refPt.append((ranx, rany))\n b, g, r = img[ranx, rany]\n PtBGR.append((b, g, r))\n b = [x[0] 
for x in PtBGR]\n g = [x[1] for x in PtBGR]\n r = [x[2] for x in PtBGR]\n if len(refPt) == n:\n BAvr = round(sum(b[0:n]) / n)\n GAvr = round(sum(g[0:n]) / n)\n RAvr = round(sum(r[0:n]) / n)\n SumRGB = BAvr + GAvr + RAvr\n SumAvr = round(SumRGB / 3)\n color_def(BAvr, GAvr, RAvr)\n color_name.append(color)\n AvrRGB = {'R': RAvr, 'G': GAvr, 'B': BAvr, 'Sum': SumRGB,\n 'Avr': SumAvr, 'color': color_name}\n df_test = pd.DataFrame(AvrRGB, index=[0])\n dfread = pd.read_csv('.data base\\\\%s' % result2)\n dfread['A'] = round((dfread['R'] + dfread['G'] + dfread['B'\n ]) / 3)\n dfread['S'] = dfread['R'] + dfread['G'] + dfread['B']\n nf = pd.DataFrame(list(zip(r, g, b)), columns=['R', 'G', 'B'])\n nfread = dfread[['Serial no', 'R', 'G', 'B']]\n loan = pd.merge(nf, nfread)\n group = loan.groupby('Serial no')\n Newnf = group.count()\n Newnf['P'] = round(Newnf['R'] / Newnf['R'].sum() * 100)\n Newnf = Newnf.sort_values(by=['R'], ascending=False)\n Rate = Newnf['P'].tolist()\n Newnf.columns = [' '.join(col).strip() for col in Newnf.\n columns.values]\n nf2 = pd.DataFrame(Newnf.to_records())\n nf2 = nf2.head(5)\n print(nf2)\n if len(nf2['Serial no']) == 0:\n i = 0\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 1:\n i = nf2.at[0, 'Serial no']\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 2:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = 0\n else:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = nf2.at[2, 'Serial no']\n print(k)\n nf3 = dfread.loc[dfread['Serial no'] == i].head(1)\n nf4 = dfread.loc[dfread['Serial no'] == j].head(1)\n nf5 = dfread.loc[dfread['Serial no'] == k].head(1)\n nf3 = nf3.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf4 = nf4.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf5 = nf5.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf = pd.concat([nf3, nf4, nf5])\n nf.to_csv('.data base\\\\test_result2.csv', index=False,\n encoding='utf_8_sig')\n print(nf)\n ncol = list(nf.columns)\n if len(nf2['Serial no']) == 
0:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(nf2['Serial no']) == 1:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n elif len(nf2['Serial no']) == 2:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n else:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n print(row_nf32)\n row_nf33 = nf5.iloc[0].tolist()\n name_n = nf['Serial no'].tolist()\n rate_n = Rate\n \"\"\"\n newdf1=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)]\n newdf2=dfread.loc[(dfread['S']<=(SumRGB+2))&(dfread['S']>=(SumRGB-2))]\n newdf=pd.concat([newdf1, newdf2])\n \"\"\"\n \"\"\"\n newdf=dfread.loc[(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]\n newdf=newdf.loc[(newdf['color']==color)]\n \"\"\"\n newdf = dfread.loc[(dfread['color'] == color) | (dfread['A'\n ] == SumAvr) | (dfread['S'] == SumRGB)]\n newdf.insert(1, 'Rdif', newdf[['R']].add(-RAvr))\n newdf.insert(2, 'Gdif', newdf[['G']].add(-GAvr))\n newdf.insert(3, 'Bdif', newdf[['B']].add(-BAvr))\n newdf.insert(4, 'Adif', abs(newdf[['A']].add(-SumAvr)))\n newdf.insert(5, 'Sdif', abs(newdf[['S']].add(-SumRGB)))\n df = newdf.sort_values(by=['Sdif', 'Adif'], ascending=True\n ).head(100)\n df.insert(1, 'dalta', abs(df['Rdif'] + df['Gdif'] + df['Bdif'])\n )\n df = df.sort_values(by=['dalta'], ascending=True)\n data = df[['Serial no', 'color']]\n group = data.groupby('Serial no')\n datacount = group.count()\n df = df.merge(datacount, left_on='Serial no', right_index=True)\n df = df.sort_values(by=['color_y'], ascending=False)\n df3 = df.drop_duplicates('Serial no', keep='first', inplace\n =False).head()\n print(df3)\n df3.to_csv('.data base\\\\test_result.csv', index=False,\n 
encoding='utf_8_sig')\n if df3.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(df3) <= 2:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '只找到少數資料\\n 已存在test_result')\n else:\n Zero = df3.loc[(df3['Rdif'] == 0) & (df3['Gdif'] == 0) &\n (df3['Bdif'] == 0)]\n Zero = Zero.head(3)\n if Zero.empty == False:\n Zero = Zero.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = list(Zero.columns)\n row_text = Zero.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('0')\n print(Zero)\n else:\n filtdf = df3.loc[df3['A'] >= SumAvr]\n filtdf = filtdf.sort_values(by=['Rdif', 'Gdif', 'Bdif']\n ).head()\n Neg_filtdf = df3.loc[df3['A'] < SumAvr]\n Neg_filtdf = Neg_filtdf.sort_values(by=['Rdif',\n 'Gdif', 'Bdif']).head()\n if Neg_filtdf.empty == True and filtdf.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n else:\n filtdf = filtdf.drop(['R', 'G', 'B', 'dalta',\n 'Rdif', 'Gdif', 'Bdif', 'A', 'S', 'Adif',\n 'Sdif', 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = list(filtdf.columns)\n row_text = filtdf.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('最接近的為1', filtdf.head(1))\n\n\n<mask token>\n\n\ndef main():\n while True:\n cv2.imshow('mouse_callback', img)\n if cv2.waitKey(20) == 27:\n break\n cv2.destroyAllWindows()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef quitScreen():\n messagebox.showinfo('collecting data', '點擊視窗開始分析')\n root.destroy()\n root2 = Tk()\n root2.destroy()\n\n\ndef getTextInput():\n global result, result2\n result = text.get(1.0, tk.END + '-1c')\n result2 = text2.get(1.0, tk.END + '-1c')\n\n\n<mask token>\n\n\ndef Result_Print():\n window = Tk()\n window.title('分析結果')\n window.geometry('600x900')\n frame2 = Frame(window)\n frame2.pack(fill='both')\n tablayout = Notebook(frame2)\n tablayout2 = Notebook(frame2)\n ntab1 = Frame(tablayout2)\n ntab1.pack(fill='both')\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=25, height=2, text=name_n[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=5, height=2, text='%s' % rate_n[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab1, text='交叉配對結果')\n ntab2 = Frame(tablayout2)\n ntab2.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n 
ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab2, text='配方1')\n ntab3 = Frame(tablayout2)\n ntab3.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=row_nf32[row], bg\n ='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab3, text='配方2')\n ntab4 = Frame(tablayout2)\n ntab4.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=row_nf33[row], bg\n ='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab4, text='配方3')\n ntab5 = Frame(tablayout2)\n ntab5.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab5, text='最接近配方')\n tab1 = 
Frame(tablayout)\n tab1.pack(fill='both')\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=25, height=2, text=name[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=5, height=2, text='%s' % rate[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab1, text='顏色分類結果')\n tab2 = Frame(tablayout)\n tab2.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=row_df3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab2, text='配方1')\n tab3 = Frame(tablayout)\n tab3.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=row_df32[row], bg=\n 'black', fg='white', padx=1, pady=1)\n 
label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab3, text='配方2')\n tab4 = Frame(tablayout)\n tab4.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, text=row_df33[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab4, text='配方3')\n tab5 = Frame(tablayout)\n tab5.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=row_text[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab5, text='最接近配方')\n tablayout.pack()\n tablayout2.pack()\n window.mainloop()\n\n\ndef CircleCallback(event, x, y, flags, param):\n n = 8\n global refPt, PtBGR, w, h, Serial, r1, r2, r3, r4, rate, rate2, rate3, r6, r7, r8, r9, add, add2, add3, color, b, g, r, df3, name, rate, col, row_text\n global row_df3, row_df32, row_df33, row_text2, row_nf3, row_nf32, nf3, row_nf33, name_n, rate_n, ncol\n if event == cv2.EVENT_LBUTTONDOWN:\n n = 500\n for c in range(0, n):\n c += 1\n ranx = random.randint(0, 499)\n rany = random.randint(0, 499)\n refPt.append((ranx, rany))\n b, g, r = img[ranx, rany]\n PtBGR.append((b, g, r))\n b = [x[0] 
for x in PtBGR]\n g = [x[1] for x in PtBGR]\n r = [x[2] for x in PtBGR]\n if len(refPt) == n:\n BAvr = round(sum(b[0:n]) / n)\n GAvr = round(sum(g[0:n]) / n)\n RAvr = round(sum(r[0:n]) / n)\n SumRGB = BAvr + GAvr + RAvr\n SumAvr = round(SumRGB / 3)\n color_def(BAvr, GAvr, RAvr)\n color_name.append(color)\n AvrRGB = {'R': RAvr, 'G': GAvr, 'B': BAvr, 'Sum': SumRGB,\n 'Avr': SumAvr, 'color': color_name}\n df_test = pd.DataFrame(AvrRGB, index=[0])\n dfread = pd.read_csv('.data base\\\\%s' % result2)\n dfread['A'] = round((dfread['R'] + dfread['G'] + dfread['B'\n ]) / 3)\n dfread['S'] = dfread['R'] + dfread['G'] + dfread['B']\n nf = pd.DataFrame(list(zip(r, g, b)), columns=['R', 'G', 'B'])\n nfread = dfread[['Serial no', 'R', 'G', 'B']]\n loan = pd.merge(nf, nfread)\n group = loan.groupby('Serial no')\n Newnf = group.count()\n Newnf['P'] = round(Newnf['R'] / Newnf['R'].sum() * 100)\n Newnf = Newnf.sort_values(by=['R'], ascending=False)\n Rate = Newnf['P'].tolist()\n Newnf.columns = [' '.join(col).strip() for col in Newnf.\n columns.values]\n nf2 = pd.DataFrame(Newnf.to_records())\n nf2 = nf2.head(5)\n print(nf2)\n if len(nf2['Serial no']) == 0:\n i = 0\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 1:\n i = nf2.at[0, 'Serial no']\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 2:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = 0\n else:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = nf2.at[2, 'Serial no']\n print(k)\n nf3 = dfread.loc[dfread['Serial no'] == i].head(1)\n nf4 = dfread.loc[dfread['Serial no'] == j].head(1)\n nf5 = dfread.loc[dfread['Serial no'] == k].head(1)\n nf3 = nf3.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf4 = nf4.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf5 = nf5.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf = pd.concat([nf3, nf4, nf5])\n nf.to_csv('.data base\\\\test_result2.csv', index=False,\n encoding='utf_8_sig')\n print(nf)\n ncol = list(nf.columns)\n if len(nf2['Serial no']) == 
0:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(nf2['Serial no']) == 1:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n elif len(nf2['Serial no']) == 2:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n else:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n print(row_nf32)\n row_nf33 = nf5.iloc[0].tolist()\n name_n = nf['Serial no'].tolist()\n rate_n = Rate\n \"\"\"\n newdf1=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)]\n newdf2=dfread.loc[(dfread['S']<=(SumRGB+2))&(dfread['S']>=(SumRGB-2))]\n newdf=pd.concat([newdf1, newdf2])\n \"\"\"\n \"\"\"\n newdf=dfread.loc[(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]\n newdf=newdf.loc[(newdf['color']==color)]\n \"\"\"\n newdf = dfread.loc[(dfread['color'] == color) | (dfread['A'\n ] == SumAvr) | (dfread['S'] == SumRGB)]\n newdf.insert(1, 'Rdif', newdf[['R']].add(-RAvr))\n newdf.insert(2, 'Gdif', newdf[['G']].add(-GAvr))\n newdf.insert(3, 'Bdif', newdf[['B']].add(-BAvr))\n newdf.insert(4, 'Adif', abs(newdf[['A']].add(-SumAvr)))\n newdf.insert(5, 'Sdif', abs(newdf[['S']].add(-SumRGB)))\n df = newdf.sort_values(by=['Sdif', 'Adif'], ascending=True\n ).head(100)\n df.insert(1, 'dalta', abs(df['Rdif'] + df['Gdif'] + df['Bdif'])\n )\n df = df.sort_values(by=['dalta'], ascending=True)\n data = df[['Serial no', 'color']]\n group = data.groupby('Serial no')\n datacount = group.count()\n df = df.merge(datacount, left_on='Serial no', right_index=True)\n df = df.sort_values(by=['color_y'], ascending=False)\n df3 = df.drop_duplicates('Serial no', keep='first', inplace\n =False).head()\n print(df3)\n df3.to_csv('.data base\\\\test_result.csv', index=False,\n 
encoding='utf_8_sig')\n if df3.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(df3) <= 2:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '只找到少數資料\\n 已存在test_result')\n else:\n Zero = df3.loc[(df3['Rdif'] == 0) & (df3['Gdif'] == 0) &\n (df3['Bdif'] == 0)]\n Zero = Zero.head(3)\n if Zero.empty == False:\n Zero = Zero.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = list(Zero.columns)\n row_text = Zero.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('0')\n print(Zero)\n else:\n filtdf = df3.loc[df3['A'] >= SumAvr]\n filtdf = filtdf.sort_values(by=['Rdif', 'Gdif', 'Bdif']\n ).head()\n Neg_filtdf = df3.loc[df3['A'] < SumAvr]\n Neg_filtdf = Neg_filtdf.sort_values(by=['Rdif',\n 'Gdif', 'Bdif']).head()\n if Neg_filtdf.empty == True and filtdf.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n else:\n filtdf = filtdf.drop(['R', 'G', 'B', 'dalta',\n 'Rdif', 'Gdif', 'Bdif', 'A', 'S', 'Adif',\n 'Sdif', 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = list(filtdf.columns)\n row_text = filtdf.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('最接近的為1', filtdf.head(1))\n\n\ndef color_def(BAvr, GAvr, RAvr):\n global color\n if abs(int(BAvr) - int(GAvr)) <= 1 and abs(int(BAvr) - int(RAvr)) <= 1:\n color = 'White'\n return color\n elif BAvr >= GAvr 
and BAvr >= RAvr:\n if BAvr - GAvr > 3 and BAvr - RAvr >= 3:\n color = 'Blue'\n return color\n elif BAvr - GAvr < 3:\n color = 'Cyan'\n return color\n else:\n color = 'Purple'\n return color\n elif GAvr >= RAvr and GAvr >= BAvr:\n if GAvr - RAvr > 3 or GAvr - BAvr > 3:\n color = 'Green'\n return color\n elif GAvr - RAvr < 3:\n color = 'Yellow'\n return color\n else:\n color = 'Cyan'\n return color\n elif RAvr >= GAvr and RAvr >= BAvr:\n if RAvr - GAvr >= 3 and RAvr - BAvr >= 3:\n color = 'Red'\n return color\n elif RAvr - GAvr < 3:\n color = 'Yellow'\n return color\n else:\n color = 'Purple'\n return color\n else:\n color = 'White'\n\n\n<mask token>\n\n\ndef main():\n while True:\n cv2.imshow('mouse_callback', img)\n if cv2.waitKey(20) == 27:\n break\n cv2.destroyAllWindows()\n\n\n<mask token>\n",
"step-3": "<mask token>\nrefPt = []\nPtBGR = []\nr = []\ng = []\nb = []\nrefPt = []\nSerial = []\nPtBGR = []\nr1 = []\nr2 = []\nr3 = []\nr4 = []\nrate = []\nrate2 = []\nrate3 = []\nr6 = []\nr7 = []\nr8 = []\nr9 = []\nadd = []\nadd2 = []\nadd3 = []\ncolor_name = []\nlocate = []\nbrand = []\nboolean = False\nroot = tk.Tk()\nroot.geometry('400x200')\nroot.configure(background='white')\n\n\ndef quitScreen():\n messagebox.showinfo('collecting data', '點擊視窗開始分析')\n root.destroy()\n root2 = Tk()\n root2.destroy()\n\n\ndef getTextInput():\n global result, result2\n result = text.get(1.0, tk.END + '-1c')\n result2 = text2.get(1.0, tk.END + '-1c')\n\n\nimg = PhotoImage(file='buttons/QJsmall.png')\npanel = tk.Label(root, image=img)\npanel.grid(row=0, column=0, columnspan=3)\nlabelmode = tk.Label(root, text=\"\"\"請輸入圖片完整名稱\n ex:104432 w7.jpg\"\"\", bg='white')\nlabelmode.configure(font=('微軟正黑體', 10))\nlabelmode.grid(row=1)\ntext = tk.Text(root, width=20, height=1)\ntext.insert('insert', '.jpg')\ntext.configure(font=('微軟正黑體', 10))\ntext.grid(row=1, column=2)\nlabelmode2 = tk.Label(root, text=\"\"\"請輸入讀取資料庫名稱\n ex:PureColorBig.csv\"\"\", bg=\n 'white')\nlabelmode2.configure(font=('微軟正黑體', 10))\nlabelmode2.grid(row=2)\ntext2 = tk.Text(root, width=20, height=1)\ntext2.insert('insert', 'PureColorBig.csv')\ntext2.configure(font=('微軟正黑體', 10))\ntext2.grid(row=2, column=2)\nimg_confirm = PhotoImage(file='buttons/confirm.png')\nimg_start = PhotoImage(file='buttons/start.png')\nbtnRead = tk.Button(root, image=img_confirm, text=' ', relief='flat',\n command=getTextInput)\nbtnRead.grid(row=5, column=1)\nbtnRead2 = tk.Button(root, image=img_start, text=' ', relief='flat',\n command=quitScreen)\nbtnRead2.grid(row=5, column=2)\nroot.mainloop()\n\n\ndef Result_Print():\n window = Tk()\n window.title('分析結果')\n window.geometry('600x900')\n frame2 = Frame(window)\n frame2.pack(fill='both')\n tablayout = Notebook(frame2)\n tablayout2 = Notebook(frame2)\n ntab1 = Frame(tablayout2)\n 
ntab1.pack(fill='both')\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=25, height=2, text=name_n[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=5, height=2, text='%s' % rate_n[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab1, text='交叉配對結果')\n ntab2 = Frame(tablayout2)\n ntab2.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab2, text='配方1')\n ntab3 = Frame(tablayout2)\n ntab3.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=row_nf32[row], bg\n ='black', fg='white', padx=1, 
pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab3, text='配方2')\n ntab4 = Frame(tablayout2)\n ntab4.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=row_nf33[row], bg\n ='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab4, text='配方3')\n ntab5 = Frame(tablayout2)\n ntab5.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab5, text='最接近配方')\n tab1 = Frame(tablayout)\n tab1.pack(fill='both')\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=25, height=2, text=name[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=5, height=2, text='%s' % rate[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n 
tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab1, text='顏色分類結果')\n tab2 = Frame(tablayout)\n tab2.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=row_df3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab2, text='配方1')\n tab3 = Frame(tablayout)\n tab3.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=row_df32[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab3, text='配方2')\n tab4 = Frame(tablayout)\n tab4.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, 
text=row_df33[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab4, text='配方3')\n tab5 = Frame(tablayout)\n tab5.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=row_text[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab5, text='最接近配方')\n tablayout.pack()\n tablayout2.pack()\n window.mainloop()\n\n\ndef CircleCallback(event, x, y, flags, param):\n n = 8\n global refPt, PtBGR, w, h, Serial, r1, r2, r3, r4, rate, rate2, rate3, r6, r7, r8, r9, add, add2, add3, color, b, g, r, df3, name, rate, col, row_text\n global row_df3, row_df32, row_df33, row_text2, row_nf3, row_nf32, nf3, row_nf33, name_n, rate_n, ncol\n if event == cv2.EVENT_LBUTTONDOWN:\n n = 500\n for c in range(0, n):\n c += 1\n ranx = random.randint(0, 499)\n rany = random.randint(0, 499)\n refPt.append((ranx, rany))\n b, g, r = img[ranx, rany]\n PtBGR.append((b, g, r))\n b = [x[0] for x in PtBGR]\n g = [x[1] for x in PtBGR]\n r = [x[2] for x in PtBGR]\n if len(refPt) == n:\n BAvr = round(sum(b[0:n]) / n)\n GAvr = round(sum(g[0:n]) / n)\n RAvr = round(sum(r[0:n]) / n)\n SumRGB = BAvr + GAvr + RAvr\n SumAvr = round(SumRGB / 3)\n color_def(BAvr, GAvr, RAvr)\n color_name.append(color)\n AvrRGB = {'R': RAvr, 'G': GAvr, 'B': BAvr, 'Sum': SumRGB,\n 'Avr': SumAvr, 'color': color_name}\n df_test = pd.DataFrame(AvrRGB, index=[0])\n dfread = pd.read_csv('.data base\\\\%s' % result2)\n dfread['A'] = round((dfread['R'] + dfread['G'] + 
dfread['B'\n ]) / 3)\n dfread['S'] = dfread['R'] + dfread['G'] + dfread['B']\n nf = pd.DataFrame(list(zip(r, g, b)), columns=['R', 'G', 'B'])\n nfread = dfread[['Serial no', 'R', 'G', 'B']]\n loan = pd.merge(nf, nfread)\n group = loan.groupby('Serial no')\n Newnf = group.count()\n Newnf['P'] = round(Newnf['R'] / Newnf['R'].sum() * 100)\n Newnf = Newnf.sort_values(by=['R'], ascending=False)\n Rate = Newnf['P'].tolist()\n Newnf.columns = [' '.join(col).strip() for col in Newnf.\n columns.values]\n nf2 = pd.DataFrame(Newnf.to_records())\n nf2 = nf2.head(5)\n print(nf2)\n if len(nf2['Serial no']) == 0:\n i = 0\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 1:\n i = nf2.at[0, 'Serial no']\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 2:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = 0\n else:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = nf2.at[2, 'Serial no']\n print(k)\n nf3 = dfread.loc[dfread['Serial no'] == i].head(1)\n nf4 = dfread.loc[dfread['Serial no'] == j].head(1)\n nf5 = dfread.loc[dfread['Serial no'] == k].head(1)\n nf3 = nf3.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf4 = nf4.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf5 = nf5.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf = pd.concat([nf3, nf4, nf5])\n nf.to_csv('.data base\\\\test_result2.csv', index=False,\n encoding='utf_8_sig')\n print(nf)\n ncol = list(nf.columns)\n if len(nf2['Serial no']) == 0:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(nf2['Serial no']) == 1:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n elif len(nf2['Serial no']) == 2:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n else:\n row_nf3 = 
nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n print(row_nf32)\n row_nf33 = nf5.iloc[0].tolist()\n name_n = nf['Serial no'].tolist()\n rate_n = Rate\n \"\"\"\n newdf1=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)]\n newdf2=dfread.loc[(dfread['S']<=(SumRGB+2))&(dfread['S']>=(SumRGB-2))]\n newdf=pd.concat([newdf1, newdf2])\n \"\"\"\n \"\"\"\n newdf=dfread.loc[(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]\n newdf=newdf.loc[(newdf['color']==color)]\n \"\"\"\n newdf = dfread.loc[(dfread['color'] == color) | (dfread['A'\n ] == SumAvr) | (dfread['S'] == SumRGB)]\n newdf.insert(1, 'Rdif', newdf[['R']].add(-RAvr))\n newdf.insert(2, 'Gdif', newdf[['G']].add(-GAvr))\n newdf.insert(3, 'Bdif', newdf[['B']].add(-BAvr))\n newdf.insert(4, 'Adif', abs(newdf[['A']].add(-SumAvr)))\n newdf.insert(5, 'Sdif', abs(newdf[['S']].add(-SumRGB)))\n df = newdf.sort_values(by=['Sdif', 'Adif'], ascending=True\n ).head(100)\n df.insert(1, 'dalta', abs(df['Rdif'] + df['Gdif'] + df['Bdif'])\n )\n df = df.sort_values(by=['dalta'], ascending=True)\n data = df[['Serial no', 'color']]\n group = data.groupby('Serial no')\n datacount = group.count()\n df = df.merge(datacount, left_on='Serial no', right_index=True)\n df = df.sort_values(by=['color_y'], ascending=False)\n df3 = df.drop_duplicates('Serial no', keep='first', inplace\n =False).head()\n print(df3)\n df3.to_csv('.data base\\\\test_result.csv', index=False,\n encoding='utf_8_sig')\n if df3.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(df3) <= 2:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '只找到少數資料\\n 已存在test_result')\n else:\n Zero = df3.loc[(df3['Rdif'] == 0) & (df3['Gdif'] == 0) &\n (df3['Bdif'] == 0)]\n Zero = Zero.head(3)\n if Zero.empty == False:\n Zero = Zero.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = 
list(Zero.columns)\n row_text = Zero.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('0')\n print(Zero)\n else:\n filtdf = df3.loc[df3['A'] >= SumAvr]\n filtdf = filtdf.sort_values(by=['Rdif', 'Gdif', 'Bdif']\n ).head()\n Neg_filtdf = df3.loc[df3['A'] < SumAvr]\n Neg_filtdf = Neg_filtdf.sort_values(by=['Rdif',\n 'Gdif', 'Bdif']).head()\n if Neg_filtdf.empty == True and filtdf.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n else:\n filtdf = filtdf.drop(['R', 'G', 'B', 'dalta',\n 'Rdif', 'Gdif', 'Bdif', 'A', 'S', 'Adif',\n 'Sdif', 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = list(filtdf.columns)\n row_text = filtdf.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('最接近的為1', filtdf.head(1))\n\n\ndef color_def(BAvr, GAvr, RAvr):\n global color\n if abs(int(BAvr) - int(GAvr)) <= 1 and abs(int(BAvr) - int(RAvr)) <= 1:\n color = 'White'\n return color\n elif BAvr >= GAvr and BAvr >= RAvr:\n if BAvr - GAvr > 3 and BAvr - RAvr >= 3:\n color = 'Blue'\n return color\n elif BAvr - GAvr < 3:\n color = 'Cyan'\n return color\n else:\n color = 'Purple'\n return color\n elif GAvr >= RAvr and GAvr >= BAvr:\n if GAvr - RAvr > 3 or GAvr - BAvr > 3:\n color = 'Green'\n return color\n elif GAvr - RAvr < 3:\n color = 'Yellow'\n return color\n else:\n color = 'Cyan'\n return color\n elif RAvr >= GAvr and RAvr >= BAvr:\n if RAvr - GAvr >= 3 and RAvr - BAvr >= 3:\n color = 'Red'\n return color\n elif RAvr - GAvr < 3:\n color = 'Yellow'\n return color\n 
else:\n color = 'Purple'\n return color\n else:\n color = 'White'\n\n\nimg = cv2.imdecode(np.fromfile('.pure\\\\%s' % result, dtype=np.uint8), -1)\ncv2.namedWindow('mouse_callback')\ncv2.setMouseCallback('mouse_callback', CircleCallback)\n\n\ndef main():\n while True:\n cv2.imshow('mouse_callback', img)\n if cv2.waitKey(20) == 27:\n break\n cv2.destroyAllWindows()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import cv2\nimport numpy as np\nimport pandas as pd\nimport tkinter as tk\nimport random\nfrom tkinter import *\nfrom tkinter import ttk\nfrom tkinter import messagebox\nfrom tkinter import Scale, Tk\nfrom tkinter.ttk import Notebook\nrefPt = []\nPtBGR = []\nr = []\ng = []\nb = []\nrefPt = []\nSerial = []\nPtBGR = []\nr1 = []\nr2 = []\nr3 = []\nr4 = []\nrate = []\nrate2 = []\nrate3 = []\nr6 = []\nr7 = []\nr8 = []\nr9 = []\nadd = []\nadd2 = []\nadd3 = []\ncolor_name = []\nlocate = []\nbrand = []\nboolean = False\nroot = tk.Tk()\nroot.geometry('400x200')\nroot.configure(background='white')\n\n\ndef quitScreen():\n messagebox.showinfo('collecting data', '點擊視窗開始分析')\n root.destroy()\n root2 = Tk()\n root2.destroy()\n\n\ndef getTextInput():\n global result, result2\n result = text.get(1.0, tk.END + '-1c')\n result2 = text2.get(1.0, tk.END + '-1c')\n\n\nimg = PhotoImage(file='buttons/QJsmall.png')\npanel = tk.Label(root, image=img)\npanel.grid(row=0, column=0, columnspan=3)\nlabelmode = tk.Label(root, text=\"\"\"請輸入圖片完整名稱\n ex:104432 w7.jpg\"\"\", bg='white')\nlabelmode.configure(font=('微軟正黑體', 10))\nlabelmode.grid(row=1)\ntext = tk.Text(root, width=20, height=1)\ntext.insert('insert', '.jpg')\ntext.configure(font=('微軟正黑體', 10))\ntext.grid(row=1, column=2)\nlabelmode2 = tk.Label(root, text=\"\"\"請輸入讀取資料庫名稱\n ex:PureColorBig.csv\"\"\", bg=\n 'white')\nlabelmode2.configure(font=('微軟正黑體', 10))\nlabelmode2.grid(row=2)\ntext2 = tk.Text(root, width=20, height=1)\ntext2.insert('insert', 'PureColorBig.csv')\ntext2.configure(font=('微軟正黑體', 10))\ntext2.grid(row=2, column=2)\nimg_confirm = PhotoImage(file='buttons/confirm.png')\nimg_start = PhotoImage(file='buttons/start.png')\nbtnRead = tk.Button(root, image=img_confirm, text=' ', relief='flat',\n command=getTextInput)\nbtnRead.grid(row=5, column=1)\nbtnRead2 = tk.Button(root, image=img_start, text=' ', relief='flat',\n command=quitScreen)\nbtnRead2.grid(row=5, column=2)\nroot.mainloop()\n\n\ndef Result_Print():\n 
window = Tk()\n window.title('分析結果')\n window.geometry('600x900')\n frame2 = Frame(window)\n frame2.pack(fill='both')\n tablayout = Notebook(frame2)\n tablayout2 = Notebook(frame2)\n ntab1 = Frame(tablayout2)\n ntab1.pack(fill='both')\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=25, height=2, text=name_n[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=5, height=2, text='%s' % rate_n[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label = Label(ntab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab1, text='交叉配對結果')\n ntab2 = Frame(tablayout2)\n ntab2.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab2, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab2, text='配方1')\n ntab3 = Frame(tablayout2)\n ntab3.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, 
pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab3, width=22, height=1, text=row_nf32[row], bg\n ='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab3, text='配方2')\n ntab4 = Frame(tablayout2)\n ntab4.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab4, width=22, height=1, text=row_nf33[row], bg\n ='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab4, text='配方3')\n ntab5 = Frame(tablayout2)\n ntab5.pack(fill='both')\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=ncol[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n for row in range(len(ncol)):\n for column in range(1):\n label = Label(ntab5, width=22, height=1, text=row_nf3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n ntab1.grid_columnconfigure(column, weight=1)\n tablayout2.add(ntab5, text='最接近配方')\n tab1 = Frame(tablayout)\n tab1.pack(fill='both')\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=25, height=2, text=name[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=column, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in 
range(1):\n label = Label(tab1, width=5, height=2, text='%s' % rate[row],\n bg='black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(name)):\n for column in range(1):\n label = Label(tab1, width=12, height=2, text='% 相似程度', bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=2, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab1, text='顏色分類結果')\n tab2 = Frame(tablayout)\n tab2.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab2, width=22, height=1, text=row_df3[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab2, text='配方1')\n tab3 = Frame(tablayout)\n tab3.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab3, width=22, height=1, text=row_df32[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab3, text='配方2')\n tab4 = Frame(tablayout)\n tab4.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, 
column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab4, width=22, height=1, text=row_df33[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab4, text='配方3')\n tab5 = Frame(tablayout)\n tab5.pack(fill='both')\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=col[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=0, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n for row in range(len(col)):\n for column in range(1):\n label = Label(tab5, width=22, height=1, text=row_text[row], bg=\n 'black', fg='white', padx=1, pady=1)\n label.grid(row=row, column=1, sticky='nsew', padx=1, pady=1)\n tab1.grid_columnconfigure(column, weight=1)\n tablayout.add(tab5, text='最接近配方')\n tablayout.pack()\n tablayout2.pack()\n window.mainloop()\n\n\ndef CircleCallback(event, x, y, flags, param):\n n = 8\n global refPt, PtBGR, w, h, Serial, r1, r2, r3, r4, rate, rate2, rate3, r6, r7, r8, r9, add, add2, add3, color, b, g, r, df3, name, rate, col, row_text\n global row_df3, row_df32, row_df33, row_text2, row_nf3, row_nf32, nf3, row_nf33, name_n, rate_n, ncol\n if event == cv2.EVENT_LBUTTONDOWN:\n n = 500\n for c in range(0, n):\n c += 1\n ranx = random.randint(0, 499)\n rany = random.randint(0, 499)\n refPt.append((ranx, rany))\n b, g, r = img[ranx, rany]\n PtBGR.append((b, g, r))\n b = [x[0] for x in PtBGR]\n g = [x[1] for x in PtBGR]\n r = [x[2] for x in PtBGR]\n if len(refPt) == n:\n BAvr = round(sum(b[0:n]) / n)\n GAvr = round(sum(g[0:n]) / n)\n RAvr = round(sum(r[0:n]) / n)\n SumRGB = BAvr + GAvr + RAvr\n SumAvr = round(SumRGB / 3)\n color_def(BAvr, GAvr, RAvr)\n color_name.append(color)\n AvrRGB = {'R': RAvr, 'G': GAvr, 'B': BAvr, 'Sum': SumRGB,\n 
'Avr': SumAvr, 'color': color_name}\n df_test = pd.DataFrame(AvrRGB, index=[0])\n dfread = pd.read_csv('.data base\\\\%s' % result2)\n dfread['A'] = round((dfread['R'] + dfread['G'] + dfread['B'\n ]) / 3)\n dfread['S'] = dfread['R'] + dfread['G'] + dfread['B']\n nf = pd.DataFrame(list(zip(r, g, b)), columns=['R', 'G', 'B'])\n nfread = dfread[['Serial no', 'R', 'G', 'B']]\n loan = pd.merge(nf, nfread)\n group = loan.groupby('Serial no')\n Newnf = group.count()\n Newnf['P'] = round(Newnf['R'] / Newnf['R'].sum() * 100)\n Newnf = Newnf.sort_values(by=['R'], ascending=False)\n Rate = Newnf['P'].tolist()\n Newnf.columns = [' '.join(col).strip() for col in Newnf.\n columns.values]\n nf2 = pd.DataFrame(Newnf.to_records())\n nf2 = nf2.head(5)\n print(nf2)\n if len(nf2['Serial no']) == 0:\n i = 0\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 1:\n i = nf2.at[0, 'Serial no']\n j = 0\n k = 0\n elif len(nf2['Serial no']) == 2:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = 0\n else:\n i = nf2.at[0, 'Serial no']\n j = nf2.at[1, 'Serial no']\n k = nf2.at[2, 'Serial no']\n print(k)\n nf3 = dfread.loc[dfread['Serial no'] == i].head(1)\n nf4 = dfread.loc[dfread['Serial no'] == j].head(1)\n nf5 = dfread.loc[dfread['Serial no'] == k].head(1)\n nf3 = nf3.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf4 = nf4.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf5 = nf5.drop(['R', 'G', 'B', 'color', 'A', 'S'], axis=1)\n nf = pd.concat([nf3, nf4, nf5])\n nf.to_csv('.data base\\\\test_result2.csv', index=False,\n encoding='utf_8_sig')\n print(nf)\n ncol = list(nf.columns)\n if len(nf2['Serial no']) == 0:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(nf2['Serial no']) == 1:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n elif len(nf2['Serial no']) == 
2:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n row_nf33 = ['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x',\n 'x', 'x', 'x', 'x', 'x', 'x', 'x']\n else:\n row_nf3 = nf3.iloc[0].tolist()\n row_nf32 = nf4.iloc[0].tolist()\n print(row_nf32)\n row_nf33 = nf5.iloc[0].tolist()\n name_n = nf['Serial no'].tolist()\n rate_n = Rate\n \"\"\"\n newdf1=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)]\n newdf2=dfread.loc[(dfread['S']<=(SumRGB+2))&(dfread['S']>=(SumRGB-2))]\n newdf=pd.concat([newdf1, newdf2])\n \"\"\"\n \"\"\"\n newdf=dfread.loc[(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]\n newdf=newdf.loc[(newdf['color']==color)]\n \"\"\"\n newdf = dfread.loc[(dfread['color'] == color) | (dfread['A'\n ] == SumAvr) | (dfread['S'] == SumRGB)]\n newdf.insert(1, 'Rdif', newdf[['R']].add(-RAvr))\n newdf.insert(2, 'Gdif', newdf[['G']].add(-GAvr))\n newdf.insert(3, 'Bdif', newdf[['B']].add(-BAvr))\n newdf.insert(4, 'Adif', abs(newdf[['A']].add(-SumAvr)))\n newdf.insert(5, 'Sdif', abs(newdf[['S']].add(-SumRGB)))\n df = newdf.sort_values(by=['Sdif', 'Adif'], ascending=True\n ).head(100)\n df.insert(1, 'dalta', abs(df['Rdif'] + df['Gdif'] + df['Bdif'])\n )\n df = df.sort_values(by=['dalta'], ascending=True)\n data = df[['Serial no', 'color']]\n group = data.groupby('Serial no')\n datacount = group.count()\n df = df.merge(datacount, left_on='Serial no', right_index=True)\n df = df.sort_values(by=['color_y'], ascending=False)\n df3 = df.drop_duplicates('Serial no', keep='first', inplace\n =False).head()\n print(df3)\n df3.to_csv('.data base\\\\test_result.csv', index=False,\n encoding='utf_8_sig')\n if df3.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n elif len(df3) <= 2:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '只找到少數資料\\n 已存在test_result')\n else:\n Zero = df3.loc[(df3['Rdif'] == 0) & (df3['Gdif'] == 0) &\n (df3['Bdif'] == 0)]\n Zero = Zero.head(3)\n if Zero.empty == False:\n Zero = 
Zero.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = list(Zero.columns)\n row_text = Zero.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('0')\n print(Zero)\n else:\n filtdf = df3.loc[df3['A'] >= SumAvr]\n filtdf = filtdf.sort_values(by=['Rdif', 'Gdif', 'Bdif']\n ).head()\n Neg_filtdf = df3.loc[df3['A'] < SumAvr]\n Neg_filtdf = Neg_filtdf.sort_values(by=['Rdif',\n 'Gdif', 'Bdif']).head()\n if Neg_filtdf.empty == True and filtdf.empty == True:\n root = tk.Tk()\n root.withdraw()\n messagebox.showinfo('失敗', '未找到符合資料')\n else:\n filtdf = filtdf.drop(['R', 'G', 'B', 'dalta',\n 'Rdif', 'Gdif', 'Bdif', 'A', 'S', 'Adif',\n 'Sdif', 'color_x', 'color_y'], axis=1)\n name = df3['Serial no'].tolist()\n rate = df3['color_y'].tolist()\n col = list(filtdf.columns)\n row_text = filtdf.iloc[0].tolist()\n df3 = df3.drop(['R', 'G', 'B', 'dalta', 'Rdif',\n 'Gdif', 'Bdif', 'A', 'S', 'Adif', 'Sdif',\n 'color_x', 'color_y'], axis=1)\n row_df3 = df3.iloc[0].tolist()\n row_df32 = df3.iloc[1].tolist()\n row_df33 = df3.iloc[2].tolist()\n Result_Print()\n print('最接近的為1', filtdf.head(1))\n\n\ndef color_def(BAvr, GAvr, RAvr):\n global color\n if abs(int(BAvr) - int(GAvr)) <= 1 and abs(int(BAvr) - int(RAvr)) <= 1:\n color = 'White'\n return color\n elif BAvr >= GAvr and BAvr >= RAvr:\n if BAvr - GAvr > 3 and BAvr - RAvr >= 3:\n color = 'Blue'\n return color\n elif BAvr - GAvr < 3:\n color = 'Cyan'\n return color\n else:\n color = 'Purple'\n return color\n elif GAvr >= RAvr and GAvr >= BAvr:\n if GAvr - RAvr > 3 or GAvr - BAvr > 3:\n color = 'Green'\n return color\n elif GAvr - RAvr < 3:\n color = 'Yellow'\n return color\n else:\n color = 
'Cyan'\n return color\n elif RAvr >= GAvr and RAvr >= BAvr:\n if RAvr - GAvr >= 3 and RAvr - BAvr >= 3:\n color = 'Red'\n return color\n elif RAvr - GAvr < 3:\n color = 'Yellow'\n return color\n else:\n color = 'Purple'\n return color\n else:\n color = 'White'\n\n\nimg = cv2.imdecode(np.fromfile('.pure\\\\%s' % result, dtype=np.uint8), -1)\ncv2.namedWindow('mouse_callback')\ncv2.setMouseCallback('mouse_callback', CircleCallback)\n\n\ndef main():\n while True:\n cv2.imshow('mouse_callback', img)\n if cv2.waitKey(20) == 27:\n break\n cv2.destroyAllWindows()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import cv2\nimport numpy as np\nimport pandas as pd\nimport tkinter as tk\nimport random\nfrom tkinter import *\nfrom tkinter import ttk\nfrom tkinter import messagebox\nfrom tkinter import Scale,Tk\nfrom tkinter.ttk import Notebook\n\nrefPt = []\nPtBGR=[]\nr=[]\ng=[]\nb=[]\nrefPt = []\nSerial=[]\nPtBGR=[]\nr1=[]\nr2=[]\nr3=[]\nr4=[]\nrate=[]\nrate2=[]\nrate3=[]\nr6=[]\nr7=[]\nr8=[]\nr9=[]\nadd=[]\nadd2=[]\nadd3=[]\ncolor_name=[]\nlocate=[]\nbrand=[]\nboolean=False\n\n\nroot = tk.Tk()\nroot.geometry(\"400x200\")\nroot.configure(background='white')\n\ndef quitScreen():\n messagebox.showinfo(\"collecting data\", \"點擊視窗開始分析\")\n root.destroy()\n root2=Tk()\n root2.destroy()\n \ndef getTextInput():\n global result,result2\n result=text.get(1.0, tk.END+\"-1c\")\n result2=text2.get(1.0, tk.END+\"-1c\")\n\nimg = PhotoImage(file=\"buttons/QJsmall.png\")\npanel = tk.Label(root, image = img)\npanel.grid(row=0,column=0,columnspan=3)\n\nlabelmode = tk.Label(root,text = \"請輸入圖片完整名稱\\n ex:104432 w7.jpg\",bg=\"white\")\nlabelmode.configure(font=(\"微軟正黑體\", 10))\nlabelmode.grid(row=1)\ntext=tk.Text(root, width=20,height=1)\ntext.insert(\"insert\",\".jpg\")\ntext.configure(font=(\"微軟正黑體\", 10))\ntext.grid(row=1,column=2)\n\nlabelmode2 = tk.Label(root,text = \"請輸入讀取資料庫名稱\\n ex:PureColorBig.csv\",bg=\"white\")\nlabelmode2.configure(font=(\"微軟正黑體\", 10))\nlabelmode2.grid(row=2)\ntext2=tk.Text(root, width=20,height=1)\ntext2.insert(\"insert\",\"PureColorBig.csv\")\ntext2.configure(font=(\"微軟正黑體\", 10))\ntext2.grid(row=2,column=2)\n\nimg_confirm=PhotoImage(file=\"buttons/confirm.png\")\nimg_start=PhotoImage(file=\"buttons/start.png\")\nbtnRead=tk.Button(root, image=img_confirm,text=\" \",relief='flat', \n command=getTextInput)\n\nbtnRead.grid(row=5,column=1)\n\nbtnRead2=tk.Button(root, image=img_start,text=\" \",relief='flat', \n command=quitScreen)\n\nbtnRead2.grid(row=5,column=2)\n\nroot.mainloop()\n\n\n\n\ndef Result_Print():\n window=Tk()\n window.title(\"分析結果\")\n 
window.geometry(\"600x900\")\n \n frame2=Frame(window)\n frame2.pack(fill=\"both\")\n\n \n tablayout=Notebook(frame2)\n tablayout2=Notebook(frame2)\n\n\n #交叉配對\n ntab1=Frame(tablayout2)\n ntab1.pack(fill=\"both\")\n for row in range(len(name_n)):\n for column in range(1):\n label=Label(ntab1,width=25,height=2,text=name_n[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=column,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n for row in range(len(name_n)):\n for column in range(1):\n label=Label(ntab1,width=5,height=2,text=\"%s\" %rate_n[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(name_n)):\n for column in range(1):\n label=Label(ntab1,width=12,height=2,text=\"% 相似程度\",bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=2,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n tablayout2.add(ntab1,text=\"交叉配對結果\")\n\n ntab2=Frame(tablayout2)\n ntab2.pack(fill=\"both\")\n \n for row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab2,width=22,height=1,text=ncol[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab2,width=22,height=1,text=row_nf3[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n \n tablayout2.add(ntab2,text=\"配方1\")\n\n ntab3=Frame(tablayout2)\n ntab3.pack(fill=\"both\")\n \n for row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab3,width=22,height=1,text=ncol[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n\n for 
row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab3,width=22,height=1,text=row_nf32[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n \n tablayout2.add(ntab3,text=\"配方2\")\n\n ntab4=Frame(tablayout2)\n ntab4.pack(fill=\"both\")\n \n for row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab4,width=22,height=1,text=ncol[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab4,width=22,height=1,text=row_nf33[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n \n tablayout2.add(ntab4,text=\"配方3\")\n\n ntab5=Frame(tablayout2)\n ntab5.pack(fill=\"both\")\n \n for row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab5,width=22,height=1,text=ncol[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(ncol)):\n for column in range(1):\n label=Label(ntab5,width=22,height=1,text=row_nf3[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n ntab1.grid_columnconfigure(column,weight=1)\n \n tablayout2.add(ntab5,text=\"最接近配方\")\n\n\n\n #顏色分類\n tab1=Frame(tablayout)\n tab1.pack(fill=\"both\")\n for row in range(len(name)):\n for column in range(1):\n label=Label(tab1,width=25,height=2,text=name[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=column,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n for row in range(len(name)):\n for column in range(1):\n label=Label(tab1,width=5,height=2,text=\"%s\" 
%rate[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(name)):\n for column in range(1):\n label=Label(tab1,width=12,height=2,text=\"% 相似程度\",bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=2,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n tablayout.add(tab1,text=\"顏色分類結果\")\n \n tab2=Frame(tablayout)\n tab2.pack(fill=\"both\")\n \n for row in range(len(col)):\n for column in range(1):\n label=Label(tab2,width=22,height=1,text=col[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(col)):\n for column in range(1):\n label=Label(tab2,width=22,height=1,text=row_df3[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n \n tablayout.add(tab2,text=\"配方1\")\n\n tab3=Frame(tablayout)\n tab3.pack(fill=\"both\")\n \n for row in range(len(col)):\n for column in range(1):\n label=Label(tab3,width=22,height=1,text=col[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(col)):\n for column in range(1):\n label=Label(tab3,width=22,height=1,text=row_df32[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n \n tablayout.add(tab3,text=\"配方2\")\n\n tab4=Frame(tablayout)\n tab4.pack(fill=\"both\")\n \n for row in range(len(col)):\n for column in range(1):\n label=Label(tab4,width=22,height=1,text=col[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n\n for row in 
range(len(col)):\n for column in range(1):\n label=Label(tab4,width=22,height=1,text=row_df33[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n \n tablayout.add(tab4,text=\"配方3\")\n\n tab5=Frame(tablayout)\n tab5.pack(fill=\"both\")\n \n for row in range(len(col)):\n for column in range(1):\n label=Label(tab5,width=22,height=1,text=col[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=0,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n\n for row in range(len(col)):\n for column in range(1):\n label=Label(tab5,width=22,height=1,text=row_text[row],bg=\"black\",fg=\"white\",padx=1,pady=1)\n label.grid(row=row,column=1,sticky=\"nsew\",padx=1,pady=1)\n tab1.grid_columnconfigure(column,weight=1)\n \n tablayout.add(tab5,text=\"最接近配方\")\n\n tablayout.pack()\n tablayout2.pack()\n window.mainloop()\n \n\n\ndef CircleCallback(event,x,y,flags,param):\n n=8\n global refPt,PtBGR,w,h,Serial,r1,r2,r3,r4,rate,rate2,rate3,r6,r7,r8,r9,add,add2,add3,color,b,g,r,df3,name,rate,col,row_text\n global row_df3,row_df32,row_df33,row_text2,row_nf3,row_nf32,nf3,row_nf33,name_n,rate_n,ncol\n if event == cv2.EVENT_LBUTTONDOWN:\n\n #下面n代表取樣點數 若n越大則越精準一般不建議超過1000\n n=500\n for c in range(0,n):\n c+=1\n #若n改變下面499改為n-1\n ranx=(random.randint(0,499))\n rany=(random.randint(0,499))\n refPt.append((ranx,rany))\n b, g, r = img[ranx,rany]\n PtBGR.append((b,g,r)) \n #print(PtBGR[0:n])\n b=[x[0] for x in PtBGR]\n g=[x[1] for x in PtBGR]\n r=[x[2] for x in PtBGR]\n if len(refPt)==n:\n BAvr=(round(sum(b[0:n])/n))\n GAvr=(round(sum(g[0:n])/n))\n RAvr=(round(sum(r[0:n])/n))\n SumRGB=(BAvr+GAvr+RAvr)\n SumAvr=(round(SumRGB/3))\n color_def(BAvr,GAvr,RAvr)\n color_name.append(color)\n AvrRGB={'R':RAvr,'G':GAvr,'B':BAvr,'Sum':SumRGB,'Avr':SumAvr,'color':color_name}\n df_test = pd.DataFrame(AvrRGB,index=[0])\n dfread = pd.read_csv(\".data base\\\\%s\" 
%(result2))\n dfread['A']= round((dfread['R'] + dfread['G'] + dfread['B'])/3)\n dfread['S'] = dfread['R'] + dfread['G'] + dfread['B']\n\n #交叉比對法\n nf=pd.DataFrame(list(zip(r,g,b)),columns=['R','G','B'])\n nfread=dfread[['Serial no','R','G','B']]\n loan=pd.merge(nf,nfread)\n group=loan.groupby('Serial no')\n Newnf=group.count()\n Newnf['P']=round((Newnf['R']/Newnf['R'].sum())* 100)\n Newnf=Newnf.sort_values(by=['R'],ascending=False)\n Rate=Newnf['P'].tolist()\n Newnf.columns = [' '.join(col).strip() for col in Newnf.columns.values]\n nf2=pd.DataFrame(Newnf.to_records())\n nf2=nf2.head(5)\n \n print(nf2)\n if(len(nf2['Serial no'])==0):\n i=0\n j=0\n k=0\n elif(len(nf2['Serial no'])==1):\n i=nf2.at[0,'Serial no']\n j=0\n k=0\n elif(len(nf2['Serial no'])==2):\n i=nf2.at[0,'Serial no']\n j=nf2.at[1,'Serial no']\n k=0\n else:\n i=nf2.at[0,'Serial no']\n j=nf2.at[1,'Serial no']\n k=nf2.at[2,'Serial no']\n print(k)\n nf3=dfread.loc[(dfread['Serial no']==i)].head(1)\n nf4=dfread.loc[(dfread['Serial no']==j)].head(1)\n nf5=dfread.loc[(dfread['Serial no']==k)].head(1)\n nf3=nf3.drop(['R','G','B','color','A','S'],axis=1)\n nf4=nf4.drop(['R','G','B','color','A','S'],axis=1)\n nf5=nf5.drop(['R','G','B','color','A','S'],axis=1)\n nf=pd.concat([nf3, nf4,nf5])\n nf.to_csv(\".data base\\\\test_result2.csv\",index=False,encoding=\"utf_8_sig\")\n print(nf)\n ncol=list(nf.columns)\n if(len(nf2['Serial no'])==0):\n root=tk.Tk()\n root.withdraw()\n messagebox.showinfo(\"失敗\", \"未找到符合資料\")\n elif(len(nf2['Serial no'])==1):\n row_nf3=nf3.iloc[0].tolist()\n row_nf32=['x','x','x','x','x','x','x','x','x','x','x','x','x','x','x','x']\n row_nf33=['x','x','x','x','x','x','x','x','x','x','x','x','x','x','x','x']\n\n elif(len(nf2['Serial no'])==2):\n row_nf3=nf3.iloc[0].tolist()\n row_nf32=nf4.iloc[0].tolist()\n row_nf33=['x','x','x','x','x','x','x','x','x','x','x','x','x','x','x','x']\n \n else:\n row_nf3=nf3.iloc[0].tolist()\n row_nf32=nf4.iloc[0].tolist()\n print(row_nf32)\n 
row_nf33=nf5.iloc[0].tolist()\n name_n=nf['Serial no'].tolist()\n rate_n=Rate\n \n \n #顏色分類法\n #(可以改)當需要寬鬆一點的比對,刪除下面一段的上下兩個'''\n \n '''\n newdf1=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)]\n newdf2=dfread.loc[(dfread['S']<=(SumRGB+2))&(dfread['S']>=(SumRGB-2))]\n newdf=pd.concat([newdf1, newdf2])\n '''\n\n #(可以改)當需要嚴格一點的比對,刪除下面一段的上下兩個'''\n '''\n newdf=dfread.loc[(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]\n newdf=newdf.loc[(newdf['color']==color)]\n '''\n\n #並在下面一行的開頭加上#\n newdf=dfread.loc[(dfread['color']==color)|(dfread['A']==SumAvr)|(dfread['S']==SumRGB)]\n\n \n newdf.insert(1,'Rdif',newdf[['R']].add(-RAvr))\n newdf.insert(2,'Gdif',newdf[['G']].add(-GAvr))\n newdf.insert(3,'Bdif',newdf[['B']].add(-BAvr))\n newdf.insert(4,'Adif',abs(newdf[['A']].add(-SumAvr)))\n newdf.insert(5,'Sdif',abs(newdf[['S']].add(-SumRGB)))\n df=newdf.sort_values(by=['Sdif', 'Adif'], ascending=True).head(100)\n df.insert(1,'dalta',abs(df['Rdif']+df['Gdif']+df['Bdif']))\n df=df.sort_values(by=['dalta'],ascending=True)\n data=df[['Serial no','color']]\n group=data.groupby('Serial no')\n datacount=group.count()\n df=df.merge(datacount,left_on='Serial no',right_index=True)\n df=df.sort_values(by=['color_y'],ascending=False)\n df3=df.drop_duplicates('Serial no', keep='first', inplace=False).head()\n print(df3)\n df3.to_csv(\".data base\\\\test_result.csv\",index=False,encoding=\"utf_8_sig\")\n if df3.empty ==True:\n root=tk.Tk()\n root.withdraw()\n messagebox.showinfo(\"失敗\", \"未找到符合資料\")\n \n elif len(df3)<=2:\n \n root=tk.Tk()\n root.withdraw()\n messagebox.showinfo(\"失敗\", \"只找到少數資料\\n 已存在test_result\")\n \n else:\n Zero=df3.loc[(df3['Rdif']==0)&(df3['Gdif']==0)&(df3['Bdif']==0)]\n Zero=Zero.head(3)\n if Zero.empty==False:\n Zero=Zero.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)\n name=df3['Serial no'].tolist()\n rate=df3['color_y'].tolist()\n col=list(Zero.columns)\n row_text=Zero.iloc[0].tolist()\n 
df3=df3.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)\n row_df3=df3.iloc[0].tolist()\n row_df32=df3.iloc[1].tolist()\n row_df33=df3.iloc[2].tolist()\n Result_Print()\n print('0')\n print(Zero)\n \n else:\n filtdf=df3.loc[(df3['A']>=SumAvr)]\n filtdf=filtdf.sort_values(by=['Rdif','Gdif','Bdif']).head()\n Neg_filtdf=df3.loc[(df3['A']<SumAvr)]\n Neg_filtdf=Neg_filtdf.sort_values(by=['Rdif','Gdif','Bdif']).head()\n \n if Neg_filtdf.empty==True and filtdf.empty ==True:\n root=tk.Tk()\n root.withdraw()\n messagebox.showinfo(\"失敗\", \"未找到符合資料\")\n else:\n filtdf=filtdf.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)\n name=df3['Serial no'].tolist()\n rate=df3['color_y'].tolist()\n col=list(filtdf.columns)\n row_text=filtdf.iloc[0].tolist()\n df3=df3.drop(['R','G','B','dalta','Rdif','Gdif','Bdif','A','S','Adif','Sdif','color_x','color_y'],axis=1)\n row_df3=df3.iloc[0].tolist()\n row_df32=df3.iloc[1].tolist()\n row_df33=df3.iloc[2].tolist()\n Result_Print()\n print(\"最接近的為1\",filtdf.head(1))\n \n\n \n\ndef color_def(BAvr,GAvr,RAvr):\n \n global color\n if abs(int(BAvr)-int(GAvr))<=1 and abs(int(BAvr)-int(RAvr))<=1:\n color='White'\n return color\n \n elif BAvr>=GAvr and BAvr>=RAvr:\n if BAvr-GAvr>3 and BAvr-RAvr>=3:\n color='Blue'\n return color\n \n elif BAvr-GAvr<3:\n color='Cyan'\n return color\n \n else:\n color='Purple'\n return color\n \n \n elif GAvr>=RAvr and GAvr>=BAvr:\n if GAvr-RAvr>3 or GAvr-BAvr>3:\n color='Green'\n return color\n \n elif GAvr-RAvr<3:\n color='Yellow'\n return color\n \n else:\n color='Cyan'\n return color\n \n \n elif RAvr>=GAvr and RAvr>=BAvr:\n if RAvr-GAvr>=3 and RAvr-BAvr>=3:\n color='Red'\n return color\n\n elif RAvr-GAvr<3:\n color='Yellow'\n return color\n\n else:\n color='Purple'\n return color\n \n\n else:\n color='White'\n\n\n#img=cv2.imdecode(np.fromfile(r\"D:\\桌面\\JA Material\\JA-material\\pure\\%s\" % (result),dtype=np.uint8),-1) 
\nimg=cv2.imdecode(np.fromfile(r\".pure\\%s\" % (result),dtype=np.uint8),-1)\ncv2.namedWindow('mouse_callback')\n\n# bind the callback function to window\n\ncv2.setMouseCallback('mouse_callback',CircleCallback)\n \ndef main():\n while (True):\n cv2.imshow('mouse_callback',img)\n if cv2.waitKey(20) == 27:\n break\n \n cv2.destroyAllWindows()\n \n \nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def backupToZip(folder):
folder = os.path.abspath(folder)
os.chdir(folder)
number = 1
while True:
zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'
if not os.path.exists(zipFilename):
break
number = number + 1
print('creating %s...' % zipFilename)
backupZip = zipfile.ZipFile(zipFilename, 'w')
for foldername, subfolders, filenames in os.walk(folder):
print('adding files in %s...' % foldername)
backupZip.write(foldername)
for filename in filenames:
newBase = os.path.basename(folder) + '_'
if filename.startswith(newBase) and filename.endswith('.zip'):
continue
backupZip.write(os.path.join(foldername, filename))
backupZip.close()
print('......Done......')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def backupToZip(folder):
folder = os.path.abspath(folder)
os.chdir(folder)
number = 1
while True:
zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'
if not os.path.exists(zipFilename):
break
number = number + 1
print('creating %s...' % zipFilename)
backupZip = zipfile.ZipFile(zipFilename, 'w')
for foldername, subfolders, filenames in os.walk(folder):
print('adding files in %s...' % foldername)
backupZip.write(foldername)
for filename in filenames:
newBase = os.path.basename(folder) + '_'
if filename.startswith(newBase) and filename.endswith('.zip'):
continue
backupZip.write(os.path.join(foldername, filename))
backupZip.close()
print('......Done......')
backupToZip('.')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import zipfile, os
def backupToZip(folder):
folder = os.path.abspath(folder)
os.chdir(folder)
number = 1
while True:
zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'
if not os.path.exists(zipFilename):
break
number = number + 1
print('creating %s...' % zipFilename)
backupZip = zipfile.ZipFile(zipFilename, 'w')
for foldername, subfolders, filenames in os.walk(folder):
print('adding files in %s...' % foldername)
backupZip.write(foldername)
for filename in filenames:
newBase = os.path.basename(folder) + '_'
if filename.startswith(newBase) and filename.endswith('.zip'):
continue
backupZip.write(os.path.join(foldername, filename))
backupZip.close()
print('......Done......')
backupToZip('.')
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 13 14:10:15 2018
9.5 项目:将一个文件夹备份到一个 ZIP 文件
@author: NEVERGUVEIP
"""
#! python3
import zipfile,os
def backupToZip(folder):
#backup the entire contents of 'folder' into a ZIP file
folder = os.path.abspath(folder)
os.chdir(folder)
#figure out the filename this code should use based on
#what files already exist.
number = 1
#从1循环检查文件名存不存在,_1.zip,_2.zip,,防止备份以前的备份文件
while True:
zipFilename = os.path.basename(folder) +'_'+str(number)+'.zip'
if not os.path.exists(zipFilename):
break
number = number +1
#creat the zip file
print('creating %s...'%(zipFilename))
backupZip = zipfile.ZipFile(zipFilename,'w')
#TODO: walk the entire folder tree and compress the files in each folder.
for foldername,subfolders,filenames in os.walk(folder):#
print('adding files in %s...'%(foldername))
#add the current folder to the zip file.
backupZip.write(foldername)
#add all the files in this folder to the ZIP file.
for filename in filenames:
newBase = os.path.basename(folder)+'_'
if filename.startswith(newBase) and filename.endswith('.zip'):
continue# don't backup the backup ZIP files
backupZip.write(os.path.join(foldername,filename))
backupZip.close()
print('......Done......')
#backupToZip(r'C:\Users\NEVERGUVEIP\Documents\GitHub\python_test')
backupToZip('.')
|
flexible
|
{
"blob_id": "7af19f69e6c419649a5999f594118ad13833a537",
"index": 7398,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef backupToZip(folder):\n folder = os.path.abspath(folder)\n os.chdir(folder)\n number = 1\n while True:\n zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'\n if not os.path.exists(zipFilename):\n break\n number = number + 1\n print('creating %s...' % zipFilename)\n backupZip = zipfile.ZipFile(zipFilename, 'w')\n for foldername, subfolders, filenames in os.walk(folder):\n print('adding files in %s...' % foldername)\n backupZip.write(foldername)\n for filename in filenames:\n newBase = os.path.basename(folder) + '_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue\n backupZip.write(os.path.join(foldername, filename))\n backupZip.close()\n print('......Done......')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef backupToZip(folder):\n folder = os.path.abspath(folder)\n os.chdir(folder)\n number = 1\n while True:\n zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'\n if not os.path.exists(zipFilename):\n break\n number = number + 1\n print('creating %s...' % zipFilename)\n backupZip = zipfile.ZipFile(zipFilename, 'w')\n for foldername, subfolders, filenames in os.walk(folder):\n print('adding files in %s...' % foldername)\n backupZip.write(foldername)\n for filename in filenames:\n newBase = os.path.basename(folder) + '_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue\n backupZip.write(os.path.join(foldername, filename))\n backupZip.close()\n print('......Done......')\n\n\nbackupToZip('.')\n",
"step-4": "<mask token>\nimport zipfile, os\n\n\ndef backupToZip(folder):\n folder = os.path.abspath(folder)\n os.chdir(folder)\n number = 1\n while True:\n zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'\n if not os.path.exists(zipFilename):\n break\n number = number + 1\n print('creating %s...' % zipFilename)\n backupZip = zipfile.ZipFile(zipFilename, 'w')\n for foldername, subfolders, filenames in os.walk(folder):\n print('adding files in %s...' % foldername)\n backupZip.write(foldername)\n for filename in filenames:\n newBase = os.path.basename(folder) + '_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue\n backupZip.write(os.path.join(foldername, filename))\n backupZip.close()\n print('......Done......')\n\n\nbackupToZip('.')\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Aug 13 14:10:15 2018\n9.5 项目:将一个文件夹备份到一个 ZIP 文件 \n\n@author: NEVERGUVEIP\n\"\"\"\n#! python3\n\nimport zipfile,os\n\ndef backupToZip(folder):\n #backup the entire contents of 'folder' into a ZIP file\n \n folder = os.path.abspath(folder)\n os.chdir(folder)\n #figure out the filename this code should use based on\n #what files already exist. \n number = 1 \n #从1循环检查文件名存不存在,_1.zip,_2.zip,,防止备份以前的备份文件\n while True:\n zipFilename = os.path.basename(folder) +'_'+str(number)+'.zip'\n if not os.path.exists(zipFilename):\n break\n number = number +1\n \n \n #creat the zip file\n print('creating %s...'%(zipFilename))\n backupZip = zipfile.ZipFile(zipFilename,'w')\n \n #TODO: walk the entire folder tree and compress the files in each folder. \n\n for foldername,subfolders,filenames in os.walk(folder):#\n \n print('adding files in %s...'%(foldername)) \n #add the current folder to the zip file.\n backupZip.write(foldername)\n #add all the files in this folder to the ZIP file.\n for filename in filenames:\n newBase = os.path.basename(folder)+'_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue# don't backup the backup ZIP files\n backupZip.write(os.path.join(foldername,filename))\n \n backupZip.close() \n print('......Done......')\n\n#backupToZip(r'C:\\Users\\NEVERGUVEIP\\Documents\\GitHub\\python_test')\nbackupToZip('.')\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
positivo = float(1.0000001)
negativo = float(-1.000001)
print(negativo, positivo)
b_pos = bin(positivo)
b_neg = bin(negativo)
print(b_neg, b_pos)
|
normal
|
{
"blob_id": "5c908697000247056bb63a443f837eef88b4c957",
"index": 9196,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(negativo, positivo)\n<mask token>\nprint(b_neg, b_pos)\n",
"step-3": "positivo = float(1.0000001)\nnegativo = float(-1.000001)\nprint(negativo, positivo)\nb_pos = bin(positivo)\nb_neg = bin(negativo)\nprint(b_neg, b_pos)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pygame
import numpy as np
import glob
from entities.base import AnimatedSprite
images_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))
trophy_im_dict = {'sparkle':[pygame.transform.scale(pygame.image.load(img_path),(400,400)) for img_path in images_path]}
class Trophy(AnimatedSprite):
def __init__(self, position, image_dict, hold_for_n_frames=3,):
super().__init__(position, image_dict, hold_for_n_frames)
self.initial_position = position
self.frames_alive = 0
def update(self):
super().next_frame()
|
normal
|
{
"blob_id": "883cb1e3ea227bb5ac5aa3b4348336ab1a7fba70",
"index": 3476,
"step-1": "<mask token>\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n",
"step-3": "<mask token>\nimages_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))\ntrophy_im_dict = {'sparkle': [pygame.transform.scale(pygame.image.load(\n img_path), (400, 400)) for img_path in images_path]}\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n",
"step-4": "import pygame\nimport numpy as np\nimport glob\nfrom entities.base import AnimatedSprite\nimages_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))\ntrophy_im_dict = {'sparkle': [pygame.transform.scale(pygame.image.load(\n img_path), (400, 400)) for img_path in images_path]}\n\n\nclass Trophy(AnimatedSprite):\n\n def __init__(self, position, image_dict, hold_for_n_frames=3):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n",
"step-5": "import pygame\nimport numpy as np\nimport glob\nfrom entities.base import AnimatedSprite\n\n\nimages_path = sorted(glob.glob('./resources/trophy_sparkle_*.png'))\ntrophy_im_dict = {'sparkle':[pygame.transform.scale(pygame.image.load(img_path),(400,400)) for img_path in images_path]}\n\nclass Trophy(AnimatedSprite):\n def __init__(self, position, image_dict, hold_for_n_frames=3,):\n super().__init__(position, image_dict, hold_for_n_frames)\n self.initial_position = position\n self.frames_alive = 0\n\n def update(self):\n super().next_frame()\n\n\n\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# jan 2014 bbb garden shield attempt
# AKA
'''
Sensors:
analog level sensor, pin AIN0
TMP102 i2c temperature sensor, address 0x48
(if add0 is grounded) or 0x49 (if pulled up)
Outputs:
Analog RGB LED strip
I2C display(?)
Pump Activate/Deactivate (GPIO pin)
Some measurements as of mid-March 2014:
Tank can be pumped for 15 minutes without sun exposure to liquid.
Seems like after 10 minutes of pumping, the autosiphon engages, though.
Tank takes about 17 minutes to drain from a 15-minute pump
11 gals in reservoir reads as 0.42 on the adc.read scale from 0 to 1
8 gals in reservoir reads as 0.175 on the adc.read scale from 0 to 1
7 gals in reservoir reads as 0.15 on the adc.read scale from 0 to 1
'''
from __future__ import division
import Adafruit_SSD1306 as ssd
import Adafruit_BBIO.UART as uart
import Image
import ImageDraw
import ImageFont
# import Adafruit_GPIO.PWM as pwm
import Adafruit_BBIO.GPIO as gpio
import Adafruit_BBIO.ADC as adc
# import TMP102 as tmp102
import datetime
from dateutil.tz import tzlocal
import time
import serial
import atexit
from math import log
import requests
import key as k
import logging
BCOEFFICIENT = 3950 # thermistor beta coefficient
THERMISTORNOMINAL = 10000
TEMPERATURENOMINAL = 25.0
SERIESRESISTOR = 10000
# a1 = blue and white, which is bed temp
# a2 = white and orange, which is tank temp
interval = 60 # seconds between samples
greenPin = 'P8_13'
bluePin = 'P9_14'
redPin = 'P8_19'
servoPin = 'P9_16'
tankPin = 'P9_39'
photoPin = 'P9_38'
thermistor1 = 'P9_40' # AIN1, bed temp
thermistor2 = 'P9_37' # AIN2, reservoir temp
pumpPin = 'P8_10'
RST = 'P8_10' # OLED screen reset pin, not always necessary
readings = {}
PUMP_INTERVAL = 60 # minutes between pump actuations
PUMP_DURATION = 12 # minutes to run pump
def exit_handler():
print 'exiting'
gpio.output(pumpPin,gpio.LOW)
gpio.cleanup()
uart.cleanup()
def do_sensor_read():
print 'sensor read'
global readings
readings = {}
# value = ADC.read("AIN1")
# adc returns value from 0 to 1.
# use read_raw(pin) to get V values
# tank = adc.read(tankPin)
tank = adc.read(tankPin) # have to read twice due to bbio bug
print 'tank is %s' % tank
time.sleep(1)
# photo = adc.read(photoPin) # have to read twice due to bbio bug
photo = 1.0-adc.read(photoPin) # reverse range so that 0 is darkest
print 'photo is %s' % photo
time.sleep(1)
# temp1 = adc.read_raw(thermistor1)
temp1 = adc.read_raw(thermistor1)
time.sleep(1)
print 'temp1 raw %s' % temp1
temp1 = convert_thermistor_special(temp1)
readings['bedTemp'] = temp1
print 'converted bed_temp is %s' % temp1
# # do conversion per
# # http://learn.adafruit.com/thermistor/using-a-thermistor
# temp2 = adc.read_raw(thermistor2)
temp2 = adc.read_raw(thermistor2)
time.sleep(1)
print 'temp2 raw %s' % temp2
print temp2
temp2 = convert_thermistor(temp2)
readings['tankTemp'] = temp2
print 'converted reservoir_temp is %s' % temp2
# do conversion per
# http://learn.adafruit.com/thermistor/using-a-thermistor
# tmp36reading = adc.read_raw(tmp36Pin)
# tmp36reading = adc.read_raw(tmp36Pin) # have to read twice due to bbio bug
# millivolts = tmp36reading * 1800 # 1.8V reference = 1800 mV
# temp_c = (millivolts - 500) / 10
# print temp_c
# ph_val = get_ph()
# print 'ph_val was thoght to be %s' % ph_val
readings['tankLevel'] = tank # tank level
readings['photocell'] = photo # photocell
def convert_thermistor(raw):
# convert the value to resistance
# print 'was given %s' % raw
raw = SERIESRESISTOR/((1800.0/raw) - 1.0)
# raw = float(SERIESRESISTOR / float(raw))
print 'Thermistor resistance '
print raw
steinhart = raw/THERMISTORNOMINAL # (R/Ro)
steinhart = log(steinhart) # ln(R/Ro)
steinhart /= BCOEFFICIENT # 1/B * ln(R/Ro)
steinhart += float(1.0 / (TEMPERATURENOMINAL + 273.15)) # + (1/To)
steinhart = float(1.0 / steinhart) # Invert
steinhart -= 273.15 # convert to C
print 'we think converted temperature is %s' % steinhart
return steinhart
def convert_thermistor_special(raw):
# convert the value to resistance
# print 'was given %s' % raw
# raw = (1800/raw) - 1
# fuck me, a1 is only up against 3.73kOhm - even though it's a properly-labeled resistor!
raw = 3730.0/((1800.0/raw) - 1.0)
print 'Thermistor resistance '
print raw
steinhart = raw/THERMISTORNOMINAL # (R/Ro)
steinhart = log(steinhart) # ln(R/Ro)
steinhart /= BCOEFFICIENT # 1/B * ln(R/Ro)
steinhart += float(1.0 / (TEMPERATURENOMINAL + 273.15)) # + (1/To)
steinhart = float(1.0 / steinhart) # Invert
steinhart -= 273.15 # convert to C
print 'we think converted temperature is %s' % steinhart
return steinhart
def do_db_update():
print 'db update'
global readings
# print readings
if len(readings) != 0:
# data.sparkfun.com is expecting:
# bedTemp, photo, tankLevel, tankTemp
bedTemp = float('{0:.2f}'.format(readings['bedTemp']))
tankTemp = float('{0:.2f}'.format(readings['tankTemp']))
payload = {
'photo':readings['photocell'],
'tankLevel':readings['tankLevel'],
'bedTemp':readings['bedTemp'],
'tankTemp':readings['tankTemp']
}
h = {'Phant-Private-Key':k.key['phant_private']}
r = requests.post(k.key['phant_url'], data=payload, headers=h)
print 'wrote a result set to the DB'
else:
print 'NULL readings, nothing written to DB'
def get_ph():
print 'we are in get_ph'
uart.setup('UART2')
ser = serial.Serial(port = '/dev/ttyO2', baudrate=38400)
print 'opened serial port'
ser.open()
ser.write('R\r')
data = ser.read()
print 'ph received raw as %s' % data
ser.close()
uart.cleanup()
return data
def do_state_display():
print 'state_display'
width = disp.width
height = disp.height
image = Image.new('1', (width, height))
# Get drawing object to draw on image.
draw = ImageDraw.Draw(image)
# Load default font.
# font = ImageFont.load_default()
# Alternatively load a TTF font.
# Some other nice fonts to try: http://www.dafont.com/bitmap.php
font = ImageFont.truetype('Vdj.ttf', 8)
# Draw a black filled box to clear the image.
draw.rectangle((0,0,width,height), outline=0, fill=0)
# Draw some shapes.
# First define some constants to allow easy resizing of shapes.
padding = 2
shape_width = 20
top = padding
bottom = height-padding
# Move left to right keeping track of the current x position for drawing shapes.
x = padding
draw.text((x, top), 'photo: ', font=font, fill=255)
draw.text((x, top+16), 'tankLevel: ', font=font, fill=255)
draw.text((x, top+32), 'tankTemp: ', font=font, fill=255)
draw.text((x, top+48), 'bedTemp: ', font=font, fill=255)
draw.text((x+64, top), str(readings['photocell'])[:4], font=font, fill=255)
draw.text((x+64, top+16), str(readings['tankLevel'])[:4], font=font, fill=255)
draw.text((x+64, top+32), str(readings['tankTemp'])[:4], font=font, fill=255)
draw.text((x+64, top+48), str(readings['bedTemp'])[:4], font=font, fill=255)
# Draw an ellipse.
# draw.ellipse((x, top , x+shape_width, bottom), outline=255, fill=0)
# x += shape_width+padding
# Draw a rectangle.
# draw.rectangle((x, top, x+shape_width, bottom), outline=255, fill=0)
# x += shape_width+padding
# Draw a triangle.
# draw.polygon([(x, bottom), (x+shape_width/2, top), (x+shape_width, bottom)], outline=255, fill=0)
# x += shape_width+padding
# Draw an X.
# draw.line((x, bottom, x+shape_width, top), fill=255)
# draw.line((x, top, x+shape_width, bottom), fill=255)
# x += shape_width+padding
# Display image.
disp.image(image)
disp.display()
# so, what will state display be?
# I2C display of tank temp?
def do_pump_toggle():
print 'pump actuate'
'''
this should actually work like:
if currentMinute mod PUMP_DURATION < PUMP_INTERVAL:
activate pump
else:
turn off pump
'''
if (datetime.datetime.today().hour>6 and datetime.datetime.today().hour<23):
print 'within actuating timeframe'
# changed this to just pump for the first PUMP_DURATION minutes every hour
if(datetime.datetime.today().minute <= PUMP_DURATION):
print 'we are in the first %s minutes of the hour, so pump should be on.' % PUMP_DURATION
gpio.output(pumpPin,gpio.HIGH)
else:
print 'shutting off pump at %s' % datetime.datetime.today().minute
gpio.output(pumpPin,gpio.LOW)
else:
print 'it is the actuator quiet period, between 11pm and 6am'
gpio.output(pumpPin,gpio.LOW)
print 'starting sampling at'
print datetime.datetime.now(tzlocal())
logging.basicConfig(filename='example.log',level=logging.DEBUG)
# adc.setup(thermistor1)
# adc.setup(thermistor2)
# adc.setup(photoPin)
adc.setup()
# uart.setup('UART2')
# print 'uart setup'
gpio.setup(pumpPin,gpio.OUT)
# t = tmp102.TMP102()
disp = ssd.SSD1306_128_64(rst=RST,i2c_address=0x3D)
disp.begin()
disp.clear()
disp.display()
# NOTE
# There is currently a bug in the ADC driver.
# You'll need to read the values twice
# in order to get the latest value.
# pwm.start(greenPin, 10.0, 2000.0)
# pwm.start(redPin, 10.0, 2000.0)
# pwm.start(bluePin, 10.0, 2000.0)
atexit.register(exit_handler)
while True:
try:
do_sensor_read()
except Exception, e:
print e
print 'sensor_read error!'
try:
do_db_update()
except Exception, e:
print e
print 'do_db_update error!'
try:
do_state_display()
# pass
except Exception, e:
print e
print 'do_state_display error!'
try:
do_pump_toggle()
except Exception, e:
print e
print 'do_pump_toggle error!'
print 'done with cycle, now waiting %s' % datetime.datetime.today()
time.sleep(interval)
|
normal
|
{
"blob_id": "06992263599fe3290c87ec00c6cb8af3748920c8",
"index": 5497,
"step-1": "\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# jan 2014 bbb garden shield attempt\n# AKA\n\n'''\nSensors:\nanalog level sensor, pin AIN0\nTMP102 i2c temperature sensor, address 0x48\n(if add0 is grounded) or 0x49 (if pulled up)\n\n\nOutputs:\nAnalog RGB LED strip\nI2C display(?)\nPump Activate/Deactivate (GPIO pin)\n\nSome measurements as of mid-March 2014:\n\nTank can be pumped for 15 minutes without sun exposure to liquid.\nSeems like after 10 minutes of pumping, the autosiphon engages, though.\nTank takes about 17 minutes to drain from a 15-minute pump\n\n11 gals in reservoir reads as 0.42 on the adc.read scale from 0 to 1\n8 gals in reservoir reads as 0.175 on the adc.read scale from 0 to 1\n7 gals in reservoir reads as 0.15 on the adc.read scale from 0 to 1\n'''\nfrom __future__ import division\nimport Adafruit_SSD1306 as ssd\nimport Adafruit_BBIO.UART as uart\nimport Image\nimport ImageDraw\nimport ImageFont\n# import Adafruit_GPIO.PWM as pwm\nimport Adafruit_BBIO.GPIO as gpio\nimport Adafruit_BBIO.ADC as adc\n# import TMP102 as tmp102\nimport datetime\nfrom dateutil.tz import tzlocal\nimport time\nimport serial\nimport atexit\nfrom math import log\nimport requests\nimport key as k\nimport logging\n\nBCOEFFICIENT = 3950 # thermistor beta coefficient\nTHERMISTORNOMINAL = 10000\nTEMPERATURENOMINAL = 25.0\nSERIESRESISTOR = 10000\n# a1 = blue and white, which is bed temp\n# a2 = white and orange, which is tank temp\ninterval = 60 # seconds between samples\ngreenPin = 'P8_13'\nbluePin = 'P9_14'\nredPin = 'P8_19'\nservoPin = 'P9_16'\ntankPin = 'P9_39'\nphotoPin = 'P9_38'\nthermistor1 = 'P9_40' # AIN1, bed temp\nthermistor2 = 'P9_37' # AIN2, reservoir temp\npumpPin = 'P8_10'\nRST = 'P8_10' # OLED screen reset pin, not always necessary\nreadings = {}\nPUMP_INTERVAL = 60 # minutes between pump actuations\nPUMP_DURATION = 12 # minutes to run pump\n\ndef exit_handler():\n print 'exiting'\n gpio.output(pumpPin,gpio.LOW)\n gpio.cleanup()\n 
uart.cleanup()\n\ndef do_sensor_read():\n print 'sensor read'\n global readings\n readings = {}\n # value = ADC.read(\"AIN1\")\n # adc returns value from 0 to 1.\n # use read_raw(pin) to get V values\n # tank = adc.read(tankPin)\n tank = adc.read(tankPin) # have to read twice due to bbio bug\n print 'tank is %s' % tank\n time.sleep(1)\n \n \n # photo = adc.read(photoPin) # have to read twice due to bbio bug\n photo = 1.0-adc.read(photoPin) # reverse range so that 0 is darkest\n print 'photo is %s' % photo\n time.sleep(1)\n \n\n # temp1 = adc.read_raw(thermistor1)\n temp1 = adc.read_raw(thermistor1)\n time.sleep(1)\n print 'temp1 raw %s' % temp1\n temp1 = convert_thermistor_special(temp1)\n readings['bedTemp'] = temp1\n print 'converted bed_temp is %s' % temp1\n \n # # do conversion per\n # # http://learn.adafruit.com/thermistor/using-a-thermistor\n\n # temp2 = adc.read_raw(thermistor2)\n temp2 = adc.read_raw(thermistor2)\n time.sleep(1)\n print 'temp2 raw %s' % temp2\n print temp2\n temp2 = convert_thermistor(temp2)\n readings['tankTemp'] = temp2\n print 'converted reservoir_temp is %s' % temp2\n\n # do conversion per\n # http://learn.adafruit.com/thermistor/using-a-thermistor\n # tmp36reading = adc.read_raw(tmp36Pin)\n # tmp36reading = adc.read_raw(tmp36Pin) # have to read twice due to bbio bug\n # millivolts = tmp36reading * 1800 # 1.8V reference = 1800 mV\n # temp_c = (millivolts - 500) / 10\n # print temp_c\n\n # ph_val = get_ph()\n # print 'ph_val was thoght to be %s' % ph_val\n\n readings['tankLevel'] = tank # tank level\n readings['photocell'] = photo # photocell\n\ndef convert_thermistor(raw):\n # convert the value to resistance\n # print 'was given %s' % raw\n raw = SERIESRESISTOR/((1800.0/raw) - 1.0)\n # raw = float(SERIESRESISTOR / float(raw))\n print 'Thermistor resistance ' \n print raw\n steinhart = raw/THERMISTORNOMINAL # (R/Ro)\n steinhart = log(steinhart) # ln(R/Ro)\n steinhart /= BCOEFFICIENT # 1/B * ln(R/Ro)\n steinhart += float(1.0 / 
(TEMPERATURENOMINAL + 273.15)) # + (1/To)\n steinhart = float(1.0 / steinhart) # Invert\n steinhart -= 273.15 # convert to C\n print 'we think converted temperature is %s' % steinhart\n return steinhart\n\ndef convert_thermistor_special(raw):\n # convert the value to resistance\n # print 'was given %s' % raw\n # raw = (1800/raw) - 1\n # fuck me, a1 is only up against 3.73kOhm - even though it's a properly-labeled resistor!\n raw = 3730.0/((1800.0/raw) - 1.0)\n print 'Thermistor resistance ' \n print raw\n steinhart = raw/THERMISTORNOMINAL # (R/Ro)\n steinhart = log(steinhart) # ln(R/Ro)\n steinhart /= BCOEFFICIENT # 1/B * ln(R/Ro)\n steinhart += float(1.0 / (TEMPERATURENOMINAL + 273.15)) # + (1/To)\n steinhart = float(1.0 / steinhart) # Invert\n steinhart -= 273.15 # convert to C\n print 'we think converted temperature is %s' % steinhart\n return steinhart\n\ndef do_db_update():\n print 'db update'\n global readings\n # print readings\n if len(readings) != 0:\n # data.sparkfun.com is expecting:\n # bedTemp, photo, tankLevel, tankTemp\n bedTemp = float('{0:.2f}'.format(readings['bedTemp']))\n tankTemp = float('{0:.2f}'.format(readings['tankTemp']))\n payload = {\n 'photo':readings['photocell'],\n 'tankLevel':readings['tankLevel'],\n 'bedTemp':readings['bedTemp'],\n 'tankTemp':readings['tankTemp']\n }\n h = {'Phant-Private-Key':k.key['phant_private']}\n r = requests.post(k.key['phant_url'], data=payload, headers=h)\n print 'wrote a result set to the DB'\n else:\n print 'NULL readings, nothing written to DB'\n\ndef get_ph():\n print 'we are in get_ph'\n uart.setup('UART2')\n ser = serial.Serial(port = '/dev/ttyO2', baudrate=38400)\n print 'opened serial port'\n ser.open()\n ser.write('R\\r')\n data = ser.read()\n print 'ph received raw as %s' % data\n ser.close()\n uart.cleanup()\n return data\n\ndef do_state_display():\n print 'state_display'\n width = disp.width\n height = disp.height\n image = Image.new('1', (width, height))\n\n # Get drawing object to draw on 
image.\n draw = ImageDraw.Draw(image)\n # Load default font.\n # font = ImageFont.load_default()\n # Alternatively load a TTF font.\n # Some other nice fonts to try: http://www.dafont.com/bitmap.php\n font = ImageFont.truetype('Vdj.ttf', 8)\n # Draw a black filled box to clear the image.\n draw.rectangle((0,0,width,height), outline=0, fill=0)\n\n # Draw some shapes.\n # First define some constants to allow easy resizing of shapes.\n padding = 2\n shape_width = 20\n top = padding\n bottom = height-padding\n\n # Move left to right keeping track of the current x position for drawing shapes.\n x = padding\n\n draw.text((x, top), 'photo: ', font=font, fill=255)\n draw.text((x, top+16), 'tankLevel: ', font=font, fill=255)\n draw.text((x, top+32), 'tankTemp: ', font=font, fill=255)\n draw.text((x, top+48), 'bedTemp: ', font=font, fill=255)\n draw.text((x+64, top), str(readings['photocell'])[:4], font=font, fill=255)\n draw.text((x+64, top+16), str(readings['tankLevel'])[:4], font=font, fill=255)\n draw.text((x+64, top+32), str(readings['tankTemp'])[:4], font=font, fill=255) \n draw.text((x+64, top+48), str(readings['bedTemp'])[:4], font=font, fill=255)\n \n # Draw an ellipse.\n # draw.ellipse((x, top , x+shape_width, bottom), outline=255, fill=0)\n # x += shape_width+padding\n # Draw a rectangle.\n # draw.rectangle((x, top, x+shape_width, bottom), outline=255, fill=0)\n # x += shape_width+padding\n # Draw a triangle.\n # draw.polygon([(x, bottom), (x+shape_width/2, top), (x+shape_width, bottom)], outline=255, fill=0)\n # x += shape_width+padding\n # Draw an X.\n # draw.line((x, bottom, x+shape_width, top), fill=255)\n # draw.line((x, top, x+shape_width, bottom), fill=255)\n # x += shape_width+padding\n \n # Display image.\n disp.image(image)\n disp.display()\n # so, what will state display be?\n # I2C display of tank temp?\n\ndef do_pump_toggle():\n print 'pump actuate'\n '''\n this should actually work like:\n if currentMinute mod PUMP_DURATION < PUMP_INTERVAL:\n 
activate pump\n else:\n turn off pump\n '''\n if (datetime.datetime.today().hour>6 and datetime.datetime.today().hour<23):\n print 'within actuating timeframe'\n # changed this to just pump for the first PUMP_DURATION minutes every hour\n if(datetime.datetime.today().minute <= PUMP_DURATION):\n print 'we are in the first %s minutes of the hour, so pump should be on.' % PUMP_DURATION\n gpio.output(pumpPin,gpio.HIGH)\n else:\n print 'shutting off pump at %s' % datetime.datetime.today().minute\n gpio.output(pumpPin,gpio.LOW)\n else:\n print 'it is the actuator quiet period, between 11pm and 6am'\n gpio.output(pumpPin,gpio.LOW)\n\nprint 'starting sampling at'\nprint datetime.datetime.now(tzlocal())\nlogging.basicConfig(filename='example.log',level=logging.DEBUG)\n# adc.setup(thermistor1)\n# adc.setup(thermistor2)\n# adc.setup(photoPin)\nadc.setup()\n# uart.setup('UART2')\n# print 'uart setup'\ngpio.setup(pumpPin,gpio.OUT)\n# t = tmp102.TMP102()\ndisp = ssd.SSD1306_128_64(rst=RST,i2c_address=0x3D)\ndisp.begin()\ndisp.clear()\ndisp.display()\n# NOTE\n# There is currently a bug in the ADC driver.\n# You'll need to read the values twice\n# in order to get the latest value.\n# pwm.start(greenPin, 10.0, 2000.0)\n# pwm.start(redPin, 10.0, 2000.0)\n# pwm.start(bluePin, 10.0, 2000.0)\natexit.register(exit_handler)\n\nwhile True:\n try:\n do_sensor_read()\n except Exception, e:\n print e\n print 'sensor_read error!'\n try:\n do_db_update()\n except Exception, e:\n print e\n print 'do_db_update error!'\n try:\n do_state_display()\n # pass\n except Exception, e:\n print e\n print 'do_state_display error!'\n try:\n do_pump_toggle()\n except Exception, e:\n print e\n print 'do_pump_toggle error!'\n print 'done with cycle, now waiting %s' % datetime.datetime.today()\n time.sleep(interval)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if year % 4 == 0 and year % 100 != 0:
print('閏年')
pass
elif year % 400 == 0:
print('閏年')
pass
else:
print('平年')
pass
<|reserved_special_token_1|>
year = int(input('西暦>'))
if year % 4 == 0 and year % 100 != 0:
print('閏年')
pass
elif year % 400 == 0:
print('閏年')
pass
else:
print('平年')
pass
|
flexible
|
{
"blob_id": "b381d1110e6a7570cd872d689a43aba2d2580a23",
"index": 8449,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif year % 4 == 0 and year % 100 != 0:\n print('閏年')\n pass\nelif year % 400 == 0:\n print('閏年')\n pass\nelse:\n print('平年')\n pass\n",
"step-3": "year = int(input('西暦>'))\nif year % 4 == 0 and year % 100 != 0:\n print('閏年')\n pass\nelif year % 400 == 0:\n print('閏年')\n pass\nelse:\n print('平年')\n pass\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#n = int(input())
#s = input()
n, m = map(int, input().split())
#s, t = input().split()
#n, m, l = map(int, input().split())
#s, t, r = input().split()
#a = map(int, input().split())
#a = input().split()
a = [int(input()) for _ in range(n)]
#a = [input() for _ in range(n)]
#t = input()
#m = int(input())
#p, q = map(int, input().split())
#p, q = input().split()
#p, q, r = map(int, input().split())
#p, q, r = input().split()
#b = map(int, input().split())
#b = input().split()
#b = [int(input()) for _ in range(m)]
#b = [input() for _ in range(m)]
cnt, ans, mx, mn = 0, m, 0, 100000000
for i in range(n - 1):
if a[i + 1] - a[i] < m:
ans += a[i + 1] - a[i]
else:
ans += m
print(ans)
|
normal
|
{
"blob_id": "a09bc84a14718422894127a519d67dc0c6b13bc9",
"index": 746,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(n - 1):\n if a[i + 1] - a[i] < m:\n ans += a[i + 1] - a[i]\n else:\n ans += m\nprint(ans)\n",
"step-3": "n, m = map(int, input().split())\na = [int(input()) for _ in range(n)]\ncnt, ans, mx, mn = 0, m, 0, 100000000\nfor i in range(n - 1):\n if a[i + 1] - a[i] < m:\n ans += a[i + 1] - a[i]\n else:\n ans += m\nprint(ans)\n",
"step-4": "#n = int(input())\n#s = input()\nn, m = map(int, input().split())\n#s, t = input().split()\n#n, m, l = map(int, input().split())\n#s, t, r = input().split()\n#a = map(int, input().split())\n#a = input().split()\na = [int(input()) for _ in range(n)]\n#a = [input() for _ in range(n)]\n\n#t = input()\n#m = int(input())\n#p, q = map(int, input().split())\n#p, q = input().split()\n#p, q, r = map(int, input().split())\n#p, q, r = input().split()\n#b = map(int, input().split())\n#b = input().split()\n#b = [int(input()) for _ in range(m)]\n#b = [input() for _ in range(m)]\ncnt, ans, mx, mn = 0, m, 0, 100000000\nfor i in range(n - 1):\n if a[i + 1] - a[i] < m:\n ans += a[i + 1] - a[i]\n else:\n ans += m\nprint(ans)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from functions2 import *
import numpy as np
#from functions import TermStructure,load_data
import numpy as np
import math
from scipy import optimize
import pylab as pl
from IPython import display as dp
class Vasicek():
def __init__(self,rs,vol):
self.t = rs.columns
self.ps= rs[-1:]
self.sigma = vol
def get_TheoreticalP(self,x=0):
sigma = self.sigma
try:
_ = x.shape
except:
x = self.t
a = self.a
b = self.b
B = (1-np.exp(-a*x))/a
A = np.exp(((B-x)*(a**2*b-(sigma**2)/2))/a**2-(sigma**2*B**2)/(4*a))
self.B=B
self.A=A
self.sim_p = A*np.exp(-B*x)
self.r = -1*np.log(self.sim_p)/x
return self.r
def loss(self,x):
self.a = x[0]
self.b = x[1]
self.sim_rs = apply(self.get_TheoreticalP,self.ps)
loss = np.array(self.ps.as_matrix())-np.array(self.sim_rs)
loss = 10000*np.sum(loss**2)
return loss
def solve(self,x0=np.random.rand(2)):
self.opt_results = optimize.fmin(self.loss,x0=x0)#,tol=1e-10,method='Nelder-Mead',options={'maxiter':1800})
self.a = self.opt_results[0]
self.b = self.opt_results[1]
print(self.opt_results)
def get_price_rate(self,T,r):
sigma = list(self.sigma)[T]
T = self.t[T]
a = self.a
b = self.b
B = (1-np.exp(-a*T))/a
A = np.exp(((B-T)*(a**2*b-(sigma**2)/2))/a**2)-(sigma**2*B**2)/(4*a)
p = A*np.exp(-B*r)
r = -1*np.log(p)/T
return p,r
def option_pricing(V,r,t,T,X):
#print('Expiration: {}'.format(t))
#print('Maturity: {}'.format(T))
time_dict = dict(zip(V.t,np.arange(len(V.t))))
r = r[-1:][t].item()
P = V.get_price_rate(time_dict[T],r)
p = V.get_price_rate(time_dict[t],r)
sigmap = V.sigma[t]*(1/V.a)*(1/np.sqrt(t))*(1-np.exp(-V.a*(T-t)))*np.sqrt((1-np.exp(-2*V.a*t))/(2*V.a))
d = (1/sigmap)*np.log(P[0]/(p[0]*X))+0.5*sigmap
c = P[0]*norm.cdf(d)-X*p[0]*norm.cdf(d-sigmap)
return c
|
normal
|
{
"blob_id": "b6470ffda9040223951a99abc600ce1e99fe146b",
"index": 7902,
"step-1": "<mask token>\n\n\nclass Vasicek:\n\n def __init__(self, rs, vol):\n self.t = rs.columns\n self.ps = rs[-1:]\n self.sigma = vol\n <mask token>\n\n def loss(self, x):\n self.a = x[0]\n self.b = x[1]\n self.sim_rs = apply(self.get_TheoreticalP, self.ps)\n loss = np.array(self.ps.as_matrix()) - np.array(self.sim_rs)\n loss = 10000 * np.sum(loss ** 2)\n return loss\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Vasicek:\n\n def __init__(self, rs, vol):\n self.t = rs.columns\n self.ps = rs[-1:]\n self.sigma = vol\n\n def get_TheoreticalP(self, x=0):\n sigma = self.sigma\n try:\n _ = x.shape\n except:\n x = self.t\n a = self.a\n b = self.b\n B = (1 - np.exp(-a * x)) / a\n A = np.exp((B - x) * (a ** 2 * b - sigma ** 2 / 2) / a ** 2 - sigma **\n 2 * B ** 2 / (4 * a))\n self.B = B\n self.A = A\n self.sim_p = A * np.exp(-B * x)\n self.r = -1 * np.log(self.sim_p) / x\n return self.r\n\n def loss(self, x):\n self.a = x[0]\n self.b = x[1]\n self.sim_rs = apply(self.get_TheoreticalP, self.ps)\n loss = np.array(self.ps.as_matrix()) - np.array(self.sim_rs)\n loss = 10000 * np.sum(loss ** 2)\n return loss\n\n def solve(self, x0=np.random.rand(2)):\n self.opt_results = optimize.fmin(self.loss, x0=x0)\n self.a = self.opt_results[0]\n self.b = self.opt_results[1]\n print(self.opt_results)\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Vasicek:\n\n def __init__(self, rs, vol):\n self.t = rs.columns\n self.ps = rs[-1:]\n self.sigma = vol\n\n def get_TheoreticalP(self, x=0):\n sigma = self.sigma\n try:\n _ = x.shape\n except:\n x = self.t\n a = self.a\n b = self.b\n B = (1 - np.exp(-a * x)) / a\n A = np.exp((B - x) * (a ** 2 * b - sigma ** 2 / 2) / a ** 2 - sigma **\n 2 * B ** 2 / (4 * a))\n self.B = B\n self.A = A\n self.sim_p = A * np.exp(-B * x)\n self.r = -1 * np.log(self.sim_p) / x\n return self.r\n\n def loss(self, x):\n self.a = x[0]\n self.b = x[1]\n self.sim_rs = apply(self.get_TheoreticalP, self.ps)\n loss = np.array(self.ps.as_matrix()) - np.array(self.sim_rs)\n loss = 10000 * np.sum(loss ** 2)\n return loss\n\n def solve(self, x0=np.random.rand(2)):\n self.opt_results = optimize.fmin(self.loss, x0=x0)\n self.a = self.opt_results[0]\n self.b = self.opt_results[1]\n print(self.opt_results)\n\n def get_price_rate(self, T, r):\n sigma = list(self.sigma)[T]\n T = self.t[T]\n a = self.a\n b = self.b\n B = (1 - np.exp(-a * T)) / a\n A = np.exp((B - T) * (a ** 2 * b - sigma ** 2 / 2) / a ** 2\n ) - sigma ** 2 * B ** 2 / (4 * a)\n p = A * np.exp(-B * r)\n r = -1 * np.log(p) / T\n return p, r\n\n\n<mask token>\n",
"step-4": "from functions2 import *\nimport numpy as np\nimport numpy as np\nimport math\nfrom scipy import optimize\nimport pylab as pl\nfrom IPython import display as dp\n\n\nclass Vasicek:\n\n def __init__(self, rs, vol):\n self.t = rs.columns\n self.ps = rs[-1:]\n self.sigma = vol\n\n def get_TheoreticalP(self, x=0):\n sigma = self.sigma\n try:\n _ = x.shape\n except:\n x = self.t\n a = self.a\n b = self.b\n B = (1 - np.exp(-a * x)) / a\n A = np.exp((B - x) * (a ** 2 * b - sigma ** 2 / 2) / a ** 2 - sigma **\n 2 * B ** 2 / (4 * a))\n self.B = B\n self.A = A\n self.sim_p = A * np.exp(-B * x)\n self.r = -1 * np.log(self.sim_p) / x\n return self.r\n\n def loss(self, x):\n self.a = x[0]\n self.b = x[1]\n self.sim_rs = apply(self.get_TheoreticalP, self.ps)\n loss = np.array(self.ps.as_matrix()) - np.array(self.sim_rs)\n loss = 10000 * np.sum(loss ** 2)\n return loss\n\n def solve(self, x0=np.random.rand(2)):\n self.opt_results = optimize.fmin(self.loss, x0=x0)\n self.a = self.opt_results[0]\n self.b = self.opt_results[1]\n print(self.opt_results)\n\n def get_price_rate(self, T, r):\n sigma = list(self.sigma)[T]\n T = self.t[T]\n a = self.a\n b = self.b\n B = (1 - np.exp(-a * T)) / a\n A = np.exp((B - T) * (a ** 2 * b - sigma ** 2 / 2) / a ** 2\n ) - sigma ** 2 * B ** 2 / (4 * a)\n p = A * np.exp(-B * r)\n r = -1 * np.log(p) / T\n return p, r\n\n\ndef option_pricing(V, r, t, T, X):\n time_dict = dict(zip(V.t, np.arange(len(V.t))))\n r = r[-1:][t].item()\n P = V.get_price_rate(time_dict[T], r)\n p = V.get_price_rate(time_dict[t], r)\n sigmap = V.sigma[t] * (1 / V.a) * (1 / np.sqrt(t)) * (1 - np.exp(-V.a *\n (T - t))) * np.sqrt((1 - np.exp(-2 * V.a * t)) / (2 * V.a))\n d = 1 / sigmap * np.log(P[0] / (p[0] * X)) + 0.5 * sigmap\n c = P[0] * norm.cdf(d) - X * p[0] * norm.cdf(d - sigmap)\n return c\n",
"step-5": "from functions2 import *\nimport numpy as np\n#from functions import TermStructure,load_data\nimport numpy as np\nimport math\nfrom scipy import optimize\nimport pylab as pl\nfrom IPython import display as dp\n\n\n\n\nclass Vasicek():\n def __init__(self,rs,vol):\n self.t = rs.columns\n self.ps= rs[-1:]\n self.sigma = vol \n \n def get_TheoreticalP(self,x=0):\n sigma = self.sigma\n try:\n _ = x.shape\n except:\n x = self.t\n \n a = self.a\n b = self.b\n B = (1-np.exp(-a*x))/a\n A = np.exp(((B-x)*(a**2*b-(sigma**2)/2))/a**2-(sigma**2*B**2)/(4*a))\n self.B=B\n self.A=A\n self.sim_p = A*np.exp(-B*x)\n self.r = -1*np.log(self.sim_p)/x\n return self.r\n\n \n def loss(self,x):\n self.a = x[0]\n self.b = x[1] \n self.sim_rs = apply(self.get_TheoreticalP,self.ps)\n loss = np.array(self.ps.as_matrix())-np.array(self.sim_rs)\n\n loss = 10000*np.sum(loss**2)\n \n return loss\n\n \n def solve(self,x0=np.random.rand(2)):\n self.opt_results = optimize.fmin(self.loss,x0=x0)#,tol=1e-10,method='Nelder-Mead',options={'maxiter':1800})\n self.a = self.opt_results[0]\n self.b = self.opt_results[1]\n print(self.opt_results)\n \n def get_price_rate(self,T,r):\n \n sigma = list(self.sigma)[T]\n T = self.t[T]\n a = self.a\n b = self.b\n B = (1-np.exp(-a*T))/a\n A = np.exp(((B-T)*(a**2*b-(sigma**2)/2))/a**2)-(sigma**2*B**2)/(4*a)\n p = A*np.exp(-B*r)\n r = -1*np.log(p)/T\n return p,r\n\n\ndef option_pricing(V,r,t,T,X):\n #print('Expiration: {}'.format(t))\n #print('Maturity: {}'.format(T))\n \n time_dict = dict(zip(V.t,np.arange(len(V.t))))\n \n r = r[-1:][t].item()\n \n P = V.get_price_rate(time_dict[T],r)\n \n p = V.get_price_rate(time_dict[t],r)\n \n\n \n sigmap = V.sigma[t]*(1/V.a)*(1/np.sqrt(t))*(1-np.exp(-V.a*(T-t)))*np.sqrt((1-np.exp(-2*V.a*t))/(2*V.a))\n \n d = (1/sigmap)*np.log(P[0]/(p[0]*X))+0.5*sigmap\n \n c = P[0]*norm.cdf(d)-X*p[0]*norm.cdf(d-sigmap)\n \n return c",
"step-ids": [
3,
5,
6,
8,
9
]
}
|
[
3,
5,
6,
8,
9
] |
<|reserved_special_token_0|>
def multiprocessing_start(obj):
cov = init()
if cov:
multiprocessing.util.Finalize(None, multiprocessing_finish, args=(
cov,), exitpriority=1000)
<|reserved_special_token_0|>
def init():
cov_source = os.environ.get('COV_CORE_SOURCE')
cov_config = os.environ.get('COV_CORE_CONFIG')
cov_datafile = os.environ.get('COV_CORE_DATAFILE')
if cov_datafile:
import coverage
if not cov_source:
cov_source = None
else:
cov_source = cov_source.split(os.pathsep)
if not cov_config:
cov_config = True
cov = coverage.coverage(source=cov_source, data_suffix=True,
config_file=cov_config, auto_data=True, data_file=cov_datafile)
cov.load()
cov.start()
cov._warn_no_data = False
cov._warn_unimported_source = False
return cov
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def multiprocessing_start(obj):
cov = init()
if cov:
multiprocessing.util.Finalize(None, multiprocessing_finish, args=(
cov,), exitpriority=1000)
def multiprocessing_finish(cov):
cov.stop()
cov.save()
<|reserved_special_token_0|>
def init():
cov_source = os.environ.get('COV_CORE_SOURCE')
cov_config = os.environ.get('COV_CORE_CONFIG')
cov_datafile = os.environ.get('COV_CORE_DATAFILE')
if cov_datafile:
import coverage
if not cov_source:
cov_source = None
else:
cov_source = cov_source.split(os.pathsep)
if not cov_config:
cov_config = True
cov = coverage.coverage(source=cov_source, data_suffix=True,
config_file=cov_config, auto_data=True, data_file=cov_datafile)
cov.load()
cov.start()
cov._warn_no_data = False
cov._warn_unimported_source = False
return cov
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def multiprocessing_start(obj):
cov = init()
if cov:
multiprocessing.util.Finalize(None, multiprocessing_finish, args=(
cov,), exitpriority=1000)
def multiprocessing_finish(cov):
cov.stop()
cov.save()
try:
import multiprocessing.util
except ImportError:
pass
else:
multiprocessing.util.register_after_fork(multiprocessing_start,
multiprocessing_start)
def init():
cov_source = os.environ.get('COV_CORE_SOURCE')
cov_config = os.environ.get('COV_CORE_CONFIG')
cov_datafile = os.environ.get('COV_CORE_DATAFILE')
if cov_datafile:
import coverage
if not cov_source:
cov_source = None
else:
cov_source = cov_source.split(os.pathsep)
if not cov_config:
cov_config = True
cov = coverage.coverage(source=cov_source, data_suffix=True,
config_file=cov_config, auto_data=True, data_file=cov_datafile)
cov.load()
cov.start()
cov._warn_no_data = False
cov._warn_unimported_source = False
return cov
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os
def multiprocessing_start(obj):
cov = init()
if cov:
multiprocessing.util.Finalize(None, multiprocessing_finish, args=(
cov,), exitpriority=1000)
def multiprocessing_finish(cov):
cov.stop()
cov.save()
try:
import multiprocessing.util
except ImportError:
pass
else:
multiprocessing.util.register_after_fork(multiprocessing_start,
multiprocessing_start)
def init():
cov_source = os.environ.get('COV_CORE_SOURCE')
cov_config = os.environ.get('COV_CORE_CONFIG')
cov_datafile = os.environ.get('COV_CORE_DATAFILE')
if cov_datafile:
import coverage
if not cov_source:
cov_source = None
else:
cov_source = cov_source.split(os.pathsep)
if not cov_config:
cov_config = True
cov = coverage.coverage(source=cov_source, data_suffix=True,
config_file=cov_config, auto_data=True, data_file=cov_datafile)
cov.load()
cov.start()
cov._warn_no_data = False
cov._warn_unimported_source = False
return cov
<|reserved_special_token_1|>
"""Activate coverage at python startup if appropriate.
The python site initialisation will ensure that anything we import
will be removed and not visible at the end of python startup. However
we minimise all work by putting these init actions in this separate
module and only importing what is needed when needed.
For normal python startup when coverage should not be activated the pth
file checks a single env var and does not import or call the init fn
here.
For python startup when an ancestor process has set the env indicating
that code coverage is being collected we activate coverage based on
info passed via env vars.
"""
import os
def multiprocessing_start(obj):
cov = init()
if cov:
multiprocessing.util.Finalize(None, multiprocessing_finish, args=(cov,), exitpriority=1000)
def multiprocessing_finish(cov):
cov.stop()
cov.save()
try:
import multiprocessing.util
except ImportError:
pass
else:
multiprocessing.util.register_after_fork(multiprocessing_start, multiprocessing_start)
def init():
# Only continue if ancestor process has set everything needed in
# the env.
cov_source = os.environ.get('COV_CORE_SOURCE')
cov_config = os.environ.get('COV_CORE_CONFIG')
cov_datafile = os.environ.get('COV_CORE_DATAFILE')
if cov_datafile:
# Import what we need to activate coverage.
import coverage
# Determine all source roots.
if not cov_source:
cov_source = None
else:
cov_source = cov_source.split(os.pathsep)
if not cov_config:
cov_config = True
# Activate coverage for this process.
cov = coverage.coverage(
source=cov_source,
data_suffix=True,
config_file=cov_config,
auto_data=True,
data_file=cov_datafile
)
cov.load()
cov.start()
cov._warn_no_data = False
cov._warn_unimported_source = False
return cov
|
flexible
|
{
"blob_id": "243794d36a1c6861c2c3308fe6a52ec19b73df72",
"index": 7820,
"step-1": "<mask token>\n\n\ndef multiprocessing_start(obj):\n cov = init()\n if cov:\n multiprocessing.util.Finalize(None, multiprocessing_finish, args=(\n cov,), exitpriority=1000)\n\n\n<mask token>\n\n\ndef init():\n cov_source = os.environ.get('COV_CORE_SOURCE')\n cov_config = os.environ.get('COV_CORE_CONFIG')\n cov_datafile = os.environ.get('COV_CORE_DATAFILE')\n if cov_datafile:\n import coverage\n if not cov_source:\n cov_source = None\n else:\n cov_source = cov_source.split(os.pathsep)\n if not cov_config:\n cov_config = True\n cov = coverage.coverage(source=cov_source, data_suffix=True,\n config_file=cov_config, auto_data=True, data_file=cov_datafile)\n cov.load()\n cov.start()\n cov._warn_no_data = False\n cov._warn_unimported_source = False\n return cov\n",
"step-2": "<mask token>\n\n\ndef multiprocessing_start(obj):\n cov = init()\n if cov:\n multiprocessing.util.Finalize(None, multiprocessing_finish, args=(\n cov,), exitpriority=1000)\n\n\ndef multiprocessing_finish(cov):\n cov.stop()\n cov.save()\n\n\n<mask token>\n\n\ndef init():\n cov_source = os.environ.get('COV_CORE_SOURCE')\n cov_config = os.environ.get('COV_CORE_CONFIG')\n cov_datafile = os.environ.get('COV_CORE_DATAFILE')\n if cov_datafile:\n import coverage\n if not cov_source:\n cov_source = None\n else:\n cov_source = cov_source.split(os.pathsep)\n if not cov_config:\n cov_config = True\n cov = coverage.coverage(source=cov_source, data_suffix=True,\n config_file=cov_config, auto_data=True, data_file=cov_datafile)\n cov.load()\n cov.start()\n cov._warn_no_data = False\n cov._warn_unimported_source = False\n return cov\n",
"step-3": "<mask token>\n\n\ndef multiprocessing_start(obj):\n cov = init()\n if cov:\n multiprocessing.util.Finalize(None, multiprocessing_finish, args=(\n cov,), exitpriority=1000)\n\n\ndef multiprocessing_finish(cov):\n cov.stop()\n cov.save()\n\n\ntry:\n import multiprocessing.util\nexcept ImportError:\n pass\nelse:\n multiprocessing.util.register_after_fork(multiprocessing_start,\n multiprocessing_start)\n\n\ndef init():\n cov_source = os.environ.get('COV_CORE_SOURCE')\n cov_config = os.environ.get('COV_CORE_CONFIG')\n cov_datafile = os.environ.get('COV_CORE_DATAFILE')\n if cov_datafile:\n import coverage\n if not cov_source:\n cov_source = None\n else:\n cov_source = cov_source.split(os.pathsep)\n if not cov_config:\n cov_config = True\n cov = coverage.coverage(source=cov_source, data_suffix=True,\n config_file=cov_config, auto_data=True, data_file=cov_datafile)\n cov.load()\n cov.start()\n cov._warn_no_data = False\n cov._warn_unimported_source = False\n return cov\n",
"step-4": "<mask token>\nimport os\n\n\ndef multiprocessing_start(obj):\n cov = init()\n if cov:\n multiprocessing.util.Finalize(None, multiprocessing_finish, args=(\n cov,), exitpriority=1000)\n\n\ndef multiprocessing_finish(cov):\n cov.stop()\n cov.save()\n\n\ntry:\n import multiprocessing.util\nexcept ImportError:\n pass\nelse:\n multiprocessing.util.register_after_fork(multiprocessing_start,\n multiprocessing_start)\n\n\ndef init():\n cov_source = os.environ.get('COV_CORE_SOURCE')\n cov_config = os.environ.get('COV_CORE_CONFIG')\n cov_datafile = os.environ.get('COV_CORE_DATAFILE')\n if cov_datafile:\n import coverage\n if not cov_source:\n cov_source = None\n else:\n cov_source = cov_source.split(os.pathsep)\n if not cov_config:\n cov_config = True\n cov = coverage.coverage(source=cov_source, data_suffix=True,\n config_file=cov_config, auto_data=True, data_file=cov_datafile)\n cov.load()\n cov.start()\n cov._warn_no_data = False\n cov._warn_unimported_source = False\n return cov\n",
"step-5": "\"\"\"Activate coverage at python startup if appropriate.\n\nThe python site initialisation will ensure that anything we import\nwill be removed and not visible at the end of python startup. However\nwe minimise all work by putting these init actions in this separate\nmodule and only importing what is needed when needed.\n\nFor normal python startup when coverage should not be activated the pth\nfile checks a single env var and does not import or call the init fn\nhere.\n\nFor python startup when an ancestor process has set the env indicating\nthat code coverage is being collected we activate coverage based on\ninfo passed via env vars.\n\"\"\"\nimport os\n\n\ndef multiprocessing_start(obj):\n cov = init()\n if cov:\n multiprocessing.util.Finalize(None, multiprocessing_finish, args=(cov,), exitpriority=1000)\n\n\ndef multiprocessing_finish(cov):\n cov.stop()\n cov.save()\n\n\ntry:\n import multiprocessing.util\nexcept ImportError:\n pass\nelse:\n multiprocessing.util.register_after_fork(multiprocessing_start, multiprocessing_start)\n\n\ndef init():\n # Only continue if ancestor process has set everything needed in\n # the env.\n\n cov_source = os.environ.get('COV_CORE_SOURCE')\n cov_config = os.environ.get('COV_CORE_CONFIG')\n cov_datafile = os.environ.get('COV_CORE_DATAFILE')\n if cov_datafile:\n # Import what we need to activate coverage.\n import coverage\n\n # Determine all source roots.\n if not cov_source:\n cov_source = None\n else:\n cov_source = cov_source.split(os.pathsep)\n if not cov_config:\n cov_config = True\n\n # Activate coverage for this process.\n cov = coverage.coverage(\n source=cov_source,\n data_suffix=True,\n config_file=cov_config,\n auto_data=True,\n data_file=cov_datafile\n )\n cov.load()\n cov.start()\n cov._warn_no_data = False\n cov._warn_unimported_source = False\n return cov\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import collections
import cPickle as pickle
import os
import shutil
import warnings
import numpy as np
import theano
import theano.tensor as T
import tables
#theano.config.compute_test_value = 'warn'
class SGD_Trainer(object):
"""Implementation of a stochastic gradient descent trainer
"""
#{{{ Properties
@property
def inputs(self):
return self._inputs
@inputs.setter
def inputs(self, val):
#FIXME: make this work for other input types
if not isinstance(val, np.ndarray):
raise TypeError('Resetting trainer inputs currently only works for '
'ndarray inputs!')
self._inputs = val
self._inputs_theano = theano.shared(
self._inputs[:self._loadsize],
name='inputs')
self._numcases = self._inputs.shape[0]
self._numloads = self._numcases // self._loadsize
print 'recompiling trainer functions...'
self._compile_functions()
@property
def gradient_clip_threshold(self):
return self._gradient_clip_threshold.get_value()
@property
def learningrate_decay_factor(self):
return self._learningrate_decay_factor.get_value()
@learningrate_decay_factor.setter
def learningrate_decay_factor(self, val):
self._learningrate_decay_factor.set_value(np.float32(val))
@property
def learningrate_decay_interval(self):
return self._learningrate_decay_interval.get_value()
@learningrate_decay_interval.setter
def learningrate_decay_interval(self, val):
self._learningrate_decay_interval.set_value(np.int64(val))
@gradient_clip_threshold.setter
def gradient_clip_threshold(self, val):
self._gradient_clip_threshold.set_value(np.float32(val))
@property
def learningrate(self):
return self._learningrate.get_value()
@learningrate.setter
def learningrate(self, value):
self._learningrate.set_value(np.float32(value))
@property
def momentum(self):
return self._momentum.get_value()
@momentum.setter
def momentum(self, val):
self._momentum.set_value(np.float32(val))
@property
def batchsize(self):
return self._batchsize
@property
def loadsize(self):
return self._loadsize
@property
def numcases(self):
return self._numcases
@property
def verbose(self):
return self._verbose
@verbose.setter
def verbose(self, val):
self._verbose = bool(val)
@property
def epochcount(self):
return self._epochcount
@epochcount.setter
def epochcount(self, val):
self._epochcount = int(val)
@property
def momentum_batchcounter(self):
return self._momentum_batchcounter
#}}}
def __init__(self, model=None, inputs=None, batchsize=100, learningrate=.01,
momentum=0.9, loadsize=None,
rng=None, verbose=True,
numcases=None, gradient_clip_threshold=1000,
numepochs_per_load=1,
rmsprop=None, cost=None, params=None, inputvar=None,
grads=None):
#{{{ Initialization of Properties
assert model is not None or (
cost is not None and params is not None and
inputvar is not None and grads is not None), (
"either a model instance or cost, params and inputvar "
"have to be passed to the SGD_Trainer constructor")
if model is not None:
self._model = model
self._params = model.params
self._cost = model._cost
self._inputvar = model.inputs
self._grads = model._grads
else:
self._params = params
self._cost = cost
self._inputvar = inputvar
self._grads = grads
self._learningrate = theano.shared(np.float32(learningrate),
name='learningrate')
self.numepochs_per_load = numepochs_per_load
self._momentum = theano.shared(np.float32(momentum),
name='momentum')
self._total_stepcount = 0
self._gradient_clip_threshold = theano.shared(
np.float32(gradient_clip_threshold),
name='gradient_clip_threshold')
self._avg_gradnorm = theano.shared(np.float32(0.), name='avg_gradnorm')
self._learningrate_decay_factor = theano.shared(
np.float32,
name='learningrate_decay_factor')
self._learningrate_decay_interval = theano.shared(
np.int64,
name='learningrate_decay_interval')
if isinstance(inputs, str):
self._inputs_type = 'h5'
self._inputsfile = tables.openFile(inputs, 'r')
self._inputs = self._inputsfile.root.inputs_white
elif hasattr(inputs, '__call__'):
self._inputs_type = 'function'
self._inputs_fn = inputs
else:
self._inputs_type = 'numpy'
self._inputs = inputs
self._model = model
self._numparams = reduce(lambda x,y: x+y,
[p.get_value().size for p in self._params])
if self._inputs_type == 'function':
numcases = loadsize
else:
if numcases is None or numcases > self._inputs.shape[0]:
numcases = self._inputs.shape[0]
self._numcases = numcases
self._batchsize = batchsize
self._loadsize = loadsize
self._verbose = verbose
if self._batchsize > self._numcases:
self._batchsize = self._numcases
if self._loadsize == None:
self._loadsize = self._batchsize * 100
if self._loadsize > self._numcases:
self._loadsize = self._numcases
self._numloads = self._numcases // self._loadsize
self._numbatches = self._loadsize // self._batchsize
if self._inputs_type == 'h5':
self._inputs_theano = theano.shared(
self._inputs.read(stop=self._loadsize))
elif self._inputs_type == 'function':
# TODO: generate inputs for first load
print "generating first load..."
inp = np.empty((self._loadsize, ) + (self._inputs_fn().shape),
dtype=np.float32)
for i in xrange(self._loadsize):
inp[i] = self._inputs_fn()
if (i + 1) % 100 == 0:
print '{0}/{1}'.format(i + 1, self.loadsize)
self._inputs_theano = theano.shared(
inp)
else:
self._inputs_theano = theano.shared(
self._inputs[:self._loadsize],
name='inputs')
#self._inputs_theano.tag.test_value = np.random.randn(100, model.n_vis*4)
self._momentum_batchcounter = 0
if rng is None:
self._rng = np.random.RandomState(1)
else:
self._rng = rng
self._epochcount = 0
self._index = T.lscalar()
self._incs = \
dict([(p, theano.shared(value=np.zeros(p.get_value().shape,
dtype=theano.config.floatX), name='inc_'+p.name))
for p in self._params])
self._inc_updates = collections.OrderedDict()
self.rmsprop = rmsprop
if self.rmsprop is not None:
self.averaging_coeff=0.95
self.stabilizer=1e-2
self._avg_grad_sqrs = \
dict([(p, theano.shared(value=np.zeros(p.get_value().shape,
dtype=theano.config.floatX), name='avg_grad_sqr_'+p.name))
for p in self._params])
self._avg_grad_sqrs_updates = collections.OrderedDict()
self._updates_nomomentum = collections.OrderedDict()
self._updates = collections.OrderedDict()
self._n = T.lscalar('n')
self._n.tag.test_value = 0.
self._noop = 0.0 * self._n
self._batch_idx = theano.shared(
value=np.array(0, dtype=np.int64), name='batch_idx')
self.costs = []
self._compile_functions()
#}}}
def __del__(self):
if self._inputs_type == 'h5':
self._inputsfile.close()
def save(self, filename):
"""Saves the trainers parameters to a file
Params:
filename: path to the file
"""
ext = os.path.splitext(filename)[1]
if ext == '.pkl':
print 'saving trainer params to a pkl file'
self.save_pkl(filename)
else:
print 'saving trainer params to a hdf5 file'
self.save_h5(filename)
def save_h5(self, filename):
"""Saves a HDF5 file containing the trainers parameters
Params:
filename: path to the file
"""
try:
shutil.copyfile(filename, '{0}_bak'.format(filename))
except IOError:
print 'could not make backup of trainer param file (which is \
normal if we haven\'t saved one until now)'
paramfile = tables.openFile(filename, 'w')
paramfile.createArray(paramfile.root, 'learningrate',
self.learningrate)
paramfile.createArray(paramfile.root, 'verbose', self.verbose)
paramfile.createArray(paramfile.root, 'loadsize', self.loadsize)
paramfile.createArray(paramfile.root, 'batchsize', self.batchsize)
paramfile.createArray(paramfile.root, 'momentum',
self.momentum)
paramfile.createArray(paramfile.root, 'epochcount',
self.epochcount)
paramfile.createArray(paramfile.root, 'momentum_batchcounter',
self.momentum_batchcounter)
incsgrp = paramfile.createGroup(paramfile.root, 'incs', 'increments')
for p in self._params:
paramfile.createArray(incsgrp, p.name, self._incs[p].get_value())
if self.rmsprop is not None:
avg_grad_sqrs_grp = paramfile.createGroup(paramfile.root, 'avg_grad_sqrs')
for p in self._params:
paramfile.createArray(avg_grad_sqrs_grp, p.name, self._avg_grad_sqrs[p].get_value())
paramfile.close()
def save_pkl(self, filename):
"""Saves a pickled dictionary containing the parameters to a file
Params:
filename: path to the file
"""
param_dict = {}
param_dict['learningrate'] = self.learningrate
param_dict['verbose'] = self.verbose
param_dict['loadsize'] = self.loadsize
param_dict['batchsize'] = self.batchsize
param_dict['momentum'] = self.momentum
param_dict['epochcount'] = self.epochcount
param_dict['momentum_batchcounter'] = self.momentum_batchcounter
param_dict['incs'] = dict(
[(p.name, self._incs[p].get_value()) for p in self._params])
if self.rmsprop is not None:
param_dict['avg_grad_sqrs'] = dict(
[(p.name, self._avg_grad_sqrs[p].get_value()) for p in self._params])
pickle.dump(param_dict, open(filename, 'wb'))
def load(self, filename):
"""Loads pickled dictionary containing parameters from a file
Params:
filename: path to the file
"""
param_dict = pickle.load(open('%s' % filename, 'rb'))
self.learningrate = param_dict['learningrate']
self.verbose = param_dict['verbose']
self._loadsize = param_dict['loadsize']
self._batchsize = param_dict['batchsize']
self.momentum = param_dict['momentum']
self.epochcount = param_dict['epochcount']
self._momentum_batchcounter = param_dict['momentum_batchcounter']
for param_name in param_dict['incs'].keys():
for p in self._params:
if p.name == param_name:
self._incs[p].set_value(param_dict['incs'][param_name])
if self.rmsprop is not None:
for param_name in param_dict['avg_grad_sqrs'].keys():
for p in self._params:
if p.name == param_name:
self._avg_grad_sqrs[p].set_value(param_dict['avg_grad_sqrs'][param_name])
self._numbatches = self._loadsize // self._batchsize
if self._inputs_type != 'function':
self._numloads = self._inputs.shape[0] // self._loadsize
if self._inputs_type == 'h5':
self._inputs_theano.set_value(
self._inputs.read(stop=self._loadsize))
else:
self._inputs_theano.set_value(self._inputs[:self._loadsize])
def reset_incs(self):
for p in self._params:
self._incs[p].set_value(
np.zeros(p.get_value().shape, dtype=theano.config.floatX))
def reset_avg_grad_sqrs(self):
for p in self._params:
self._avg_grad_sqrs[p].set_value(
np.zeros(p.get_value().shape, dtype=theano.config.floatX))
def _compile_functions(self):
self._gradnorm = T.zeros([])
for _param, _grad in zip(self._params, self._grads):
# apply rmsprop to before clipping gradients
if self.rmsprop is not None:
avg_grad_sqr = self._avg_grad_sqrs[_param]
new_avg_grad_sqr = self.averaging_coeff * avg_grad_sqr + \
(1 - self.averaging_coeff) * T.sqr(_grad)
self._avg_grad_sqrs_updates[avg_grad_sqr] = new_avg_grad_sqr
rms_grad_t = T.sqrt(new_avg_grad_sqr)
rms_grad_t = T.maximum(rms_grad_t, self.stabilizer)
_grad = _grad / rms_grad_t
self._gradnorm += T.sum(_grad**2) # calculated on the rmsprop 'grad'
self._gradnorm = T.sqrt(self._gradnorm)
self.gradnorm = theano.function(
inputs=[],
outputs=self._gradnorm,
givens={
self._inputvar:
self._inputs_theano[
self._batch_idx*self.batchsize:
(self._batch_idx+1)*self.batchsize]})
avg_gradnorm_update = {
self._avg_gradnorm: self._avg_gradnorm * .8 + self._gradnorm * .2}
for _param, _grad in zip(self._params, self._grads):
if hasattr(self._model, 'skip_params'):
if _param.name in self._model.skip_params:
continue
_clip_grad = T.switch(
T.gt(self._gradnorm, self._gradient_clip_threshold),
_grad * self._gradient_clip_threshold / self._gradnorm, _grad)
try: # ... to apply learningrate_modifiers
# Cliphid version:
self._inc_updates[self._incs[_param]] = \
self._momentum * self._incs[_param] - \
self._learningrate * \
self._model.layer.learningrate_modifiers[
_param.name] * _clip_grad
self._updates[_param] = _param + self._incs[_param]
self._updates_nomomentum[_param] = _param - \
self._learningrate * \
self._model.layer.learningrate_modifiers[_param.name] * \
_clip_grad
except AttributeError:
self._inc_updates[self._incs[_param]] = self._momentum * \
self._incs[_param] - self._learningrate * _clip_grad
self._updates[_param] = _param + self._incs[_param]
self._updates_nomomentum[_param] = _param - \
self._learningrate * _clip_grad
# first update gradient norm running avg
ordered_updates = collections.OrderedDict(avg_gradnorm_update)
# so that it is considered in the parameter update computations
ordered_updates.update(self._inc_updates)
self._updateincs = theano.function(
[], [self._cost, self._avg_gradnorm], updates = ordered_updates,
givens = {self._inputvar:self._inputs_theano[
self._batch_idx*self._batchsize:(self._batch_idx+1)* \
self._batchsize]})
self._trainmodel = theano.function(
[self._n], self._noop, updates = self._updates)
self._trainmodel_nomomentum = theano.function(
[self._n], self._noop, updates = self._updates_nomomentum,
givens = {self._inputvar:self._inputs_theano[
self._batch_idx*self._batchsize:(self._batch_idx+1)* \
self._batchsize]})
self._momentum_batchcounter = 0
def _trainsubstep(self, batchidx):
self._batch_idx.set_value(batchidx)
stepcost, avg_gradnorm = self._updateincs()
# catch NaN, before updating params
if np.isnan(stepcost):
raise ValueError, 'Cost function returned nan!'
elif np.isinf(stepcost):
raise ValueError, 'Cost function returned infinity!'
if self._momentum_batchcounter < 10:
self._momentum_batchcounter += 1
self._trainmodel_nomomentum(0)
else:
self._momentum_batchcounter = 10
self._trainmodel(0)
return stepcost, avg_gradnorm
def get_avg_gradnorm(self):
avg_gradnorm = 0.0
print self.gradnorm()
for batch_idx in range(self._numbatches):
self._batch_idx.set_value(batch_idx)
tmp = self.gradnorm()
avg_gradnorm += tmp / self._numbatches
print avg_gradnorm
return avg_gradnorm
def step(self):
total_cost = 0.0
cost = 0.0
stepcount = 0.0
self._epochcount += 1
for load_index in range(self._numloads):
indices = np.random.permutation(self._loadsize)
if self._inputs_type == 'h5':
self._inputs_theano.set_value(
self._inputs.read(
start=load_index * self._loadsize,
stop=(load_index + 1) * self._loadsize)[indices])
elif self._inputs_type == 'function':
# if load has been used n times, gen new load
if self._epochcount % self.numepochs_per_load == 0:
print 'using data function to generate new load...'
inp = np.empty((self._loadsize, ) + (self._inputs_fn().shape),
dtype=np.float32)
for i in xrange(self._loadsize):
inp[i] = self._inputs_fn()
if (i + 1) % 100 == 0:
print '{0}/{1}'.format(i + 1, self.loadsize)
self._inputs_theano.set_value(inp)
print 'done'
else:
self._inputs_theano.set_value(
self._inputs[load_index * self._loadsize + indices])
for batch_index in self._rng.permutation(self._numbatches):
stepcount += 1.0
self._total_stepcount += 1.0
stepcost, avg_gradnorm = self._trainsubstep(batch_index)
cost = (1.0-1.0/stepcount)*cost + (1.0/stepcount)* \
stepcost
if self._verbose:
print '> epoch {0:d}, load {1:d}/{2:d}, cost: {3:f}, avg. gradnorm: {4}'.format(
self._epochcount, load_index + 1, self._numloads, cost, avg_gradnorm)
if hasattr(self._model, 'monitor'):
self._model.monitor()
self.costs.append(cost)
return cost
|
normal
|
{
"blob_id": "17ac827d181650cd8bd6e75ca7ff363d70d3c4a7",
"index": 2138,
"step-1": "import collections\nimport cPickle as pickle\nimport os\nimport shutil\nimport warnings\n\nimport numpy as np\nimport theano\nimport theano.tensor as T\nimport tables\n#theano.config.compute_test_value = 'warn'\n\n\nclass SGD_Trainer(object):\n \"\"\"Implementation of a stochastic gradient descent trainer\n \"\"\"\n\n#{{{ Properties\n\n @property\n def inputs(self):\n return self._inputs\n\n @inputs.setter\n def inputs(self, val):\n #FIXME: make this work for other input types\n if not isinstance(val, np.ndarray):\n raise TypeError('Resetting trainer inputs currently only works for '\n 'ndarray inputs!')\n self._inputs = val\n self._inputs_theano = theano.shared(\n self._inputs[:self._loadsize],\n name='inputs')\n self._numcases = self._inputs.shape[0]\n self._numloads = self._numcases // self._loadsize\n print 'recompiling trainer functions...'\n self._compile_functions()\n\n\n @property\n def gradient_clip_threshold(self):\n return self._gradient_clip_threshold.get_value()\n\n @property\n def learningrate_decay_factor(self):\n return self._learningrate_decay_factor.get_value()\n\n @learningrate_decay_factor.setter\n def learningrate_decay_factor(self, val):\n self._learningrate_decay_factor.set_value(np.float32(val))\n\n @property\n def learningrate_decay_interval(self):\n return self._learningrate_decay_interval.get_value()\n\n @learningrate_decay_interval.setter\n def learningrate_decay_interval(self, val):\n self._learningrate_decay_interval.set_value(np.int64(val))\n\n @gradient_clip_threshold.setter\n def gradient_clip_threshold(self, val):\n self._gradient_clip_threshold.set_value(np.float32(val))\n\n @property\n def learningrate(self):\n return self._learningrate.get_value()\n\n @learningrate.setter\n def learningrate(self, value):\n self._learningrate.set_value(np.float32(value))\n\n @property\n def momentum(self):\n return self._momentum.get_value()\n\n @momentum.setter\n def momentum(self, val):\n self._momentum.set_value(np.float32(val))\n\n 
@property\n def batchsize(self):\n return self._batchsize\n\n @property\n def loadsize(self):\n return self._loadsize\n\n @property\n def numcases(self):\n return self._numcases\n\n @property\n def verbose(self):\n return self._verbose\n\n @verbose.setter\n def verbose(self, val):\n self._verbose = bool(val)\n\n @property\n def epochcount(self):\n return self._epochcount\n\n @epochcount.setter\n def epochcount(self, val):\n self._epochcount = int(val)\n\n @property\n def momentum_batchcounter(self):\n return self._momentum_batchcounter\n#}}}\n\n def __init__(self, model=None, inputs=None, batchsize=100, learningrate=.01,\n momentum=0.9, loadsize=None,\n rng=None, verbose=True,\n numcases=None, gradient_clip_threshold=1000,\n numepochs_per_load=1,\n rmsprop=None, cost=None, params=None, inputvar=None,\n grads=None):\n\n#{{{ Initialization of Properties\n assert model is not None or (\n cost is not None and params is not None and\n inputvar is not None and grads is not None), (\n \"either a model instance or cost, params and inputvar \"\n \"have to be passed to the SGD_Trainer constructor\")\n\n if model is not None:\n self._model = model\n self._params = model.params\n self._cost = model._cost\n self._inputvar = model.inputs\n self._grads = model._grads\n else:\n self._params = params\n self._cost = cost\n self._inputvar = inputvar\n self._grads = grads\n\n self._learningrate = theano.shared(np.float32(learningrate),\n name='learningrate')\n self.numepochs_per_load = numepochs_per_load\n\n self._momentum = theano.shared(np.float32(momentum),\n name='momentum')\n self._total_stepcount = 0\n\n self._gradient_clip_threshold = theano.shared(\n np.float32(gradient_clip_threshold),\n name='gradient_clip_threshold')\n self._avg_gradnorm = theano.shared(np.float32(0.), name='avg_gradnorm')\n\n self._learningrate_decay_factor = theano.shared(\n np.float32,\n name='learningrate_decay_factor')\n\n self._learningrate_decay_interval = theano.shared(\n np.int64,\n 
name='learningrate_decay_interval')\n\n if isinstance(inputs, str):\n self._inputs_type = 'h5'\n self._inputsfile = tables.openFile(inputs, 'r')\n self._inputs = self._inputsfile.root.inputs_white\n elif hasattr(inputs, '__call__'):\n self._inputs_type = 'function'\n self._inputs_fn = inputs\n else:\n self._inputs_type = 'numpy'\n self._inputs = inputs\n\n self._model = model\n\n self._numparams = reduce(lambda x,y: x+y,\n [p.get_value().size for p in self._params])\n\n if self._inputs_type == 'function':\n numcases = loadsize\n else:\n if numcases is None or numcases > self._inputs.shape[0]:\n numcases = self._inputs.shape[0]\n self._numcases = numcases\n\n self._batchsize = batchsize\n self._loadsize = loadsize\n self._verbose = verbose\n if self._batchsize > self._numcases:\n self._batchsize = self._numcases\n if self._loadsize == None:\n self._loadsize = self._batchsize * 100\n if self._loadsize > self._numcases:\n self._loadsize = self._numcases\n self._numloads = self._numcases // self._loadsize\n self._numbatches = self._loadsize // self._batchsize\n\n if self._inputs_type == 'h5':\n self._inputs_theano = theano.shared(\n self._inputs.read(stop=self._loadsize))\n elif self._inputs_type == 'function':\n # TODO: generate inputs for first load\n print \"generating first load...\"\n inp = np.empty((self._loadsize, ) + (self._inputs_fn().shape),\n dtype=np.float32)\n for i in xrange(self._loadsize):\n inp[i] = self._inputs_fn()\n if (i + 1) % 100 == 0:\n print '{0}/{1}'.format(i + 1, self.loadsize)\n\n self._inputs_theano = theano.shared(\n inp)\n else:\n self._inputs_theano = theano.shared(\n self._inputs[:self._loadsize],\n name='inputs')\n #self._inputs_theano.tag.test_value = np.random.randn(100, model.n_vis*4)\n\n self._momentum_batchcounter = 0\n\n if rng is None:\n self._rng = np.random.RandomState(1)\n else:\n self._rng = rng\n\n self._epochcount = 0\n self._index = T.lscalar()\n self._incs = \\\n dict([(p, 
theano.shared(value=np.zeros(p.get_value().shape,\n dtype=theano.config.floatX), name='inc_'+p.name))\n for p in self._params])\n self._inc_updates = collections.OrderedDict()\n self.rmsprop = rmsprop\n if self.rmsprop is not None:\n self.averaging_coeff=0.95\n self.stabilizer=1e-2\n self._avg_grad_sqrs = \\\n dict([(p, theano.shared(value=np.zeros(p.get_value().shape,\n dtype=theano.config.floatX), name='avg_grad_sqr_'+p.name))\n for p in self._params])\n self._avg_grad_sqrs_updates = collections.OrderedDict()\n self._updates_nomomentum = collections.OrderedDict()\n self._updates = collections.OrderedDict()\n self._n = T.lscalar('n')\n self._n.tag.test_value = 0.\n self._noop = 0.0 * self._n\n self._batch_idx = theano.shared(\n value=np.array(0, dtype=np.int64), name='batch_idx')\n\n self.costs = []\n self._compile_functions()\n\n#}}}\n\n def __del__(self):\n if self._inputs_type == 'h5':\n self._inputsfile.close()\n\n def save(self, filename):\n \"\"\"Saves the trainers parameters to a file\n Params:\n filename: path to the file\n \"\"\"\n ext = os.path.splitext(filename)[1]\n if ext == '.pkl':\n print 'saving trainer params to a pkl file'\n self.save_pkl(filename)\n else:\n print 'saving trainer params to a hdf5 file'\n self.save_h5(filename)\n\n def save_h5(self, filename):\n \"\"\"Saves a HDF5 file containing the trainers parameters\n Params:\n filename: path to the file\n \"\"\"\n try:\n shutil.copyfile(filename, '{0}_bak'.format(filename))\n except IOError:\n print 'could not make backup of trainer param file (which is \\\n normal if we haven\\'t saved one until now)'\n paramfile = tables.openFile(filename, 'w')\n paramfile.createArray(paramfile.root, 'learningrate',\n self.learningrate)\n paramfile.createArray(paramfile.root, 'verbose', self.verbose)\n paramfile.createArray(paramfile.root, 'loadsize', self.loadsize)\n paramfile.createArray(paramfile.root, 'batchsize', self.batchsize)\n paramfile.createArray(paramfile.root, 'momentum',\n self.momentum)\n 
paramfile.createArray(paramfile.root, 'epochcount',\n self.epochcount)\n paramfile.createArray(paramfile.root, 'momentum_batchcounter',\n self.momentum_batchcounter)\n incsgrp = paramfile.createGroup(paramfile.root, 'incs', 'increments')\n for p in self._params:\n paramfile.createArray(incsgrp, p.name, self._incs[p].get_value())\n if self.rmsprop is not None:\n avg_grad_sqrs_grp = paramfile.createGroup(paramfile.root, 'avg_grad_sqrs')\n for p in self._params:\n paramfile.createArray(avg_grad_sqrs_grp, p.name, self._avg_grad_sqrs[p].get_value())\n paramfile.close()\n\n def save_pkl(self, filename):\n \"\"\"Saves a pickled dictionary containing the parameters to a file\n Params:\n filename: path to the file\n \"\"\"\n param_dict = {}\n param_dict['learningrate'] = self.learningrate\n param_dict['verbose'] = self.verbose\n param_dict['loadsize'] = self.loadsize\n param_dict['batchsize'] = self.batchsize\n param_dict['momentum'] = self.momentum\n param_dict['epochcount'] = self.epochcount\n param_dict['momentum_batchcounter'] = self.momentum_batchcounter\n param_dict['incs'] = dict(\n [(p.name, self._incs[p].get_value()) for p in self._params])\n if self.rmsprop is not None:\n param_dict['avg_grad_sqrs'] = dict(\n [(p.name, self._avg_grad_sqrs[p].get_value()) for p in self._params])\n pickle.dump(param_dict, open(filename, 'wb'))\n\n def load(self, filename):\n \"\"\"Loads pickled dictionary containing parameters from a file\n Params:\n filename: path to the file\n \"\"\"\n param_dict = pickle.load(open('%s' % filename, 'rb'))\n self.learningrate = param_dict['learningrate']\n self.verbose = param_dict['verbose']\n self._loadsize = param_dict['loadsize']\n self._batchsize = param_dict['batchsize']\n self.momentum = param_dict['momentum']\n self.epochcount = param_dict['epochcount']\n self._momentum_batchcounter = param_dict['momentum_batchcounter']\n for param_name in param_dict['incs'].keys():\n for p in self._params:\n if p.name == param_name:\n 
self._incs[p].set_value(param_dict['incs'][param_name])\n if self.rmsprop is not None:\n for param_name in param_dict['avg_grad_sqrs'].keys():\n for p in self._params:\n if p.name == param_name:\n self._avg_grad_sqrs[p].set_value(param_dict['avg_grad_sqrs'][param_name])\n self._numbatches = self._loadsize // self._batchsize\n if self._inputs_type != 'function':\n self._numloads = self._inputs.shape[0] // self._loadsize\n if self._inputs_type == 'h5':\n self._inputs_theano.set_value(\n self._inputs.read(stop=self._loadsize))\n else:\n self._inputs_theano.set_value(self._inputs[:self._loadsize])\n\n def reset_incs(self):\n for p in self._params:\n self._incs[p].set_value(\n np.zeros(p.get_value().shape, dtype=theano.config.floatX))\n\n def reset_avg_grad_sqrs(self):\n for p in self._params:\n self._avg_grad_sqrs[p].set_value(\n np.zeros(p.get_value().shape, dtype=theano.config.floatX))\n\n def _compile_functions(self):\n self._gradnorm = T.zeros([])\n for _param, _grad in zip(self._params, self._grads):\n # apply rmsprop to before clipping gradients\n if self.rmsprop is not None:\n avg_grad_sqr = self._avg_grad_sqrs[_param]\n new_avg_grad_sqr = self.averaging_coeff * avg_grad_sqr + \\\n (1 - self.averaging_coeff) * T.sqr(_grad)\n self._avg_grad_sqrs_updates[avg_grad_sqr] = new_avg_grad_sqr\n rms_grad_t = T.sqrt(new_avg_grad_sqr)\n rms_grad_t = T.maximum(rms_grad_t, self.stabilizer)\n _grad = _grad / rms_grad_t\n self._gradnorm += T.sum(_grad**2) # calculated on the rmsprop 'grad'\n self._gradnorm = T.sqrt(self._gradnorm)\n self.gradnorm = theano.function(\n inputs=[],\n outputs=self._gradnorm,\n givens={\n self._inputvar:\n self._inputs_theano[\n self._batch_idx*self.batchsize:\n (self._batch_idx+1)*self.batchsize]})\n\n avg_gradnorm_update = {\n self._avg_gradnorm: self._avg_gradnorm * .8 + self._gradnorm * .2}\n\n for _param, _grad in zip(self._params, self._grads):\n if hasattr(self._model, 'skip_params'):\n if _param.name in self._model.skip_params:\n 
continue\n\n _clip_grad = T.switch(\n T.gt(self._gradnorm, self._gradient_clip_threshold),\n _grad * self._gradient_clip_threshold / self._gradnorm, _grad)\n\n try: # ... to apply learningrate_modifiers\n # Cliphid version:\n self._inc_updates[self._incs[_param]] = \\\n self._momentum * self._incs[_param] - \\\n self._learningrate * \\\n self._model.layer.learningrate_modifiers[\n _param.name] * _clip_grad\n self._updates[_param] = _param + self._incs[_param]\n self._updates_nomomentum[_param] = _param - \\\n self._learningrate * \\\n self._model.layer.learningrate_modifiers[_param.name] * \\\n _clip_grad\n\n except AttributeError:\n self._inc_updates[self._incs[_param]] = self._momentum * \\\n self._incs[_param] - self._learningrate * _clip_grad\n self._updates[_param] = _param + self._incs[_param]\n self._updates_nomomentum[_param] = _param - \\\n self._learningrate * _clip_grad\n\n # first update gradient norm running avg\n ordered_updates = collections.OrderedDict(avg_gradnorm_update)\n # so that it is considered in the parameter update computations\n ordered_updates.update(self._inc_updates)\n self._updateincs = theano.function(\n [], [self._cost, self._avg_gradnorm], updates = ordered_updates,\n givens = {self._inputvar:self._inputs_theano[\n self._batch_idx*self._batchsize:(self._batch_idx+1)* \\\n self._batchsize]})\n\n self._trainmodel = theano.function(\n [self._n], self._noop, updates = self._updates)\n\n self._trainmodel_nomomentum = theano.function(\n [self._n], self._noop, updates = self._updates_nomomentum,\n givens = {self._inputvar:self._inputs_theano[\n self._batch_idx*self._batchsize:(self._batch_idx+1)* \\\n self._batchsize]})\n\n self._momentum_batchcounter = 0\n\n\n def _trainsubstep(self, batchidx):\n self._batch_idx.set_value(batchidx)\n stepcost, avg_gradnorm = self._updateincs()\n # catch NaN, before updating params\n if np.isnan(stepcost):\n raise ValueError, 'Cost function returned nan!'\n elif np.isinf(stepcost):\n raise ValueError, 
'Cost function returned infinity!'\n\n if self._momentum_batchcounter < 10:\n self._momentum_batchcounter += 1\n self._trainmodel_nomomentum(0)\n else:\n self._momentum_batchcounter = 10\n self._trainmodel(0)\n return stepcost, avg_gradnorm\n\n def get_avg_gradnorm(self):\n avg_gradnorm = 0.0\n print self.gradnorm()\n for batch_idx in range(self._numbatches):\n self._batch_idx.set_value(batch_idx)\n tmp = self.gradnorm()\n avg_gradnorm += tmp / self._numbatches\n print avg_gradnorm\n return avg_gradnorm\n\n def step(self):\n total_cost = 0.0\n cost = 0.0\n stepcount = 0.0\n\n self._epochcount += 1\n\n for load_index in range(self._numloads):\n indices = np.random.permutation(self._loadsize)\n if self._inputs_type == 'h5':\n self._inputs_theano.set_value(\n self._inputs.read(\n start=load_index * self._loadsize,\n stop=(load_index + 1) * self._loadsize)[indices])\n elif self._inputs_type == 'function':\n # if load has been used n times, gen new load\n if self._epochcount % self.numepochs_per_load == 0:\n print 'using data function to generate new load...'\n inp = np.empty((self._loadsize, ) + (self._inputs_fn().shape),\n dtype=np.float32)\n for i in xrange(self._loadsize):\n inp[i] = self._inputs_fn()\n if (i + 1) % 100 == 0:\n print '{0}/{1}'.format(i + 1, self.loadsize)\n self._inputs_theano.set_value(inp)\n print 'done'\n else:\n self._inputs_theano.set_value(\n self._inputs[load_index * self._loadsize + indices])\n for batch_index in self._rng.permutation(self._numbatches):\n stepcount += 1.0\n self._total_stepcount += 1.0\n stepcost, avg_gradnorm = self._trainsubstep(batch_index)\n cost = (1.0-1.0/stepcount)*cost + (1.0/stepcount)* \\\n stepcost\n if self._verbose:\n print '> epoch {0:d}, load {1:d}/{2:d}, cost: {3:f}, avg. gradnorm: {4}'.format(\n self._epochcount, load_index + 1, self._numloads, cost, avg_gradnorm)\n if hasattr(self._model, 'monitor'):\n self._model.monitor()\n self.costs.append(cost)\n return cost\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ClipboardEvent(Event):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ClipboardEvent(Event):
def __init__(self, text: str, *args, **kwargs):
super().__init__(*args, text=text, **kwargs)
<|reserved_special_token_1|>
from platypush.message.event import Event
class ClipboardEvent(Event):
def __init__(self, text: str, *args, **kwargs):
super().__init__(*args, text=text, **kwargs)
<|reserved_special_token_1|>
from platypush.message.event import Event
class ClipboardEvent(Event):
def __init__(self, text: str, *args, **kwargs):
super().__init__(*args, text=text, **kwargs)
# vim:sw=4:ts=4:et:
|
flexible
|
{
"blob_id": "9b02ce0b3acb14bdd6463c5bdba865b28253767c",
"index": 7896,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ClipboardEvent(Event):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ClipboardEvent(Event):\n\n def __init__(self, text: str, *args, **kwargs):\n super().__init__(*args, text=text, **kwargs)\n",
"step-4": "from platypush.message.event import Event\n\n\nclass ClipboardEvent(Event):\n\n def __init__(self, text: str, *args, **kwargs):\n super().__init__(*args, text=text, **kwargs)\n",
"step-5": "from platypush.message.event import Event\n\n\nclass ClipboardEvent(Event):\n def __init__(self, text: str, *args, **kwargs):\n super().__init__(*args, text=text, **kwargs)\n\n\n# vim:sw=4:ts=4:et:\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
monkey.patch_all()
<|reserved_special_token_0|>
http_server.serve_forever()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
monkey.patch_all()
<|reserved_special_token_0|>
http_server = WSGIServer(('0.0.0.0', 5000), app)
http_server.serve_forever()
<|reserved_special_token_1|>
from gevent import monkey
monkey.patch_all()
from gevent.pywsgi import WSGIServer
from waitlist.app import app
http_server = WSGIServer(('0.0.0.0', 5000), app)
http_server.serve_forever()
<|reserved_special_token_1|>
# pylint: disable=wrong-import-position,wrong-import-order
from gevent import monkey
monkey.patch_all()
from gevent.pywsgi import WSGIServer
from waitlist.app import app
http_server = WSGIServer(("0.0.0.0", 5000), app)
http_server.serve_forever()
|
flexible
|
{
"blob_id": "c36625dfbd733767b09fcb5505d029ae2b16aa44",
"index": 7077,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nmonkey.patch_all()\n<mask token>\nhttp_server.serve_forever()\n",
"step-3": "<mask token>\nmonkey.patch_all()\n<mask token>\nhttp_server = WSGIServer(('0.0.0.0', 5000), app)\nhttp_server.serve_forever()\n",
"step-4": "from gevent import monkey\nmonkey.patch_all()\nfrom gevent.pywsgi import WSGIServer\nfrom waitlist.app import app\nhttp_server = WSGIServer(('0.0.0.0', 5000), app)\nhttp_server.serve_forever()\n",
"step-5": "# pylint: disable=wrong-import-position,wrong-import-order\nfrom gevent import monkey\n\nmonkey.patch_all()\n\nfrom gevent.pywsgi import WSGIServer\nfrom waitlist.app import app\n\nhttp_server = WSGIServer((\"0.0.0.0\", 5000), app)\nhttp_server.serve_forever()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# coding: utf-8
import pandas as pd
import os
import numpy as np
import json as json
import mysql.connector as sqlcnt
import datetime as dt
import requests
from mysql.connector.constants import SQLMode
import os
import glob
import re
import warnings
warnings.filterwarnings("ignore")
from pathlib import Path
# In[289]:
import os,sys
lib_path = r"\\"
#lib_path = r"C:\Users\300068241\Documents\Covid_Data\Daily"
os.chdir(lib_path)
covid_pred=pd.read_csv(r'total_cases_data.csv')
data=covid_pred
import scipy
import patsy
import statsmodels.api as sm
X=data.Time
X=sm.add_constant(X)
data['logTotal']=np.log(data.Total)
y=data.logTotal
mod=sm.OLS(y,X)
res=mod.fit()
print(res.summary())
import math
initial_value_exponent=2.2588
X0=math.exp(initial_value_exponent)
X0
growth_factor_exponent=0.1730
# In[304]:
b=math.exp(growth_factor_exponent)
# In[305]:
b
# In[306]:
from datetime import date
start_date = date(2020, 3, 2) #1st case is assumed to be of 2nd Mar'20
# In[307]:
import datetime
today = datetime.date.today()
t = today + datetime.timedelta(days = 1) #+1 in days as 1st case was on 2nd and another +1 days as we're predicting for tomorrow
delta = t - start_date
time=delta.days
Xt = X0 * (math.pow(b,time))
#Xt
predicted = round(Xt)
tomorrow = t - datetime.timedelta(days=1)
covid_actual=pd.read_csv(r'total_cases_data.csv')
covid_actual.loc[:, 'Date':'human_date']
covid_predicted=pd.DataFrame({'Date':["26/3/2020","27/3/2020","28/3/2020"],'Total':["721","857","1022"], 'human_date':["26th Mar","27th Mar","28th Mar"]}) #change here
covid_predicted.to_csv('predicted_data.csv',index=False)
covid_merge = pd.merge(covid_actual,covid_predicted,left_on=['Date'],right_on=['Date'],how = 'left')
covid_accuracy = covid_merge[(covid_merge['Date']=='26/3/2020') | (covid_merge['Date']=='27/3/2020') | (covid_merge['Date']=='28/3/2020')] #change here
#covid_accuracy
covid_accuracy['Total_y']=covid_accuracy['Total_y'].astype(int)
covid_accuracy['Total_x']=covid_accuracy['Total_x'].astype(int)
covid_accuracy.loc[covid_accuracy['Total_x']>=covid_accuracy['Total_y'], 'Accuracy'] = (covid_accuracy['Total_y']/covid_accuracy['Total_x'])*100
covid_accuracy.loc[covid_accuracy['Total_x']<covid_accuracy['Total_y'], 'Accuracy'] = (covid_accuracy['Total_x']/covid_accuracy['Total_y'])*100
accuracy_final=covid_accuracy.mean(axis = 0)
|
normal
|
{
"blob_id": "2060f57cfd910a308d60ad35ebbbf9ffd5678b9c",
"index": 3519,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwarnings.filterwarnings('ignore')\n<mask token>\nos.chdir(lib_path)\n<mask token>\nprint(res.summary())\n<mask token>\nX0\n<mask token>\nb\n<mask token>\ncovid_actual.loc[:, 'Date':'human_date']\n<mask token>\ncovid_predicted.to_csv('predicted_data.csv', index=False)\n<mask token>\n",
"step-3": "<mask token>\nwarnings.filterwarnings('ignore')\n<mask token>\nlib_path = '\\\\\\\\'\nos.chdir(lib_path)\ncovid_pred = pd.read_csv('total_cases_data.csv')\ndata = covid_pred\n<mask token>\nX = data.Time\nX = sm.add_constant(X)\ndata['logTotal'] = np.log(data.Total)\ny = data.logTotal\nmod = sm.OLS(y, X)\nres = mod.fit()\nprint(res.summary())\n<mask token>\ninitial_value_exponent = 2.2588\nX0 = math.exp(initial_value_exponent)\nX0\ngrowth_factor_exponent = 0.173\nb = math.exp(growth_factor_exponent)\nb\n<mask token>\nstart_date = date(2020, 3, 2)\n<mask token>\ntoday = datetime.date.today()\nt = today + datetime.timedelta(days=1)\ndelta = t - start_date\ntime = delta.days\nXt = X0 * math.pow(b, time)\npredicted = round(Xt)\ntomorrow = t - datetime.timedelta(days=1)\ncovid_actual = pd.read_csv('total_cases_data.csv')\ncovid_actual.loc[:, 'Date':'human_date']\ncovid_predicted = pd.DataFrame({'Date': ['26/3/2020', '27/3/2020',\n '28/3/2020'], 'Total': ['721', '857', '1022'], 'human_date': [\n '26th Mar', '27th Mar', '28th Mar']})\ncovid_predicted.to_csv('predicted_data.csv', index=False)\ncovid_merge = pd.merge(covid_actual, covid_predicted, left_on=['Date'],\n right_on=['Date'], how='left')\ncovid_accuracy = covid_merge[(covid_merge['Date'] == '26/3/2020') | (\n covid_merge['Date'] == '27/3/2020') | (covid_merge['Date'] == '28/3/2020')]\ncovid_accuracy['Total_y'] = covid_accuracy['Total_y'].astype(int)\ncovid_accuracy['Total_x'] = covid_accuracy['Total_x'].astype(int)\ncovid_accuracy.loc[covid_accuracy['Total_x'] >= covid_accuracy['Total_y'],\n 'Accuracy'] = covid_accuracy['Total_y'] / covid_accuracy['Total_x'] * 100\ncovid_accuracy.loc[covid_accuracy['Total_x'] < covid_accuracy['Total_y'],\n 'Accuracy'] = covid_accuracy['Total_x'] / covid_accuracy['Total_y'] * 100\naccuracy_final = covid_accuracy.mean(axis=0)\n",
"step-4": "import pandas as pd\nimport os\nimport numpy as np\nimport json as json\nimport mysql.connector as sqlcnt\nimport datetime as dt\nimport requests\nfrom mysql.connector.constants import SQLMode\nimport os\nimport glob\nimport re\nimport warnings\nwarnings.filterwarnings('ignore')\nfrom pathlib import Path\nimport os, sys\nlib_path = '\\\\\\\\'\nos.chdir(lib_path)\ncovid_pred = pd.read_csv('total_cases_data.csv')\ndata = covid_pred\nimport scipy\nimport patsy\nimport statsmodels.api as sm\nX = data.Time\nX = sm.add_constant(X)\ndata['logTotal'] = np.log(data.Total)\ny = data.logTotal\nmod = sm.OLS(y, X)\nres = mod.fit()\nprint(res.summary())\nimport math\ninitial_value_exponent = 2.2588\nX0 = math.exp(initial_value_exponent)\nX0\ngrowth_factor_exponent = 0.173\nb = math.exp(growth_factor_exponent)\nb\nfrom datetime import date\nstart_date = date(2020, 3, 2)\nimport datetime\ntoday = datetime.date.today()\nt = today + datetime.timedelta(days=1)\ndelta = t - start_date\ntime = delta.days\nXt = X0 * math.pow(b, time)\npredicted = round(Xt)\ntomorrow = t - datetime.timedelta(days=1)\ncovid_actual = pd.read_csv('total_cases_data.csv')\ncovid_actual.loc[:, 'Date':'human_date']\ncovid_predicted = pd.DataFrame({'Date': ['26/3/2020', '27/3/2020',\n '28/3/2020'], 'Total': ['721', '857', '1022'], 'human_date': [\n '26th Mar', '27th Mar', '28th Mar']})\ncovid_predicted.to_csv('predicted_data.csv', index=False)\ncovid_merge = pd.merge(covid_actual, covid_predicted, left_on=['Date'],\n right_on=['Date'], how='left')\ncovid_accuracy = covid_merge[(covid_merge['Date'] == '26/3/2020') | (\n covid_merge['Date'] == '27/3/2020') | (covid_merge['Date'] == '28/3/2020')]\ncovid_accuracy['Total_y'] = covid_accuracy['Total_y'].astype(int)\ncovid_accuracy['Total_x'] = covid_accuracy['Total_x'].astype(int)\ncovid_accuracy.loc[covid_accuracy['Total_x'] >= covid_accuracy['Total_y'],\n 'Accuracy'] = covid_accuracy['Total_y'] / covid_accuracy['Total_x'] * 
100\ncovid_accuracy.loc[covid_accuracy['Total_x'] < covid_accuracy['Total_y'],\n 'Accuracy'] = covid_accuracy['Total_x'] / covid_accuracy['Total_y'] * 100\naccuracy_final = covid_accuracy.mean(axis=0)\n",
"step-5": "\n# coding: utf-8\n\n\nimport pandas as pd\nimport os\nimport numpy as np\nimport json as json\nimport mysql.connector as sqlcnt\nimport datetime as dt\nimport requests\nfrom mysql.connector.constants import SQLMode\nimport os\nimport glob\nimport re\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\nfrom pathlib import Path\n\n\n# In[289]:\n\n\nimport os,sys\nlib_path = r\"\\\\\"\n#lib_path = r\"C:\\Users\\300068241\\Documents\\Covid_Data\\Daily\"\nos.chdir(lib_path)\n\n\ncovid_pred=pd.read_csv(r'total_cases_data.csv')\ndata=covid_pred\n\n\nimport scipy\nimport patsy\n\n\nimport statsmodels.api as sm\n\n\nX=data.Time\nX=sm.add_constant(X)\ndata['logTotal']=np.log(data.Total)\ny=data.logTotal\nmod=sm.OLS(y,X)\nres=mod.fit()\nprint(res.summary())\n\n\nimport math\ninitial_value_exponent=2.2588\nX0=math.exp(initial_value_exponent)\nX0\n\n\ngrowth_factor_exponent=0.1730\n\n\n# In[304]:\n\n\nb=math.exp(growth_factor_exponent)\n\n\n# In[305]:\n\n\nb\n\n\n# In[306]:\n\n\nfrom datetime import date\nstart_date = date(2020, 3, 2) #1st case is assumed to be of 2nd Mar'20\n\n\n# In[307]:\n\n\nimport datetime \ntoday = datetime.date.today()\nt = today + datetime.timedelta(days = 1) #+1 in days as 1st case was on 2nd and another +1 days as we're predicting for tomorrow\n\ndelta = t - start_date\n\n\ntime=delta.days\n\n\nXt = X0 * (math.pow(b,time))\n#Xt\npredicted = round(Xt)\n\ntomorrow = t - datetime.timedelta(days=1)\n\ncovid_actual=pd.read_csv(r'total_cases_data.csv')\n\ncovid_actual.loc[:, 'Date':'human_date']\n\n\ncovid_predicted=pd.DataFrame({'Date':[\"26/3/2020\",\"27/3/2020\",\"28/3/2020\"],'Total':[\"721\",\"857\",\"1022\"], 'human_date':[\"26th Mar\",\"27th Mar\",\"28th Mar\"]}) #change here\n\ncovid_predicted.to_csv('predicted_data.csv',index=False)\n\n\ncovid_merge = pd.merge(covid_actual,covid_predicted,left_on=['Date'],right_on=['Date'],how = 'left')\ncovid_accuracy = covid_merge[(covid_merge['Date']=='26/3/2020') | 
(covid_merge['Date']=='27/3/2020') | (covid_merge['Date']=='28/3/2020')] #change here\n\n#covid_accuracy\n\ncovid_accuracy['Total_y']=covid_accuracy['Total_y'].astype(int)\ncovid_accuracy['Total_x']=covid_accuracy['Total_x'].astype(int)\n\ncovid_accuracy.loc[covid_accuracy['Total_x']>=covid_accuracy['Total_y'], 'Accuracy'] = (covid_accuracy['Total_y']/covid_accuracy['Total_x'])*100\n\ncovid_accuracy.loc[covid_accuracy['Total_x']<covid_accuracy['Total_y'], 'Accuracy'] = (covid_accuracy['Total_x']/covid_accuracy['Total_y'])*100\n\naccuracy_final=covid_accuracy.mean(axis = 0)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
datacake_url = (
'https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/'
)
serial = 'python0001'
number_of_persons_a = 234
number_of_persons_b = 345
additional_payload = 'bla bla'
some_data = 23.456
a_boolean = True
r = requests.post(datacake_url, json={'number_of_persons_a':
number_of_persons_a, 'number_of_persons_b': number_of_persons_b,
'additional_payload': additional_payload, 'some_data': some_data,
'a_boolean': a_boolean, 'serial': serial})
print(r)
<|reserved_special_token_1|>
import requests
if __name__ == '__main__':
datacake_url = (
'https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/'
)
serial = 'python0001'
number_of_persons_a = 234
number_of_persons_b = 345
additional_payload = 'bla bla'
some_data = 23.456
a_boolean = True
r = requests.post(datacake_url, json={'number_of_persons_a':
number_of_persons_a, 'number_of_persons_b': number_of_persons_b,
'additional_payload': additional_payload, 'some_data': some_data,
'a_boolean': a_boolean, 'serial': serial})
print(r)
<|reserved_special_token_1|>
import requests
if __name__ == "__main__":
# individual datacake webhook url
# Change this to the webhook url of your datacake device/product
datacake_url = "https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/"
# Serial number
# Include Serial Number in Payload so Datacake can route information
# based on serial of device
serial = "python0001"
# Just some random demo data
number_of_persons_a = 234
number_of_persons_b = 345
additional_payload = "bla bla"
some_data = 23.456
a_boolean = True
# create api call
r = requests.post(datacake_url, json={
"number_of_persons_a": number_of_persons_a,
"number_of_persons_b": number_of_persons_b,
"additional_payload": additional_payload,
"some_data": some_data,
"a_boolean": a_boolean,
"serial": serial
})
print(r)
|
flexible
|
{
"blob_id": "00af9627242648a5a16a34a18bfc117945f1bc08",
"index": 4936,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n datacake_url = (\n 'https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/'\n )\n serial = 'python0001'\n number_of_persons_a = 234\n number_of_persons_b = 345\n additional_payload = 'bla bla'\n some_data = 23.456\n a_boolean = True\n r = requests.post(datacake_url, json={'number_of_persons_a':\n number_of_persons_a, 'number_of_persons_b': number_of_persons_b,\n 'additional_payload': additional_payload, 'some_data': some_data,\n 'a_boolean': a_boolean, 'serial': serial})\n print(r)\n",
"step-3": "import requests\nif __name__ == '__main__':\n datacake_url = (\n 'https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/'\n )\n serial = 'python0001'\n number_of_persons_a = 234\n number_of_persons_b = 345\n additional_payload = 'bla bla'\n some_data = 23.456\n a_boolean = True\n r = requests.post(datacake_url, json={'number_of_persons_a':\n number_of_persons_a, 'number_of_persons_b': number_of_persons_b,\n 'additional_payload': additional_payload, 'some_data': some_data,\n 'a_boolean': a_boolean, 'serial': serial})\n print(r)\n",
"step-4": "import requests\n\nif __name__ == \"__main__\":\n\n # individual datacake webhook url\n # Change this to the webhook url of your datacake device/product\n datacake_url = \"https://api.datacake.co/integrations/api/ae6dd531-4cf6-4966-b5c9-6c43939aae90/\"\n\n # Serial number\n # Include Serial Number in Payload so Datacake can route information\n # based on serial of device\n serial = \"python0001\"\n\n # Just some random demo data\n number_of_persons_a = 234\n number_of_persons_b = 345\n additional_payload = \"bla bla\"\n some_data = 23.456\n a_boolean = True\n\n # create api call\n r = requests.post(datacake_url, json={\n \"number_of_persons_a\": number_of_persons_a,\n \"number_of_persons_b\": number_of_persons_b,\n \"additional_payload\": additional_payload,\n \"some_data\": some_data,\n \"a_boolean\": a_boolean,\n \"serial\": serial\n })\n\n print(r)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/python
import os
from nao.tactics import Tactic
from nao.inspector import Inspector
def test_file():
print("\n[*] === file ===")
name_libmagic_so = 'libmagic.so.1'
inspector = Inspector("./sample/file", debug=True)
# find_addr = 0x1742D # ret block of is_tar
find_addr = 0x173F8 # return 3 at is_tar
# find_addr = 0x17293
cond = inspector.get_condition_at(Tactic.near_path_constraint, object_name=name_libmagic_so, relative_addr=find_addr)
print("post condition = {}".format(cond))
inspector.run(args=["./sample.tar"], env={'LD_LIBRARY_PATH': os.environ['LD_LIBRARY_PATH']})
return inspector.collect(cond)
if __name__ == "__main__":
res = test_file()
print(res)
assert len(res) > 0
|
normal
|
{
"blob_id": "a25fb9b59d86de5a3180e4257c4e398f22cdbb05",
"index": 6947,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_file():\n print('\\n[*] === file ===')\n name_libmagic_so = 'libmagic.so.1'\n inspector = Inspector('./sample/file', debug=True)\n find_addr = 95224\n cond = inspector.get_condition_at(Tactic.near_path_constraint,\n object_name=name_libmagic_so, relative_addr=find_addr)\n print('post condition = {}'.format(cond))\n inspector.run(args=['./sample.tar'], env={'LD_LIBRARY_PATH': os.environ\n ['LD_LIBRARY_PATH']})\n return inspector.collect(cond)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test_file():\n print('\\n[*] === file ===')\n name_libmagic_so = 'libmagic.so.1'\n inspector = Inspector('./sample/file', debug=True)\n find_addr = 95224\n cond = inspector.get_condition_at(Tactic.near_path_constraint,\n object_name=name_libmagic_so, relative_addr=find_addr)\n print('post condition = {}'.format(cond))\n inspector.run(args=['./sample.tar'], env={'LD_LIBRARY_PATH': os.environ\n ['LD_LIBRARY_PATH']})\n return inspector.collect(cond)\n\n\nif __name__ == '__main__':\n res = test_file()\n print(res)\n assert len(res) > 0\n",
"step-4": "import os\nfrom nao.tactics import Tactic\nfrom nao.inspector import Inspector\n\n\ndef test_file():\n print('\\n[*] === file ===')\n name_libmagic_so = 'libmagic.so.1'\n inspector = Inspector('./sample/file', debug=True)\n find_addr = 95224\n cond = inspector.get_condition_at(Tactic.near_path_constraint,\n object_name=name_libmagic_so, relative_addr=find_addr)\n print('post condition = {}'.format(cond))\n inspector.run(args=['./sample.tar'], env={'LD_LIBRARY_PATH': os.environ\n ['LD_LIBRARY_PATH']})\n return inspector.collect(cond)\n\n\nif __name__ == '__main__':\n res = test_file()\n print(res)\n assert len(res) > 0\n",
"step-5": "#!/usr/bin/python\nimport os\nfrom nao.tactics import Tactic\nfrom nao.inspector import Inspector\n\ndef test_file():\n print(\"\\n[*] === file ===\")\n name_libmagic_so = 'libmagic.so.1'\n inspector = Inspector(\"./sample/file\", debug=True)\n # find_addr = 0x1742D # ret block of is_tar\n find_addr = 0x173F8 # return 3 at is_tar\n # find_addr = 0x17293\n\n cond = inspector.get_condition_at(Tactic.near_path_constraint, object_name=name_libmagic_so, relative_addr=find_addr)\n print(\"post condition = {}\".format(cond))\n\n inspector.run(args=[\"./sample.tar\"], env={'LD_LIBRARY_PATH': os.environ['LD_LIBRARY_PATH']})\n return inspector.collect(cond)\n\nif __name__ == \"__main__\":\n res = test_file()\n print(res)\n assert len(res) > 0\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#embaralhar sorteio
import random
a1 = input('Primeiro aluno: ')
a2 = input('Primeiro segundo: ')
a3 = input('Primeiro terceiro: ')
a4 = input('Primeiro quarto: ')
lista = [a1, a2, a3, a4]
random.shuffle(lista)
print('A ordem de apresentacao será')
print(lista)
|
normal
|
{
"blob_id": "9a0e24fbe9f51dc914d891e90196c2ff4e65f04a",
"index": 9652,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)\n",
"step-3": "<mask token>\na1 = input('Primeiro aluno: ')\na2 = input('Primeiro segundo: ')\na3 = input('Primeiro terceiro: ')\na4 = input('Primeiro quarto: ')\nlista = [a1, a2, a3, a4]\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)\n",
"step-4": "import random\na1 = input('Primeiro aluno: ')\na2 = input('Primeiro segundo: ')\na3 = input('Primeiro terceiro: ')\na4 = input('Primeiro quarto: ')\nlista = [a1, a2, a3, a4]\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)\n",
"step-5": "#embaralhar sorteio\nimport random\na1 = input('Primeiro aluno: ')\na2 = input('Primeiro segundo: ')\na3 = input('Primeiro terceiro: ')\na4 = input('Primeiro quarto: ')\nlista = [a1, a2, a3, a4]\nrandom.shuffle(lista)\nprint('A ordem de apresentacao será')\nprint(lista)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ArchiveParserTest(unittest.TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def testReadFile(self):
"""Tests that file is read correctly.
Tests that correctly formatted file in archive is read correctly.
"""
content = archive_parser.Archive.GLOBAL_SIG
file_name = 'test_file'
content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -
len(file_name))
content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH
content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH
content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH
content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH
message = 'test file contents'
message_size = str(len(message))
content += message_size + ' ' * (archive_parser.Archive.
CONTENT_SIZE_LENGTH - len(message_size))
content += archive_parser.Archive.END_TAG
content += message
archive = archive_parser.Archive(content)
archive.Parse()
self.assertIn(file_name, archive.files)
self.assertEquals(archive.files[file_name], message)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ArchiveParserTest(unittest.TestCase):
<|reserved_special_token_0|>
def testReadHeaderPass(self):
"""Tests that archive is read when header is correct.
Parses archive content containing only the signature.
"""
try:
archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)
archive.Parse()
except ValueError:
self.fail('Archive reader read improperly.')
<|reserved_special_token_0|>
def testReadFile(self):
"""Tests that file is read correctly.
Tests that correctly formatted file in archive is read correctly.
"""
content = archive_parser.Archive.GLOBAL_SIG
file_name = 'test_file'
content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -
len(file_name))
content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH
content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH
content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH
content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH
message = 'test file contents'
message_size = str(len(message))
content += message_size + ' ' * (archive_parser.Archive.
CONTENT_SIZE_LENGTH - len(message_size))
content += archive_parser.Archive.END_TAG
content += message
archive = archive_parser.Archive(content)
archive.Parse()
self.assertIn(file_name, archive.files)
self.assertEquals(archive.files[file_name], message)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ArchiveParserTest(unittest.TestCase):
"""Unit tests for archive_parser of vts.utils.python.archive.
"""
def testReadHeaderPass(self):
"""Tests that archive is read when header is correct.
Parses archive content containing only the signature.
"""
try:
archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)
archive.Parse()
except ValueError:
self.fail('Archive reader read improperly.')
def testReadHeaderFail(self):
"""Tests that parser throws error when header is invalid.
Parses archive content lacking the correct signature.
"""
archive = archive_parser.Archive('Fail.')
self.assertRaises(ValueError, archive.Parse)
def testReadFile(self):
"""Tests that file is read correctly.
Tests that correctly formatted file in archive is read correctly.
"""
content = archive_parser.Archive.GLOBAL_SIG
file_name = 'test_file'
content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -
len(file_name))
content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH
content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH
content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH
content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH
message = 'test file contents'
message_size = str(len(message))
content += message_size + ' ' * (archive_parser.Archive.
CONTENT_SIZE_LENGTH - len(message_size))
content += archive_parser.Archive.END_TAG
content += message
archive = archive_parser.Archive(content)
archive.Parse()
self.assertIn(file_name, archive.files)
self.assertEquals(archive.files[file_name], message)
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import os
import unittest
from vts.utils.python.archive import archive_parser
class ArchiveParserTest(unittest.TestCase):
"""Unit tests for archive_parser of vts.utils.python.archive.
"""
def testReadHeaderPass(self):
"""Tests that archive is read when header is correct.
Parses archive content containing only the signature.
"""
try:
archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)
archive.Parse()
except ValueError:
self.fail('Archive reader read improperly.')
def testReadHeaderFail(self):
"""Tests that parser throws error when header is invalid.
Parses archive content lacking the correct signature.
"""
archive = archive_parser.Archive('Fail.')
self.assertRaises(ValueError, archive.Parse)
def testReadFile(self):
"""Tests that file is read correctly.
Tests that correctly formatted file in archive is read correctly.
"""
content = archive_parser.Archive.GLOBAL_SIG
file_name = 'test_file'
content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -
len(file_name))
content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH
content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH
content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH
content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH
message = 'test file contents'
message_size = str(len(message))
content += message_size + ' ' * (archive_parser.Archive.
CONTENT_SIZE_LENGTH - len(message_size))
content += archive_parser.Archive.END_TAG
content += message
archive = archive_parser.Archive(content)
archive.Parse()
self.assertIn(file_name, archive.files)
self.assertEquals(archive.files[file_name], message)
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
#!/usr/bin/env python
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import unittest
from vts.utils.python.archive import archive_parser
class ArchiveParserTest(unittest.TestCase):
"""Unit tests for archive_parser of vts.utils.python.archive.
"""
def testReadHeaderPass(self):
"""Tests that archive is read when header is correct.
Parses archive content containing only the signature.
"""
try:
archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)
archive.Parse()
except ValueError:
self.fail('Archive reader read improperly.')
def testReadHeaderFail(self):
"""Tests that parser throws error when header is invalid.
Parses archive content lacking the correct signature.
"""
archive = archive_parser.Archive('Fail.')
self.assertRaises(ValueError, archive.Parse)
def testReadFile(self):
"""Tests that file is read correctly.
Tests that correctly formatted file in archive is read correctly.
"""
content = archive_parser.Archive.GLOBAL_SIG
file_name = 'test_file'
content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -
len(file_name))
content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH
content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH
content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH
content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH
message = 'test file contents'
message_size = str(len(message))
content += message_size + ' ' * (archive_parser.Archive.CONTENT_SIZE_LENGTH -
len(message_size))
content += archive_parser.Archive.END_TAG
content += message
archive = archive_parser.Archive(content)
archive.Parse()
self.assertIn(file_name, archive.files)
self.assertEquals(archive.files[file_name], message)
if __name__ == "__main__":
unittest.main()
|
flexible
|
{
"blob_id": "2ea335dd8d879731aad7713499440db6d1f60d36",
"index": 2427,
"step-1": "<mask token>\n\n\nclass ArchiveParserTest(unittest.TestCase):\n <mask token>\n <mask token>\n <mask token>\n\n def testReadFile(self):\n \"\"\"Tests that file is read correctly.\n\n Tests that correctly formatted file in archive is read correctly.\n \"\"\"\n content = archive_parser.Archive.GLOBAL_SIG\n file_name = 'test_file'\n content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -\n len(file_name))\n content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH\n content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH\n content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH\n content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH\n message = 'test file contents'\n message_size = str(len(message))\n content += message_size + ' ' * (archive_parser.Archive.\n CONTENT_SIZE_LENGTH - len(message_size))\n content += archive_parser.Archive.END_TAG\n content += message\n archive = archive_parser.Archive(content)\n archive.Parse()\n self.assertIn(file_name, archive.files)\n self.assertEquals(archive.files[file_name], message)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ArchiveParserTest(unittest.TestCase):\n <mask token>\n\n def testReadHeaderPass(self):\n \"\"\"Tests that archive is read when header is correct.\n\n Parses archive content containing only the signature.\n \"\"\"\n try:\n archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)\n archive.Parse()\n except ValueError:\n self.fail('Archive reader read improperly.')\n <mask token>\n\n def testReadFile(self):\n \"\"\"Tests that file is read correctly.\n\n Tests that correctly formatted file in archive is read correctly.\n \"\"\"\n content = archive_parser.Archive.GLOBAL_SIG\n file_name = 'test_file'\n content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -\n len(file_name))\n content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH\n content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH\n content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH\n content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH\n message = 'test file contents'\n message_size = str(len(message))\n content += message_size + ' ' * (archive_parser.Archive.\n CONTENT_SIZE_LENGTH - len(message_size))\n content += archive_parser.Archive.END_TAG\n content += message\n archive = archive_parser.Archive(content)\n archive.Parse()\n self.assertIn(file_name, archive.files)\n self.assertEquals(archive.files[file_name], message)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ArchiveParserTest(unittest.TestCase):\n \"\"\"Unit tests for archive_parser of vts.utils.python.archive.\n \"\"\"\n\n def testReadHeaderPass(self):\n \"\"\"Tests that archive is read when header is correct.\n\n Parses archive content containing only the signature.\n \"\"\"\n try:\n archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)\n archive.Parse()\n except ValueError:\n self.fail('Archive reader read improperly.')\n\n def testReadHeaderFail(self):\n \"\"\"Tests that parser throws error when header is invalid.\n\n Parses archive content lacking the correct signature.\n \"\"\"\n archive = archive_parser.Archive('Fail.')\n self.assertRaises(ValueError, archive.Parse)\n\n def testReadFile(self):\n \"\"\"Tests that file is read correctly.\n\n Tests that correctly formatted file in archive is read correctly.\n \"\"\"\n content = archive_parser.Archive.GLOBAL_SIG\n file_name = 'test_file'\n content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -\n len(file_name))\n content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH\n content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH\n content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH\n content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH\n message = 'test file contents'\n message_size = str(len(message))\n content += message_size + ' ' * (archive_parser.Archive.\n CONTENT_SIZE_LENGTH - len(message_size))\n content += archive_parser.Archive.END_TAG\n content += message\n archive = archive_parser.Archive(content)\n archive.Parse()\n self.assertIn(file_name, archive.files)\n self.assertEquals(archive.files[file_name], message)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import os\nimport unittest\nfrom vts.utils.python.archive import archive_parser\n\n\nclass ArchiveParserTest(unittest.TestCase):\n \"\"\"Unit tests for archive_parser of vts.utils.python.archive.\n \"\"\"\n\n def testReadHeaderPass(self):\n \"\"\"Tests that archive is read when header is correct.\n\n Parses archive content containing only the signature.\n \"\"\"\n try:\n archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)\n archive.Parse()\n except ValueError:\n self.fail('Archive reader read improperly.')\n\n def testReadHeaderFail(self):\n \"\"\"Tests that parser throws error when header is invalid.\n\n Parses archive content lacking the correct signature.\n \"\"\"\n archive = archive_parser.Archive('Fail.')\n self.assertRaises(ValueError, archive.Parse)\n\n def testReadFile(self):\n \"\"\"Tests that file is read correctly.\n\n Tests that correctly formatted file in archive is read correctly.\n \"\"\"\n content = archive_parser.Archive.GLOBAL_SIG\n file_name = 'test_file'\n content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -\n len(file_name))\n content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH\n content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH\n content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH\n content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH\n message = 'test file contents'\n message_size = str(len(message))\n content += message_size + ' ' * (archive_parser.Archive.\n CONTENT_SIZE_LENGTH - len(message_size))\n content += archive_parser.Archive.END_TAG\n content += message\n archive = archive_parser.Archive(content)\n archive.Parse()\n self.assertIn(file_name, archive.files)\n self.assertEquals(archive.files[file_name], message)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "#!/usr/bin/env python\n#\n# Copyright (C) 2016 The Android Open Source Project\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport os\nimport unittest\n\nfrom vts.utils.python.archive import archive_parser\n\n\nclass ArchiveParserTest(unittest.TestCase):\n \"\"\"Unit tests for archive_parser of vts.utils.python.archive.\n \"\"\"\n\n def testReadHeaderPass(self):\n \"\"\"Tests that archive is read when header is correct.\n\n Parses archive content containing only the signature.\n \"\"\"\n try:\n archive = archive_parser.Archive(archive_parser.Archive.GLOBAL_SIG)\n archive.Parse()\n except ValueError:\n self.fail('Archive reader read improperly.')\n\n def testReadHeaderFail(self):\n \"\"\"Tests that parser throws error when header is invalid.\n\n Parses archive content lacking the correct signature.\n \"\"\"\n archive = archive_parser.Archive('Fail.')\n self.assertRaises(ValueError, archive.Parse)\n\n def testReadFile(self):\n \"\"\"Tests that file is read correctly.\n\n Tests that correctly formatted file in archive is read correctly.\n \"\"\"\n content = archive_parser.Archive.GLOBAL_SIG\n file_name = 'test_file'\n content += file_name + ' ' * (archive_parser.Archive.FILE_ID_LENGTH -\n len(file_name))\n content += ' ' * archive_parser.Archive.FILE_TIMESTAMP_LENGTH\n content += ' ' * archive_parser.Archive.OWNER_ID_LENGTH\n content += ' ' * archive_parser.Archive.GROUP_ID_LENGTH\n content += ' ' * archive_parser.Archive.FILE_MODE_LENGTH\n\n 
message = 'test file contents'\n message_size = str(len(message))\n content += message_size + ' ' * (archive_parser.Archive.CONTENT_SIZE_LENGTH -\n len(message_size))\n content += archive_parser.Archive.END_TAG\n content += message\n archive = archive_parser.Archive(content)\n archive.Parse()\n self.assertIn(file_name, archive.files)\n self.assertEquals(archive.files[file_name], message)\n\nif __name__ == \"__main__\":\n unittest.main()\n",
"step-ids": [
2,
3,
6,
7,
8
]
}
|
[
2,
3,
6,
7,
8
] |
from click.testing import CliRunner
from apitest.actions.cli import cli
def test_sendto_cli_runs_ok():
    """The 'sendto' sub-command should terminate with exit status 0."""
    cli_runner = CliRunner()
    outcome = cli_runner.invoke(cli, ["sendto"])
    assert outcome.exit_code == 0
|
normal
|
{
"blob_id": "7537deb4560e880365b23a99584d0b1f8fa3daf4",
"index": 5675,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_sendto_cli_runs_ok():\n runner = CliRunner()\n result = runner.invoke(cli, ['sendto'])\n assert result.exit_code == 0\n",
"step-3": "from click.testing import CliRunner\nfrom apitest.actions.cli import cli\n\n\ndef test_sendto_cli_runs_ok():\n runner = CliRunner()\n result = runner.invoke(cli, ['sendto'])\n assert result.exit_code == 0\n",
"step-4": "from click.testing import CliRunner\nfrom apitest.actions.cli import cli\n\n\ndef test_sendto_cli_runs_ok():\n runner = CliRunner()\n result = runner.invoke(cli, [\"sendto\"])\n \n assert result.exit_code == 0\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
def GetDensity(T, P, config):
    """Return the ideal-gas density rho = P / (R * T).

    The specific gas constant R is read from the configuration at
    config["Flow"]["mixture"]["gasConstant"].
    """
    gas_constant = config["Flow"]["mixture"]["gasConstant"]
    return P / (T * gas_constant)
def GetViscosity(T, config):
    """Return the dynamic viscosity at temperature T.

    The model is selected by config["Flow"]["mixture"]["viscosityModel"]["type"]:
    "Constant" (fixed value), "PowerLaw" (exponent 0.7), or "Sutherland".
    Any other model type fails the final assertion.
    """
    model = config["Flow"]["mixture"]["viscosityModel"]
    kind = model["type"]
    if kind == "Constant":
        return model["Visc"]
    if kind == "PowerLaw":
        return model["ViscRef"] * (T / model["TempRef"]) ** 0.7
    if kind == "Sutherland":
        scaled = model["ViscRef"] * (T / model["TempRef"]) ** 1.5
        return scaled * (model["TempRef"] + model["SRef"]) / (T + model["SRef"])
    # Unknown model type: keep the original hard failure.
    assert False
|
normal
|
{
"blob_id": "0e47a7d9cd6809886674291d6a535dd18205a012",
"index": 5455,
"step-1": "<mask token>\n",
"step-2": "def GetDensity(T, P, config):\n return P / (T * config['Flow']['mixture']['gasConstant'])\n\n\n<mask token>\n",
"step-3": "def GetDensity(T, P, config):\n return P / (T * config['Flow']['mixture']['gasConstant'])\n\n\ndef GetViscosity(T, config):\n if config['Flow']['mixture']['viscosityModel']['type'] == 'Constant':\n viscosity = config['Flow']['mixture']['viscosityModel']['Visc']\n elif config['Flow']['mixture']['viscosityModel']['type'] == 'PowerLaw':\n viscosity = config['Flow']['mixture']['viscosityModel']['ViscRef'] * (T\n / config['Flow']['mixture']['viscosityModel']['TempRef']) ** 0.7\n elif config['Flow']['mixture']['viscosityModel']['type'] == 'Sutherland':\n viscosity = config['Flow']['mixture']['viscosityModel']['ViscRef'] * (T\n / config['Flow']['mixture']['viscosityModel']['TempRef']\n ) ** 1.5 * (config['Flow']['mixture']['viscosityModel'][\n 'TempRef'] + config['Flow']['mixture']['viscosityModel']['SRef']\n ) / (T + config['Flow']['mixture']['viscosityModel']['SRef'])\n else:\n assert False\n return viscosity\n",
"step-4": "#!/usr/bin/env python3\n\ndef GetDensity(T, P, config):\n return P/(T*config[\"Flow\"][\"mixture\"][\"gasConstant\"])\n\ndef GetViscosity(T, config):\n if (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"type\"] == \"Constant\"):\n viscosity = config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"Visc\"]\n elif (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"type\"] == \"PowerLaw\"):\n viscosity = config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"ViscRef\"]*(T/config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"TempRef\"])**0.7\n elif (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"type\"] == \"Sutherland\"):\n viscosity = (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"ViscRef\"]*(T/config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"TempRef\"])**1.5)*(config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"TempRef\"]+config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"SRef\"])/(T+config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"SRef\"])\n else: \n assert False\n return viscosity\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from common.get_keyword import GetKeyword
from common.operation_Excel import OperationExcel
from common.op_database import OpDatabase
from interface.login import Login
from interface.address import Address
import unittest
import ddt
# Test data: rows loaded at import time from Excel workbooks and fed to the
# test methods through ddt. NOTE(review): absolute Windows paths make this
# module machine-specific — confirm before running elsewhere.
op_excel = OperationExcel()
add_file = r'D:\pyCharm\Demo\pycode\Requests\20191109\课堂练习\ECShop_interface\data\add_address.xlsx'
modify_file = r'D:\pyCharm\Demo\pycode\Requests\20191109\课堂练习\ECShop_interface\data\modify_address.xlsx'
test_data1 = op_excel.get_data(add_file)  # rows for test_01_add_address
test_data2 = op_excel.get_data(modify_file)  # rows for test_03_modify_address
@ddt.ddt
class TestAddress(unittest.TestCase):
    """End-to-end tests for the ECShop shipping-address API.

    Covers add / list / update / delete. Each test talks to the live
    ECShop endpoint and cross-checks the response against the
    ``ecs_user_address`` table through OpDatabase.
    """

    # Test fixture: runs before every test method.
    def setUp(self) -> None:
        # Login payload for the test account.
        login_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/user/signin'
        login_data = {"name": "tester", "password": "123456"}
        # Log in and keep the session token for all subsequent API calls.
        login = Login(url=login_url)
        self.session = login.get_session(login_data)
        self.user_id = int(GetKeyword.get_keyword(self.session, 'uid'))
        # Database helper used to verify server-side state.
        self.op_database = OpDatabase()

    @classmethod
    def setUpClass(cls) -> None:
        # Wipe address data once before the whole class runs.
        op_database = OpDatabase()
        op_database.clear_mysql()

    @classmethod
    def tearDownClass(cls) -> None:
        # Wipe address data again after the whole class has run.
        op_database = OpDatabase()
        op_database.clear_mysql()

    # Test cases.
    # Add a shipping address (data-driven from the Excel rows).
    @ddt.data(*test_data1)
    def test_01_add_address(self, data):
        # Query counting this user's addresses.
        sql = f'select * from ecs_user_address where user_id = {self.user_id}'
        # Address count before the add call.
        before = self.op_database.get_all(sql)
        # Payload for the add-address endpoint.
        add_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/add'
        add_data = {
            "address": {"default_address": 0, "consignee": f"{data['consignee']}", "tel": f"{data['tel']}",
                        "zipcode": f"{data['postcode']}", "country": "1", "city": "271", "id": 0,
                        "email": f"{data['email']}", "address": f"{data['detail']}",
                        "province": "", "district": "", "mobile": ""}, "session": self.session
        }
        # Call the add-address endpoint.
        Address.add_address(url=add_url, data=add_data)
        # Address count after the add call.
        after = self.op_database.get_all(sql)
        result = len(after) - len(before)  # actual result: rows added
        # Expected row delta comes from the test data.
        self.assertEqual(data['expect'], result, msg='断言失败')

    # List shipping addresses.
    def test_02_check_address(self):
        # Payload for the list endpoint.
        url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/list'
        data = {"session": self.session}
        # Call the list endpoint.
        response = Address.check_address(url, data)
        # Extract the 'data' field from the response.
        addr_list = Address.get_value(response, 'data')  # actual result
        # Query counting this user's addresses in the database.
        sql = f'select * from ecs_user_address where user_id = {self.user_id}'
        # Database row count is the expected result.
        sql_addr = self.op_database.get_all(sql)  # expected result
        # API list length must match the database row count.
        self.assertEqual(len(sql_addr), len(addr_list), msg='断言失败')

    # Modify a shipping address (data-driven from the Excel rows).
    @ddt.data(*test_data2)
    def test_03_modify_address(self, data):
        # Fetch the address_id values for this user's stored addresses.
        sql = f'select address_id from ecs_user_address where user_id = {self.user_id}'
        id_list = self.op_database.get_all(sql)
        # Payload for the update endpoint (targets the first address found).
        url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/update'
        modify_data = {
            "address": {"default_address": 0, "consignee": f"{data['consignee']}", "tel": f"{data['tel']}",
                        "zipcode": f"{data['postcode']}", "country": "1", "city": "271", "id": 0, "email": f"{data['email']}",
                        "address": f"{data['detail']}", "province": "0", "district": "0", "mobile": f"{data['mobile']}"},
            "address_id": id_list[0]['address_id'], "session": self.session
        }
        # Call the update endpoint.
        response = Address.modify_address(url, modify_data)
        # Extract the 'succeed' flag from the response.
        succeed = Address.get_value(response, 'succeed')
        # Assertion -- TODO: database verification of the update is missing.
        self.assertEqual(data['expect'], succeed, msg='断言失败')

    # Delete a shipping address.
    def test_04_delete_address(self):
        # Fetch the address_id values for this user's stored addresses.
        sql = f'select address_id from ecs_user_address where user_id = {self.user_id}'
        id_list = self.op_database.get_all(sql)
        # Payload for the delete endpoint (targets the first address found).
        url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/delete'
        delete_data = {"address_id": id_list[0]['address_id'], "session": self.session}
        # Call the delete endpoint.
        response = Address.delete_address(url, delete_data)
        # Extract the 'succeed' flag from the response.
        succeed = Address.get_value(response, 'succeed')  # actual result
        # Look the deleted address up again in the database.
        sql = f"select * from ecs_user_address where address_id = {id_list[0]['address_id']}"
        info = self.op_database.get_one(sql)
        result = False if info != None else True  # expected: True iff the row is gone
        # NOTE(review): `info != None` should be `info is not None` per PEP 8.
        self.assertEqual(result, succeed, msg='断言失败')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
normal
|
{
"blob_id": "0f0b3eea9dc397d32e81749304041abaf6651e94",
"index": 1873,
"step-1": "<mask token>\n\n\[email protected]\nclass TestAddress(unittest.TestCase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_02_check_address(self):\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/list'\n data = {'session': self.session}\n response = Address.check_address(url, data)\n addr_list = Address.get_value(response, 'data')\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n sql_addr = self.op_database.get_all(sql)\n self.assertEqual(len(sql_addr), len(addr_list), msg='断言失败')\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]\nclass TestAddress(unittest.TestCase):\n\n def setUp(self) ->None:\n login_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/user/signin'\n login_data = {'name': 'tester', 'password': '123456'}\n login = Login(url=login_url)\n self.session = login.get_session(login_data)\n self.user_id = int(GetKeyword.get_keyword(self.session, 'uid'))\n self.op_database = OpDatabase()\n\n @classmethod\n def setUpClass(cls) ->None:\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n @classmethod\n def tearDownClass(cls) ->None:\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n @ddt.data(*test_data1)\n def test_01_add_address(self, data):\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n before = self.op_database.get_all(sql)\n add_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/add'\n add_data = {'address': {'default_address': 0, 'consignee':\n f\"{data['consignee']}\", 'tel': f\"{data['tel']}\", 'zipcode':\n f\"{data['postcode']}\", 'country': '1', 'city': '271', 'id': 0,\n 'email': f\"{data['email']}\", 'address': f\"{data['detail']}\",\n 'province': '', 'district': '', 'mobile': ''}, 'session': self.\n session}\n Address.add_address(url=add_url, data=add_data)\n after = self.op_database.get_all(sql)\n result = len(after) - len(before)\n self.assertEqual(data['expect'], result, msg='断言失败')\n\n def test_02_check_address(self):\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/list'\n data = {'session': self.session}\n response = Address.check_address(url, data)\n addr_list = Address.get_value(response, 'data')\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n sql_addr = self.op_database.get_all(sql)\n self.assertEqual(len(sql_addr), len(addr_list), msg='断言失败')\n\n @ddt.data(*test_data2)\n def test_03_modify_address(self, data):\n sql = (\n f'select address_id from ecs_user_address where user_id = {self.user_id}'\n )\n id_list = self.op_database.get_all(sql)\n 
url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/update'\n modify_data = {'address': {'default_address': 0, 'consignee':\n f\"{data['consignee']}\", 'tel': f\"{data['tel']}\", 'zipcode':\n f\"{data['postcode']}\", 'country': '1', 'city': '271', 'id': 0,\n 'email': f\"{data['email']}\", 'address': f\"{data['detail']}\",\n 'province': '0', 'district': '0', 'mobile': f\"{data['mobile']}\"\n }, 'address_id': id_list[0]['address_id'], 'session': self.session}\n response = Address.modify_address(url, modify_data)\n succeed = Address.get_value(response, 'succeed')\n self.assertEqual(data['expect'], succeed, msg='断言失败')\n\n def test_04_delete_address(self):\n sql = (\n f'select address_id from ecs_user_address where user_id = {self.user_id}'\n )\n id_list = self.op_database.get_all(sql)\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/delete'\n delete_data = {'address_id': id_list[0]['address_id'], 'session':\n self.session}\n response = Address.delete_address(url, delete_data)\n succeed = Address.get_value(response, 'succeed')\n sql = (\n f\"select * from ecs_user_address where address_id = {id_list[0]['address_id']}\"\n )\n info = self.op_database.get_one(sql)\n result = False if info != None else True\n self.assertEqual(result, succeed, msg='断言失败')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\[email protected]\nclass TestAddress(unittest.TestCase):\n\n def setUp(self) ->None:\n login_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/user/signin'\n login_data = {'name': 'tester', 'password': '123456'}\n login = Login(url=login_url)\n self.session = login.get_session(login_data)\n self.user_id = int(GetKeyword.get_keyword(self.session, 'uid'))\n self.op_database = OpDatabase()\n\n @classmethod\n def setUpClass(cls) ->None:\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n @classmethod\n def tearDownClass(cls) ->None:\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n @ddt.data(*test_data1)\n def test_01_add_address(self, data):\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n before = self.op_database.get_all(sql)\n add_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/add'\n add_data = {'address': {'default_address': 0, 'consignee':\n f\"{data['consignee']}\", 'tel': f\"{data['tel']}\", 'zipcode':\n f\"{data['postcode']}\", 'country': '1', 'city': '271', 'id': 0,\n 'email': f\"{data['email']}\", 'address': f\"{data['detail']}\",\n 'province': '', 'district': '', 'mobile': ''}, 'session': self.\n session}\n Address.add_address(url=add_url, data=add_data)\n after = self.op_database.get_all(sql)\n result = len(after) - len(before)\n self.assertEqual(data['expect'], result, msg='断言失败')\n\n def test_02_check_address(self):\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/list'\n data = {'session': self.session}\n response = Address.check_address(url, data)\n addr_list = Address.get_value(response, 'data')\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n sql_addr = self.op_database.get_all(sql)\n self.assertEqual(len(sql_addr), len(addr_list), msg='断言失败')\n\n @ddt.data(*test_data2)\n def test_03_modify_address(self, data):\n sql = (\n f'select address_id from ecs_user_address where user_id = {self.user_id}'\n )\n id_list = self.op_database.get_all(sql)\n 
url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/update'\n modify_data = {'address': {'default_address': 0, 'consignee':\n f\"{data['consignee']}\", 'tel': f\"{data['tel']}\", 'zipcode':\n f\"{data['postcode']}\", 'country': '1', 'city': '271', 'id': 0,\n 'email': f\"{data['email']}\", 'address': f\"{data['detail']}\",\n 'province': '0', 'district': '0', 'mobile': f\"{data['mobile']}\"\n }, 'address_id': id_list[0]['address_id'], 'session': self.session}\n response = Address.modify_address(url, modify_data)\n succeed = Address.get_value(response, 'succeed')\n self.assertEqual(data['expect'], succeed, msg='断言失败')\n\n def test_04_delete_address(self):\n sql = (\n f'select address_id from ecs_user_address where user_id = {self.user_id}'\n )\n id_list = self.op_database.get_all(sql)\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/delete'\n delete_data = {'address_id': id_list[0]['address_id'], 'session':\n self.session}\n response = Address.delete_address(url, delete_data)\n succeed = Address.get_value(response, 'succeed')\n sql = (\n f\"select * from ecs_user_address where address_id = {id_list[0]['address_id']}\"\n )\n info = self.op_database.get_one(sql)\n result = False if info != None else True\n self.assertEqual(result, succeed, msg='断言失败')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nop_excel = OperationExcel()\nadd_file = (\n 'D:\\\\pyCharm\\\\Demo\\\\pycode\\\\Requests\\\\20191109\\\\课堂练习\\\\ECShop_interface\\\\data\\\\add_address.xlsx'\n )\nmodify_file = (\n 'D:\\\\pyCharm\\\\Demo\\\\pycode\\\\Requests\\\\20191109\\\\课堂练习\\\\ECShop_interface\\\\data\\\\modify_address.xlsx'\n )\ntest_data1 = op_excel.get_data(add_file)\ntest_data2 = op_excel.get_data(modify_file)\n\n\[email protected]\nclass TestAddress(unittest.TestCase):\n\n def setUp(self) ->None:\n login_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/user/signin'\n login_data = {'name': 'tester', 'password': '123456'}\n login = Login(url=login_url)\n self.session = login.get_session(login_data)\n self.user_id = int(GetKeyword.get_keyword(self.session, 'uid'))\n self.op_database = OpDatabase()\n\n @classmethod\n def setUpClass(cls) ->None:\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n @classmethod\n def tearDownClass(cls) ->None:\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n @ddt.data(*test_data1)\n def test_01_add_address(self, data):\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n before = self.op_database.get_all(sql)\n add_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/add'\n add_data = {'address': {'default_address': 0, 'consignee':\n f\"{data['consignee']}\", 'tel': f\"{data['tel']}\", 'zipcode':\n f\"{data['postcode']}\", 'country': '1', 'city': '271', 'id': 0,\n 'email': f\"{data['email']}\", 'address': f\"{data['detail']}\",\n 'province': '', 'district': '', 'mobile': ''}, 'session': self.\n session}\n Address.add_address(url=add_url, data=add_data)\n after = self.op_database.get_all(sql)\n result = len(after) - len(before)\n self.assertEqual(data['expect'], result, msg='断言失败')\n\n def test_02_check_address(self):\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/list'\n data = {'session': self.session}\n response = Address.check_address(url, data)\n addr_list = 
Address.get_value(response, 'data')\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n sql_addr = self.op_database.get_all(sql)\n self.assertEqual(len(sql_addr), len(addr_list), msg='断言失败')\n\n @ddt.data(*test_data2)\n def test_03_modify_address(self, data):\n sql = (\n f'select address_id from ecs_user_address where user_id = {self.user_id}'\n )\n id_list = self.op_database.get_all(sql)\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/update'\n modify_data = {'address': {'default_address': 0, 'consignee':\n f\"{data['consignee']}\", 'tel': f\"{data['tel']}\", 'zipcode':\n f\"{data['postcode']}\", 'country': '1', 'city': '271', 'id': 0,\n 'email': f\"{data['email']}\", 'address': f\"{data['detail']}\",\n 'province': '0', 'district': '0', 'mobile': f\"{data['mobile']}\"\n }, 'address_id': id_list[0]['address_id'], 'session': self.session}\n response = Address.modify_address(url, modify_data)\n succeed = Address.get_value(response, 'succeed')\n self.assertEqual(data['expect'], succeed, msg='断言失败')\n\n def test_04_delete_address(self):\n sql = (\n f'select address_id from ecs_user_address where user_id = {self.user_id}'\n )\n id_list = self.op_database.get_all(sql)\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/delete'\n delete_data = {'address_id': id_list[0]['address_id'], 'session':\n self.session}\n response = Address.delete_address(url, delete_data)\n succeed = Address.get_value(response, 'succeed')\n sql = (\n f\"select * from ecs_user_address where address_id = {id_list[0]['address_id']}\"\n )\n info = self.op_database.get_one(sql)\n result = False if info != None else True\n self.assertEqual(result, succeed, msg='断言失败')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "from common.get_keyword import GetKeyword\nfrom common.operation_Excel import OperationExcel\nfrom common.op_database import OpDatabase\nfrom interface.login import Login\nfrom interface.address import Address\nimport unittest\nimport ddt\n\n# 测试数据\nop_excel = OperationExcel()\nadd_file = r'D:\\pyCharm\\Demo\\pycode\\Requests\\20191109\\课堂练习\\ECShop_interface\\data\\add_address.xlsx'\nmodify_file = r'D:\\pyCharm\\Demo\\pycode\\Requests\\20191109\\课堂练习\\ECShop_interface\\data\\modify_address.xlsx'\ntest_data1 = op_excel.get_data(add_file)\ntest_data2 = op_excel.get_data(modify_file)\n\n\[email protected]\nclass TestAddress(unittest.TestCase):\n # 编写test fixture\n def setUp(self) -> None:\n # 登录数据\n login_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/user/signin'\n login_data = {\"name\": \"tester\", \"password\": \"123456\"}\n # 实例化登录对象\n login = Login(url=login_url)\n self.session = login.get_session(login_data)\n self.user_id = int(GetKeyword.get_keyword(self.session, 'uid'))\n # 实例化数据操作对象\n self.op_database = OpDatabase()\n\n @classmethod\n def setUpClass(cls) -> None:\n # 清空数据信息\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n @classmethod\n def tearDownClass(cls) -> None:\n # 清空数据信息\n op_database = OpDatabase()\n op_database.clear_mysql()\n\n # 编写test case\n # 添加收货地址\n @ddt.data(*test_data1)\n def test_01_add_address(self, data):\n # SQL语句\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n # 获取收货地址表中用户地址数\n before = self.op_database.get_all(sql)\n # 添加收货地址数据\n add_url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/add'\n add_data = {\n \"address\": {\"default_address\": 0, \"consignee\": f\"{data['consignee']}\", \"tel\": f\"{data['tel']}\",\n \"zipcode\": f\"{data['postcode']}\", \"country\": \"1\", \"city\": \"271\", \"id\": 0,\n \"email\": f\"{data['email']}\", \"address\": f\"{data['detail']}\",\n \"province\": \"\", \"district\": \"\", \"mobile\": \"\"}, \"session\": self.session\n }\n # 添加收货地址\n 
Address.add_address(url=add_url, data=add_data)\n # 获取收货地址表中用户地址数\n after = self.op_database.get_all(sql)\n result = len(after) - len(before) # 实际结果\n # 断言\n self.assertEqual(data['expect'], result, msg='断言失败')\n\n # 查看收货地址\n def test_02_check_address(self):\n # 查看收货地址数据\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/list'\n data = {\"session\": self.session}\n # 查看收货地址\n response = Address.check_address(url, data)\n # 获取返回数据中data的值\n addr_list = Address.get_value(response, 'data') # 实际结果\n # SQL语句\n sql = f'select * from ecs_user_address where user_id = {self.user_id}'\n # 获取收货地址表中用户地址数\n sql_addr = self.op_database.get_all(sql) # 期望结果\n # 断言\n self.assertEqual(len(sql_addr), len(addr_list), msg='断言失败')\n\n # 修改收货地址\n @ddt.data(*test_data2)\n def test_03_modify_address(self, data):\n # 读取收货地址表中的地址的address_id\n sql = f'select address_id from ecs_user_address where user_id = {self.user_id}'\n id_list = self.op_database.get_all(sql)\n # 修改收货地址数据\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/update'\n modify_data = {\n \"address\": {\"default_address\": 0, \"consignee\": f\"{data['consignee']}\", \"tel\": f\"{data['tel']}\",\n \"zipcode\": f\"{data['postcode']}\", \"country\": \"1\", \"city\": \"271\", \"id\": 0, \"email\": f\"{data['email']}\",\n \"address\": f\"{data['detail']}\", \"province\": \"0\", \"district\": \"0\", \"mobile\": f\"{data['mobile']}\"},\n \"address_id\": id_list[0]['address_id'], \"session\": self.session\n }\n # 修改收货地址\n response = Address.modify_address(url, modify_data)\n # 获取返回数据中的succeed\n succeed = Address.get_value(response, 'succeed')\n # 断言----缺少数据库验证代码\n self.assertEqual(data['expect'], succeed, msg='断言失败')\n\n # 删除收货地址\n def test_04_delete_address(self):\n # 读取收货地址表中的地址的address_id\n sql = f'select address_id from ecs_user_address where user_id = {self.user_id}'\n id_list = self.op_database.get_all(sql)\n # 删除收货地址数据\n url = 'http://ecshop.itsoso.cn/ECMobile/?url=/address/delete'\n delete_data = {\"address_id\": 
id_list[0]['address_id'], \"session\": self.session}\n # 删除收货地址\n response = Address.delete_address(url, delete_data)\n # 获取返回数据中succeed\n succeed = Address.get_value(response, 'succeed') # 实际结果\n # 查询收货地址表中该地址的信息\n sql = f\"select * from ecs_user_address where address_id = {id_list[0]['address_id']}\"\n info = self.op_database.get_one(sql)\n result = False if info != None else True # 期望结果\n # 断言\n self.assertEqual(result, succeed, msg='断言失败')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
2,
8,
9,
10,
12
]
}
|
[
2,
8,
9,
10,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_action_getter():
path = './../Version_1.0/Tests/General/Action_1.json'
document = json.loads(open(path).read())
gamestate = Gamestate.from_document(document['gamestate'])
nloops = 100
total_time = 0
for _ in range(nloops):
t = time()
action_getter.get_actions(gamestate)
total_time += time() - t
print('Time used to find all actions', str(nloops), 'times:', str(round
(total_time, 3)))
<|reserved_special_token_1|>
import json
from gamestate.gamestate_module import Gamestate
from time import time
from gamestate import action_getter as action_getter
def test_action_getter():
path = './../Version_1.0/Tests/General/Action_1.json'
document = json.loads(open(path).read())
gamestate = Gamestate.from_document(document['gamestate'])
nloops = 100
total_time = 0
for _ in range(nloops):
t = time()
action_getter.get_actions(gamestate)
total_time += time() - t
print('Time used to find all actions', str(nloops), 'times:', str(round
(total_time, 3)))
<|reserved_special_token_1|>
import json
from gamestate.gamestate_module import Gamestate
from time import time
from gamestate import action_getter as action_getter
def test_action_getter():
    """Benchmark action_getter.get_actions on a saved gamestate fixture.

    Loads a Gamestate from a JSON test document, calls get_actions() on it
    ``nloops`` times, and prints the accumulated wall-clock time (seconds,
    rounded to 3 decimals).
    """
    path = "./../Version_1.0/Tests/General/Action_1.json"
    # Fix: the original leaked the file handle (open(path).read() was never
    # closed); a context manager closes it deterministically.
    with open(path) as fixture:
        document = json.load(fixture)
    gamestate = Gamestate.from_document(document["gamestate"])

    nloops = 100
    total_time = 0
    for _ in range(nloops):
        start = time()
        action_getter.get_actions(gamestate)
        total_time += time() - start

    print("Time used to find all actions", str(nloops), "times:", str(round(total_time, 3)))
|
flexible
|
{
"blob_id": "b16691429d83f6909a08b10cc0b310bb62cd550d",
"index": 3985,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_action_getter():\n path = './../Version_1.0/Tests/General/Action_1.json'\n document = json.loads(open(path).read())\n gamestate = Gamestate.from_document(document['gamestate'])\n nloops = 100\n total_time = 0\n for _ in range(nloops):\n t = time()\n action_getter.get_actions(gamestate)\n total_time += time() - t\n print('Time used to find all actions', str(nloops), 'times:', str(round\n (total_time, 3)))\n",
"step-3": "import json\nfrom gamestate.gamestate_module import Gamestate\nfrom time import time\nfrom gamestate import action_getter as action_getter\n\n\ndef test_action_getter():\n path = './../Version_1.0/Tests/General/Action_1.json'\n document = json.loads(open(path).read())\n gamestate = Gamestate.from_document(document['gamestate'])\n nloops = 100\n total_time = 0\n for _ in range(nloops):\n t = time()\n action_getter.get_actions(gamestate)\n total_time += time() - t\n print('Time used to find all actions', str(nloops), 'times:', str(round\n (total_time, 3)))\n",
"step-4": "import json\nfrom gamestate.gamestate_module import Gamestate\nfrom time import time\nfrom gamestate import action_getter as action_getter\n\n\ndef test_action_getter():\n path = \"./../Version_1.0/Tests/General/Action_1.json\"\n document = json.loads(open(path).read())\n gamestate = Gamestate.from_document(document[\"gamestate\"])\n\n nloops = 100\n total_time = 0\n for _ in range(nloops):\n t = time()\n action_getter.get_actions(gamestate)\n total_time += time() - t\n\n print(\"Time used to find all actions\", str(nloops), \"times:\", str(round(total_time, 3)))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from datetime import datetime
from poop import objstore
class Comment(objstore.Item):
    """A stored user comment that relates to another object-store item.

    ``relatesToId`` / ``relatesToVersion`` identify the item being commented
    on; ``relatesToCommentId`` optionally points at a parent comment.
    """

    __typename__ = 'comment'
    __table__ = 'comment'

    # Mapped columns (explicit database column name where it differs from
    # the attribute name).
    relatesToId = objstore.column('relates_to_id')
    relatesToVersion = objstore.column('relates_to_version')
    posted = objstore.column()
    approved = objstore.column()
    # Class-level default: no parent comment unless one is supplied.
    relatesToCommentId=None

    def __init__(self, *a, **k):
        """Create a comment; remaining args/kwargs go to objstore.Item.

        Keyword args consumed here: relatesTo (item or bare id), authorName,
        authorEmail, humanCheck, comment, relatesToCommentId.
        """
        # Pop our keyword arguments before delegating the rest to the base.
        relatesTo = k.pop('relatesTo')
        authorName = k.pop('authorName')
        authorEmail = k.pop('authorEmail')
        # Popped but never stored — presumably a spam/human-verification
        # field that must not reach the base class; TODO confirm.
        humanCheck = k.pop('humanCheck')
        comment = k.pop('comment')
        relatesToCommentId = k.pop('relatesToCommentId')
        super(Comment, self).__init__(*a, **k)
        # 'relatesTo' may be a stored item (has .id/.version) or a bare id,
        # in which case the version defaults to 1.
        if hasattr(relatesTo,'id'):
            self.relatesToId, self.relatesToVersion = relatesTo.id, relatesTo.version
        else:
            self.relatesToId, self.relatesToVersion = relatesTo,1
        self.authorName = authorName
        self.comment = comment
        self.authorEmail = authorEmail
        # Timestamped in UTC; new comments start unapproved.
        self.posted = datetime.utcnow()
        self.approved = False
        self.relatesToCommentId = relatesToCommentId
def registerTypes(store):
store.registerType(Comment)
|
normal
|
{
"blob_id": "e398908ba74306c5a746d7643b38f08651cf92ec",
"index": 4205,
"step-1": "<mask token>\n\n\nclass Comment(objstore.Item):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, *a, **k):\n relatesTo = k.pop('relatesTo')\n authorName = k.pop('authorName')\n authorEmail = k.pop('authorEmail')\n humanCheck = k.pop('humanCheck')\n comment = k.pop('comment')\n relatesToCommentId = k.pop('relatesToCommentId')\n super(Comment, self).__init__(*a, **k)\n if hasattr(relatesTo, 'id'):\n self.relatesToId, self.relatesToVersion = (relatesTo.id,\n relatesTo.version)\n else:\n self.relatesToId, self.relatesToVersion = relatesTo, 1\n self.authorName = authorName\n self.comment = comment\n self.authorEmail = authorEmail\n self.posted = datetime.utcnow()\n self.approved = False\n self.relatesToCommentId = relatesToCommentId\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Comment(objstore.Item):\n __typename__ = 'comment'\n __table__ = 'comment'\n relatesToId = objstore.column('relates_to_id')\n relatesToVersion = objstore.column('relates_to_version')\n posted = objstore.column()\n approved = objstore.column()\n relatesToCommentId = None\n\n def __init__(self, *a, **k):\n relatesTo = k.pop('relatesTo')\n authorName = k.pop('authorName')\n authorEmail = k.pop('authorEmail')\n humanCheck = k.pop('humanCheck')\n comment = k.pop('comment')\n relatesToCommentId = k.pop('relatesToCommentId')\n super(Comment, self).__init__(*a, **k)\n if hasattr(relatesTo, 'id'):\n self.relatesToId, self.relatesToVersion = (relatesTo.id,\n relatesTo.version)\n else:\n self.relatesToId, self.relatesToVersion = relatesTo, 1\n self.authorName = authorName\n self.comment = comment\n self.authorEmail = authorEmail\n self.posted = datetime.utcnow()\n self.approved = False\n self.relatesToCommentId = relatesToCommentId\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Comment(objstore.Item):\n __typename__ = 'comment'\n __table__ = 'comment'\n relatesToId = objstore.column('relates_to_id')\n relatesToVersion = objstore.column('relates_to_version')\n posted = objstore.column()\n approved = objstore.column()\n relatesToCommentId = None\n\n def __init__(self, *a, **k):\n relatesTo = k.pop('relatesTo')\n authorName = k.pop('authorName')\n authorEmail = k.pop('authorEmail')\n humanCheck = k.pop('humanCheck')\n comment = k.pop('comment')\n relatesToCommentId = k.pop('relatesToCommentId')\n super(Comment, self).__init__(*a, **k)\n if hasattr(relatesTo, 'id'):\n self.relatesToId, self.relatesToVersion = (relatesTo.id,\n relatesTo.version)\n else:\n self.relatesToId, self.relatesToVersion = relatesTo, 1\n self.authorName = authorName\n self.comment = comment\n self.authorEmail = authorEmail\n self.posted = datetime.utcnow()\n self.approved = False\n self.relatesToCommentId = relatesToCommentId\n\n\ndef registerTypes(store):\n store.registerType(Comment)\n",
"step-4": "from datetime import datetime\nfrom poop import objstore\n\n\nclass Comment(objstore.Item):\n __typename__ = 'comment'\n __table__ = 'comment'\n relatesToId = objstore.column('relates_to_id')\n relatesToVersion = objstore.column('relates_to_version')\n posted = objstore.column()\n approved = objstore.column()\n relatesToCommentId = None\n\n def __init__(self, *a, **k):\n relatesTo = k.pop('relatesTo')\n authorName = k.pop('authorName')\n authorEmail = k.pop('authorEmail')\n humanCheck = k.pop('humanCheck')\n comment = k.pop('comment')\n relatesToCommentId = k.pop('relatesToCommentId')\n super(Comment, self).__init__(*a, **k)\n if hasattr(relatesTo, 'id'):\n self.relatesToId, self.relatesToVersion = (relatesTo.id,\n relatesTo.version)\n else:\n self.relatesToId, self.relatesToVersion = relatesTo, 1\n self.authorName = authorName\n self.comment = comment\n self.authorEmail = authorEmail\n self.posted = datetime.utcnow()\n self.approved = False\n self.relatesToCommentId = relatesToCommentId\n\n\ndef registerTypes(store):\n store.registerType(Comment)\n",
"step-5": "from datetime import datetime\nfrom poop import objstore\n\n\n\nclass Comment(objstore.Item):\n\n\n __typename__ = 'comment'\n __table__ = 'comment'\n\n\n relatesToId = objstore.column('relates_to_id')\n relatesToVersion = objstore.column('relates_to_version')\n posted = objstore.column()\n approved = objstore.column()\n relatesToCommentId=None\n\n\n def __init__(self, *a, **k):\n relatesTo = k.pop('relatesTo')\n authorName = k.pop('authorName')\n authorEmail = k.pop('authorEmail')\n humanCheck = k.pop('humanCheck')\n comment = k.pop('comment')\n relatesToCommentId = k.pop('relatesToCommentId')\n super(Comment, self).__init__(*a, **k)\n if hasattr(relatesTo,'id'):\n self.relatesToId, self.relatesToVersion = relatesTo.id, relatesTo.version\n else:\n self.relatesToId, self.relatesToVersion = relatesTo,1\n self.authorName = authorName\n self.comment = comment\n self.authorEmail = authorEmail\n self.posted = datetime.utcnow()\n self.approved = False\n self.relatesToCommentId = relatesToCommentId\n\n\n\ndef registerTypes(store):\n store.registerType(Comment)\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.