code (stringlengths 13–1.2M) | order_type (stringclasses, 1 value) | original_example (dict) | step_ids (listlengths 1–5)
---|---|---|---|
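Each row pairs a full source string (`code`) with an `original_example` dict that records up to five intermediate snapshots (`step-1` through `step-5`, with absent steps stored as null) plus the row's `step_ids`. As a minimal, hypothetical sketch of how such a row can be inspected — the `collect_steps` helper and plain-dict loading are assumptions for illustration, not part of any published tooling — using the first row below as the worked example:

```python
# Hypothetical helper (not part of the dataset tooling): collect the non-null
# step snapshots from one row's original_example dict, keyed by step number.
def collect_steps(original_example: dict) -> list:
    steps = []
    for i in range(1, 6):
        text = original_example.get(f"step-{i}")
        if text is not None:  # absent steps appear as null in the table
            steps.append((i, text))
    return steps


# Values copied verbatim from the first row shown below.
row = {
    "blob_id": "1844cfb3e174454e0e95d91e4e55679caddcd56e",
    "index": 1963,
    "step-1": "<mask token>\n",
    "step-2": "from . import common_wizard\n",
    "step-3": None,
    "step-4": None,
    "step-5": None,
    "step-ids": [0, 1],
}

for number, text in collect_steps(row):
    print(number, repr(text))
# -> 1 '<mask token>\n'
# -> 2 'from . import common_wizard\n'
```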
from . import common_wizard
|
normal
|
{
"blob_id": "1844cfb3e174454e0e95d91e4e55679caddcd56e",
"index": 1963,
"step-1": "<mask token>\n",
"step-2": "from . import common_wizard\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import shelve
arguments = ["self", "info", "args", "world"]
minlevel = 2
helpstring = "moneyreset"
def main(connection, info, args, world) :
"""Resets a users money"""
money = shelve.open("money-%s.db" % (world.hostnicks[connection.host]), writeback=True)
money[info["sender"]] = {"money":100000, "maxmoney":100000, "items":[], "coinchance":[True for x in range(50)] + [False for x in range(50)]}
money.sync()
connection.ircsend(info["channel"], "%s: Your money data has been reset." % (info["sender"]))
|
normal
|
{
"blob_id": "95021cc01c0b85b512fd466797d4d128472773c3",
"index": 2943,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main(connection, info, args, world):\n \"\"\"Resets a users money\"\"\"\n money = shelve.open('money-%s.db' % world.hostnicks[connection.host],\n writeback=True)\n money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':\n [], 'coinchance': [(True) for x in range(50)] + [(False) for x in\n range(50)]}\n money.sync()\n connection.ircsend(info['channel'], \n '%s: Your money data has been reset.' % info['sender'])\n",
"step-3": "<mask token>\narguments = ['self', 'info', 'args', 'world']\nminlevel = 2\nhelpstring = 'moneyreset'\n\n\ndef main(connection, info, args, world):\n \"\"\"Resets a users money\"\"\"\n money = shelve.open('money-%s.db' % world.hostnicks[connection.host],\n writeback=True)\n money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':\n [], 'coinchance': [(True) for x in range(50)] + [(False) for x in\n range(50)]}\n money.sync()\n connection.ircsend(info['channel'], \n '%s: Your money data has been reset.' % info['sender'])\n",
"step-4": "import shelve\narguments = ['self', 'info', 'args', 'world']\nminlevel = 2\nhelpstring = 'moneyreset'\n\n\ndef main(connection, info, args, world):\n \"\"\"Resets a users money\"\"\"\n money = shelve.open('money-%s.db' % world.hostnicks[connection.host],\n writeback=True)\n money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':\n [], 'coinchance': [(True) for x in range(50)] + [(False) for x in\n range(50)]}\n money.sync()\n connection.ircsend(info['channel'], \n '%s: Your money data has been reset.' % info['sender'])\n",
"step-5": "import shelve\narguments = [\"self\", \"info\", \"args\", \"world\"]\nminlevel = 2\nhelpstring = \"moneyreset\"\n\ndef main(connection, info, args, world) :\n \"\"\"Resets a users money\"\"\"\n money = shelve.open(\"money-%s.db\" % (world.hostnicks[connection.host]), writeback=True)\n money[info[\"sender\"]] = {\"money\":100000, \"maxmoney\":100000, \"items\":[], \"coinchance\":[True for x in range(50)] + [False for x in range(50)]}\n money.sync()\n connection.ircsend(info[\"channel\"], \"%s: Your money data has been reset.\" % (info[\"sender\"]))\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
Subfunction A31 is responsible for inputting the component parameters
and then using the information about the component to determine
the pressure drop across that component
----------------------------------------------------------
Using data structure from /SysEng/jsonParameterFileFormat/ recall that each
cell is only present if there is data stored and thus
we can call "if "parameterName" in dict.keys()" to see if it is there.
"""
#Need math function
import math
class A31:
def __init__(self,dict): #dict is for dictionary
self.dict = dict
#Now we set several new local variables for ease of calling them later
self.CID = self.dict["CID"]
self.val = self.dict["values"]
self.calc = self.val["calculated"]
self.comp = self.val["component"]
self.fluid = self.val["fluid"]
# Create a new key for the pressure drop
self.calc["pressureDrop"] = {}
#We also need to define 'g' for this method (in SI)
self.g = 9.81
#
#Set up the logic tree to see what we need to do
#
#This method of finding the pressure drop for each different type
# of component is WAY underoptimized. Feel free to improve it! :)
if self.CID == 'LNE':
self.calc['pressureDrop']["value"] = self.lineCalc()
elif self.CID == 'BND':
self.calc['pressureDrop']["value"] = self.bendCalc()
elif self.CID == 'VLV':
self.calc['pressureDrop']["value"] = False
elif self.CID == 'ORF':
self.calc['pressureDrop']["value"] = False
elif self.CID == 'INJ':
self.calc['pressureDrop']["value"] = False
elif self.CID == 'CAT':
self.calc['pressureDrop']["value"] = False
elif self.CID == 'BND':
self.calc['pressureDrop']["value"] = False
elif self.CID == 'SPL':
self.calc['pressureDrop']["value"] = False
elif self.CID == 'JON':
self.calc['pressureDrop']["value"] = False
elif self.CID == 'EXP':
self.calc['pressureDrop']["value"] = self.expansionCalc()
elif self.CID == 'CON':
self.calc['pressureDrop']["value"] = self.contractionCalc()
if self.calc['pressureDrop']["value"] == False:
raise NotImplementedError('Calcuations for a '+
str(self.dict['CID'])+' have not yet '+
'been implemented in this' +
'pre-alpha state.')
else:
self.calc["pressureDrop"]["unit"] = "Pa"
self.dict["values"]["calculated"]["pressureDrop"] = self.calc["pressureDrop"]
def expansionCalc(self):
q = self.calc['dynamicPressure']
kt = self.calc['ktLosses']
pDrop = kt * q
return(pDrop)
def contractionCalc(self):
f = self.calc['frictionFactor']
kt = self.calc['ktLosses']
A1 = self.comp['upstreamArea']["value"]
A2 = self.comp['downstreamArea']["value"]
q = self.calc['dynamicPressure']
D1 = 2 * math.sqrt(A1/math.pi)
D2 = 2 * math.sqrt(A2/math.pi)
cL = self.comp['contractionLength']
if self.comp['contractionAngledOrCurved']["value"] == 'angle':
angle = self.comp['angle']["value"]
if angle < math.pi/4:
pDrop = (
kt + 4*f * (
cL / (
(D1 + D2) / 2
)
)
) * q
else:
pDrop = kt * q
else:
pDrop = kt * q
return(pDrop)
def lineCalc(self):
# Create some local variables for ease of use
rho = self.fluid["density"]["value"]
q = self.calc["dynamicPressure"]
g = self.g
z = self.comp["height"]["value"]
f = self.calc["frictionFactor"]
x = self.comp["length"]["value"]
Dh = self.comp["hydraulicDiameter"]["value"]
pDrop = rho*g*z + q * ((4*f*x)/Dh)
return(pDrop)
def bendCalc(self):
rho = self.fluid['density']["value"]
g = self.g
z = self.comp['height']["value"]
f = self.calc['frictionFactor']
x = self.comp['length']["value"]
Dh = self.comp['hydraulicDiameter']["value"]
kt = self.calc['ktLosses']
pDrop = rho*g*z + q * (
((4*f*x)/Dh) + kt
)
return(pDrop)
|
normal
|
{
"blob_id": "4b8038ddea60f371aa8da168ea4456372d6f0388",
"index": 2357,
"step-1": "<mask token>\n\n\nclass A31:\n\n def __init__(self, dict):\n self.dict = dict\n self.CID = self.dict['CID']\n self.val = self.dict['values']\n self.calc = self.val['calculated']\n self.comp = self.val['component']\n self.fluid = self.val['fluid']\n self.calc['pressureDrop'] = {}\n self.g = 9.81\n if self.CID == 'LNE':\n self.calc['pressureDrop']['value'] = self.lineCalc()\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = self.bendCalc()\n elif self.CID == 'VLV':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'ORF':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'INJ':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'CAT':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'SPL':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'JON':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'EXP':\n self.calc['pressureDrop']['value'] = self.expansionCalc()\n elif self.CID == 'CON':\n self.calc['pressureDrop']['value'] = self.contractionCalc()\n if self.calc['pressureDrop']['value'] == False:\n raise NotImplementedError('Calcuations for a ' + str(self.dict[\n 'CID']) + ' have not yet ' + 'been implemented in this' +\n 'pre-alpha state.')\n else:\n self.calc['pressureDrop']['unit'] = 'Pa'\n self.dict['values']['calculated']['pressureDrop'] = self.calc[\n 'pressureDrop']\n <mask token>\n\n def contractionCalc(self):\n f = self.calc['frictionFactor']\n kt = self.calc['ktLosses']\n A1 = self.comp['upstreamArea']['value']\n A2 = self.comp['downstreamArea']['value']\n q = self.calc['dynamicPressure']\n D1 = 2 * math.sqrt(A1 / math.pi)\n D2 = 2 * math.sqrt(A2 / math.pi)\n cL = self.comp['contractionLength']\n if self.comp['contractionAngledOrCurved']['value'] == 'angle':\n angle = self.comp['angle']['value']\n if angle < math.pi / 4:\n pDrop = (kt + 4 * f * (cL / ((D1 + D2) / 2))) * q\n else:\n pDrop = kt * q\n else:\n pDrop = kt * q\n return pDrop\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass A31:\n\n def __init__(self, dict):\n self.dict = dict\n self.CID = self.dict['CID']\n self.val = self.dict['values']\n self.calc = self.val['calculated']\n self.comp = self.val['component']\n self.fluid = self.val['fluid']\n self.calc['pressureDrop'] = {}\n self.g = 9.81\n if self.CID == 'LNE':\n self.calc['pressureDrop']['value'] = self.lineCalc()\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = self.bendCalc()\n elif self.CID == 'VLV':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'ORF':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'INJ':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'CAT':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'SPL':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'JON':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'EXP':\n self.calc['pressureDrop']['value'] = self.expansionCalc()\n elif self.CID == 'CON':\n self.calc['pressureDrop']['value'] = self.contractionCalc()\n if self.calc['pressureDrop']['value'] == False:\n raise NotImplementedError('Calcuations for a ' + str(self.dict[\n 'CID']) + ' have not yet ' + 'been implemented in this' +\n 'pre-alpha state.')\n else:\n self.calc['pressureDrop']['unit'] = 'Pa'\n self.dict['values']['calculated']['pressureDrop'] = self.calc[\n 'pressureDrop']\n <mask token>\n\n def contractionCalc(self):\n f = self.calc['frictionFactor']\n kt = self.calc['ktLosses']\n A1 = self.comp['upstreamArea']['value']\n A2 = self.comp['downstreamArea']['value']\n q = self.calc['dynamicPressure']\n D1 = 2 * math.sqrt(A1 / math.pi)\n D2 = 2 * math.sqrt(A2 / math.pi)\n cL = self.comp['contractionLength']\n if self.comp['contractionAngledOrCurved']['value'] == 'angle':\n angle = self.comp['angle']['value']\n if angle < math.pi / 4:\n pDrop = (kt + 4 * f * (cL / ((D1 + D2) / 2))) * q\n else:\n pDrop = kt * q\n else:\n pDrop = kt * q\n return pDrop\n\n def lineCalc(self):\n rho = self.fluid['density']['value']\n q = self.calc['dynamicPressure']\n g = self.g\n z = self.comp['height']['value']\n f = self.calc['frictionFactor']\n x = self.comp['length']['value']\n Dh = self.comp['hydraulicDiameter']['value']\n pDrop = rho * g * z + q * (4 * f * x / Dh)\n return pDrop\n\n def bendCalc(self):\n rho = self.fluid['density']['value']\n g = self.g\n z = self.comp['height']['value']\n f = self.calc['frictionFactor']\n x = self.comp['length']['value']\n Dh = self.comp['hydraulicDiameter']['value']\n kt = self.calc['ktLosses']\n pDrop = rho * g * z + q * (4 * f * x / Dh + kt)\n return pDrop\n",
"step-3": "<mask token>\n\n\nclass A31:\n\n def __init__(self, dict):\n self.dict = dict\n self.CID = self.dict['CID']\n self.val = self.dict['values']\n self.calc = self.val['calculated']\n self.comp = self.val['component']\n self.fluid = self.val['fluid']\n self.calc['pressureDrop'] = {}\n self.g = 9.81\n if self.CID == 'LNE':\n self.calc['pressureDrop']['value'] = self.lineCalc()\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = self.bendCalc()\n elif self.CID == 'VLV':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'ORF':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'INJ':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'CAT':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'SPL':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'JON':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'EXP':\n self.calc['pressureDrop']['value'] = self.expansionCalc()\n elif self.CID == 'CON':\n self.calc['pressureDrop']['value'] = self.contractionCalc()\n if self.calc['pressureDrop']['value'] == False:\n raise NotImplementedError('Calcuations for a ' + str(self.dict[\n 'CID']) + ' have not yet ' + 'been implemented in this' +\n 'pre-alpha state.')\n else:\n self.calc['pressureDrop']['unit'] = 'Pa'\n self.dict['values']['calculated']['pressureDrop'] = self.calc[\n 'pressureDrop']\n\n def expansionCalc(self):\n q = self.calc['dynamicPressure']\n kt = self.calc['ktLosses']\n pDrop = kt * q\n return pDrop\n\n def contractionCalc(self):\n f = self.calc['frictionFactor']\n kt = self.calc['ktLosses']\n A1 = self.comp['upstreamArea']['value']\n A2 = self.comp['downstreamArea']['value']\n q = self.calc['dynamicPressure']\n D1 = 2 * math.sqrt(A1 / math.pi)\n D2 = 2 * math.sqrt(A2 / math.pi)\n cL = self.comp['contractionLength']\n if self.comp['contractionAngledOrCurved']['value'] == 'angle':\n angle = self.comp['angle']['value']\n if angle < math.pi / 4:\n pDrop = (kt + 4 * f * (cL / ((D1 + D2) / 2))) * q\n else:\n pDrop = kt * q\n else:\n pDrop = kt * q\n return pDrop\n\n def lineCalc(self):\n rho = self.fluid['density']['value']\n q = self.calc['dynamicPressure']\n g = self.g\n z = self.comp['height']['value']\n f = self.calc['frictionFactor']\n x = self.comp['length']['value']\n Dh = self.comp['hydraulicDiameter']['value']\n pDrop = rho * g * z + q * (4 * f * x / Dh)\n return pDrop\n\n def bendCalc(self):\n rho = self.fluid['density']['value']\n g = self.g\n z = self.comp['height']['value']\n f = self.calc['frictionFactor']\n x = self.comp['length']['value']\n Dh = self.comp['hydraulicDiameter']['value']\n kt = self.calc['ktLosses']\n pDrop = rho * g * z + q * (4 * f * x / Dh + kt)\n return pDrop\n",
"step-4": "<mask token>\nimport math\n\n\nclass A31:\n\n def __init__(self, dict):\n self.dict = dict\n self.CID = self.dict['CID']\n self.val = self.dict['values']\n self.calc = self.val['calculated']\n self.comp = self.val['component']\n self.fluid = self.val['fluid']\n self.calc['pressureDrop'] = {}\n self.g = 9.81\n if self.CID == 'LNE':\n self.calc['pressureDrop']['value'] = self.lineCalc()\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = self.bendCalc()\n elif self.CID == 'VLV':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'ORF':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'INJ':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'CAT':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'BND':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'SPL':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'JON':\n self.calc['pressureDrop']['value'] = False\n elif self.CID == 'EXP':\n self.calc['pressureDrop']['value'] = self.expansionCalc()\n elif self.CID == 'CON':\n self.calc['pressureDrop']['value'] = self.contractionCalc()\n if self.calc['pressureDrop']['value'] == False:\n raise NotImplementedError('Calcuations for a ' + str(self.dict[\n 'CID']) + ' have not yet ' + 'been implemented in this' +\n 'pre-alpha state.')\n else:\n self.calc['pressureDrop']['unit'] = 'Pa'\n self.dict['values']['calculated']['pressureDrop'] = self.calc[\n 'pressureDrop']\n\n def expansionCalc(self):\n q = self.calc['dynamicPressure']\n kt = self.calc['ktLosses']\n pDrop = kt * q\n return pDrop\n\n def contractionCalc(self):\n f = self.calc['frictionFactor']\n kt = self.calc['ktLosses']\n A1 = self.comp['upstreamArea']['value']\n A2 = self.comp['downstreamArea']['value']\n q = self.calc['dynamicPressure']\n D1 = 2 * math.sqrt(A1 / math.pi)\n D2 = 2 * math.sqrt(A2 / math.pi)\n cL = self.comp['contractionLength']\n if self.comp['contractionAngledOrCurved']['value'] == 'angle':\n angle = self.comp['angle']['value']\n if angle < math.pi / 4:\n pDrop = (kt + 4 * f * (cL / ((D1 + D2) / 2))) * q\n else:\n pDrop = kt * q\n else:\n pDrop = kt * q\n return pDrop\n\n def lineCalc(self):\n rho = self.fluid['density']['value']\n q = self.calc['dynamicPressure']\n g = self.g\n z = self.comp['height']['value']\n f = self.calc['frictionFactor']\n x = self.comp['length']['value']\n Dh = self.comp['hydraulicDiameter']['value']\n pDrop = rho * g * z + q * (4 * f * x / Dh)\n return pDrop\n\n def bendCalc(self):\n rho = self.fluid['density']['value']\n g = self.g\n z = self.comp['height']['value']\n f = self.calc['frictionFactor']\n x = self.comp['length']['value']\n Dh = self.comp['hydraulicDiameter']['value']\n kt = self.calc['ktLosses']\n pDrop = rho * g * z + q * (4 * f * x / Dh + kt)\n return pDrop\n",
"step-5": "\"\"\"\nSubfunction A31 is responsible for inputting the component parameters\nand then using the information about the component to determine\nthe pressure drop across that component\n----------------------------------------------------------\nUsing data structure from /SysEng/jsonParameterFileFormat/ recall that each\ncell is only present if there is data stored and thus\nwe can call \"if \"parameterName\" in dict.keys()\" to see if it is there.\n\"\"\"\n\n#Need math function\nimport math\n\n\nclass A31:\n def __init__(self,dict): #dict is for dictionary\n self.dict = dict\n #Now we set several new local variables for ease of calling them later\n self.CID = self.dict[\"CID\"]\n self.val = self.dict[\"values\"]\n self.calc = self.val[\"calculated\"]\n self.comp = self.val[\"component\"]\n self.fluid = self.val[\"fluid\"]\n # Create a new key for the pressure drop\n self.calc[\"pressureDrop\"] = {}\n #We also need to define 'g' for this method (in SI)\n self.g = 9.81 \n #\n #Set up the logic tree to see what we need to do\n #\n #This method of finding the pressure drop for each different type\n # of component is WAY underoptimized. Feel free to improve it! :)\n if self.CID == 'LNE':\n self.calc['pressureDrop'][\"value\"] = self.lineCalc()\n elif self.CID == 'BND':\n self.calc['pressureDrop'][\"value\"] = self.bendCalc()\n elif self.CID == 'VLV':\n self.calc['pressureDrop'][\"value\"] = False\n elif self.CID == 'ORF':\n self.calc['pressureDrop'][\"value\"] = False\n elif self.CID == 'INJ':\n self.calc['pressureDrop'][\"value\"] = False\n elif self.CID == 'CAT':\n self.calc['pressureDrop'][\"value\"] = False\n elif self.CID == 'BND':\n self.calc['pressureDrop'][\"value\"] = False\n elif self.CID == 'SPL':\n self.calc['pressureDrop'][\"value\"] = False\n elif self.CID == 'JON':\n self.calc['pressureDrop'][\"value\"] = False\n elif self.CID == 'EXP':\n self.calc['pressureDrop'][\"value\"] = self.expansionCalc()\n elif self.CID == 'CON':\n self.calc['pressureDrop'][\"value\"] = self.contractionCalc()\n if self.calc['pressureDrop'][\"value\"] == False:\n raise NotImplementedError('Calcuations for a '+\n str(self.dict['CID'])+' have not yet '+\n 'been implemented in this' +\n 'pre-alpha state.')\n else:\n self.calc[\"pressureDrop\"][\"unit\"] = \"Pa\"\n self.dict[\"values\"][\"calculated\"][\"pressureDrop\"] = self.calc[\"pressureDrop\"]\n\n def expansionCalc(self):\n q = self.calc['dynamicPressure']\n kt = self.calc['ktLosses']\n pDrop = kt * q\n return(pDrop)\n\n def contractionCalc(self):\n f = self.calc['frictionFactor']\n kt = self.calc['ktLosses']\n A1 = self.comp['upstreamArea'][\"value\"]\n A2 = self.comp['downstreamArea'][\"value\"]\n q = self.calc['dynamicPressure']\n D1 = 2 * math.sqrt(A1/math.pi)\n D2 = 2 * math.sqrt(A2/math.pi)\n cL = self.comp['contractionLength']\n if self.comp['contractionAngledOrCurved'][\"value\"] == 'angle':\n angle = self.comp['angle'][\"value\"]\n if angle < math.pi/4:\n pDrop = (\n kt + 4*f * (\n cL / (\n (D1 + D2) / 2\n )\n )\n ) * q\n else:\n pDrop = kt * q\n else:\n pDrop = kt * q\n return(pDrop) \n\n def lineCalc(self):\n # Create some local variables for ease of use\n rho = self.fluid[\"density\"][\"value\"]\n q = self.calc[\"dynamicPressure\"]\n g = self.g\n z = self.comp[\"height\"][\"value\"]\n f = self.calc[\"frictionFactor\"]\n x = self.comp[\"length\"][\"value\"]\n Dh = self.comp[\"hydraulicDiameter\"][\"value\"]\n pDrop = rho*g*z + q * ((4*f*x)/Dh)\n return(pDrop)\n\n def bendCalc(self):\n rho = self.fluid['density'][\"value\"]\n g = 
self.g\n z = self.comp['height'][\"value\"]\n f = self.calc['frictionFactor']\n x = self.comp['length'][\"value\"]\n Dh = self.comp['hydraulicDiameter'][\"value\"]\n kt = self.calc['ktLosses']\n pDrop = rho*g*z + q * (\n ((4*f*x)/Dh) + kt\n )\n return(pDrop)\n \n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
#"countinu" example : repeat printing "Too small" or "Input is..." according to input's lenth
while True:
s=raw_input('Enter something: ')
if s == 'quit' :
break
if len(s) <3:
print 'Too small'
continue
#continue : not excute lower line, go to next loop
print 'Input is of sufficient lenth'
|
normal
|
{
"blob_id": "915d6547057f43c1cc5d96d9cb4529c56bc85559",
"index": 3412,
"step-1": "#\"countinu\" example : repeat printing \"Too small\" or \"Input is...\" according to input's lenth\r\n\r\nwhile True:\r\n s=raw_input('Enter something: ')\r\n if s == 'quit' :\r\n break\r\n if len(s) <3:\r\n print 'Too small'\r\n continue\r\n #continue : not excute lower line, go to next loop\r\n print 'Input is of sufficient lenth'\r\n\r\n\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
import datetime
from unittest.mock import patch
from odoo.tests import common
import odoo
from .common import RunbotCase
class TestSchedule(RunbotCase):
def setUp(self):
# entering test mode to avoid that the _schedule method commits records
registry = odoo.registry()
super(TestSchedule, self).setUp()
self.repo = self.Repo.create({'name': '[email protected]:foo/bar'})
self.branch = self.Branch.create({
'repo_id': self.repo.id,
'name': 'refs/heads/master'
})
@patch('odoo.addons.runbot.models.build.os.path.getmtime')
@patch('odoo.addons.runbot.models.build.docker_state')
def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):
""" Test that results are set even when job_30_run is skipped """
job_end_time = datetime.datetime.now()
mock_getmtime.return_value = job_end_time.timestamp()
build = self.Build.create({
'local_state': 'testing',
'branch_id': self.branch.id,
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
'port': '1234',
'host': 'runbotxx',
'job_start': datetime.datetime.now(),
'config_id': self.env.ref('runbot.runbot_build_config_default').id,
'active_step': self.env.ref('runbot.runbot_build_config_step_run').id,
})
domain = [('repo_id', 'in', (self.repo.id, ))]
domain_host = domain + [('host', '=', 'runbotxx')]
build_ids = self.Build.search(domain_host + [('local_state', 'in', ['testing', 'running'])])
mock_docker_state.return_value = 'UNKNOWN'
self.assertEqual(build.local_state, 'testing')
build_ids._schedule() # too fast, docker not started
self.assertEqual(build.local_state, 'testing')
build_ids.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=70)}) # docker never started
build_ids._schedule()
self.assertEqual(build.local_state, 'done')
self.assertEqual(build.local_result, 'ok')
|
normal
|
{
"blob_id": "aa515b1b919eb557cd8c7e5f4d22773980b5af96",
"index": 8213,
"step-1": "<mask token>\n\n\nclass TestSchedule(RunbotCase):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestSchedule(RunbotCase):\n <mask token>\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n build = self.Build.create({'local_state': 'testing', 'branch_id':\n self.branch.id, 'name':\n 'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',\n 'host': 'runbotxx', 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default')\n .id, 'active_step': self.env.ref(\n 'runbot.runbot_build_config_step_run').id})\n domain = [('repo_id', 'in', (self.repo.id,))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in',\n ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule()\n self.assertEqual(build.local_state, 'testing')\n build_ids.write({'job_start': datetime.datetime.now() - datetime.\n timedelta(seconds=70)})\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-3": "<mask token>\n\n\nclass TestSchedule(RunbotCase):\n\n def setUp(self):\n registry = odoo.registry()\n super(TestSchedule, self).setUp()\n self.repo = self.Repo.create({'name': '[email protected]:foo/bar'})\n self.branch = self.Branch.create({'repo_id': self.repo.id, 'name':\n 'refs/heads/master'})\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n build = self.Build.create({'local_state': 'testing', 'branch_id':\n self.branch.id, 'name':\n 'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',\n 'host': 'runbotxx', 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default')\n .id, 'active_step': self.env.ref(\n 'runbot.runbot_build_config_step_run').id})\n domain = [('repo_id', 'in', (self.repo.id,))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in',\n ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule()\n self.assertEqual(build.local_state, 'testing')\n build_ids.write({'job_start': datetime.datetime.now() - datetime.\n timedelta(seconds=70)})\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-4": "import datetime\nfrom unittest.mock import patch\nfrom odoo.tests import common\nimport odoo\nfrom .common import RunbotCase\n\n\nclass TestSchedule(RunbotCase):\n\n def setUp(self):\n registry = odoo.registry()\n super(TestSchedule, self).setUp()\n self.repo = self.Repo.create({'name': '[email protected]:foo/bar'})\n self.branch = self.Branch.create({'repo_id': self.repo.id, 'name':\n 'refs/heads/master'})\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n build = self.Build.create({'local_state': 'testing', 'branch_id':\n self.branch.id, 'name':\n 'd0d0caca0000ffffffffffffffffffffffffffff', 'port': '1234',\n 'host': 'runbotxx', 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default')\n .id, 'active_step': self.env.ref(\n 'runbot.runbot_build_config_step_run').id})\n domain = [('repo_id', 'in', (self.repo.id,))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in',\n ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule()\n self.assertEqual(build.local_state, 'testing')\n build_ids.write({'job_start': datetime.datetime.now() - datetime.\n timedelta(seconds=70)})\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-5": "# -*- coding: utf-8 -*-\nimport datetime\nfrom unittest.mock import patch\nfrom odoo.tests import common\nimport odoo\nfrom .common import RunbotCase\n\n\nclass TestSchedule(RunbotCase):\n\n def setUp(self):\n # entering test mode to avoid that the _schedule method commits records\n registry = odoo.registry()\n super(TestSchedule, self).setUp()\n\n self.repo = self.Repo.create({'name': '[email protected]:foo/bar'})\n self.branch = self.Branch.create({\n 'repo_id': self.repo.id,\n 'name': 'refs/heads/master'\n })\n\n @patch('odoo.addons.runbot.models.build.os.path.getmtime')\n @patch('odoo.addons.runbot.models.build.docker_state')\n def test_schedule_mark_done(self, mock_docker_state, mock_getmtime):\n \"\"\" Test that results are set even when job_30_run is skipped \"\"\"\n job_end_time = datetime.datetime.now()\n mock_getmtime.return_value = job_end_time.timestamp()\n\n build = self.Build.create({\n 'local_state': 'testing',\n 'branch_id': self.branch.id,\n 'name': 'd0d0caca0000ffffffffffffffffffffffffffff',\n 'port': '1234',\n 'host': 'runbotxx',\n 'job_start': datetime.datetime.now(),\n 'config_id': self.env.ref('runbot.runbot_build_config_default').id,\n 'active_step': self.env.ref('runbot.runbot_build_config_step_run').id,\n })\n domain = [('repo_id', 'in', (self.repo.id, ))]\n domain_host = domain + [('host', '=', 'runbotxx')]\n build_ids = self.Build.search(domain_host + [('local_state', 'in', ['testing', 'running'])])\n mock_docker_state.return_value = 'UNKNOWN'\n self.assertEqual(build.local_state, 'testing')\n build_ids._schedule() # too fast, docker not started\n self.assertEqual(build.local_state, 'testing')\n\n build_ids.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=70)}) # docker never started\n build_ids._schedule()\n self.assertEqual(build.local_state, 'done')\n self.assertEqual(build.local_result, 'ok')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import os, tempfile, shutil
from flask import Flask, flash, request, redirect, url_for, send_from_directory, send_file
from werkzeug.utils import secure_filename
from contextlib import contextmanager
"""
Flask stores uploaded FileStorage objects in memory if they are small. Otherwise, it internally uses tempfile.gettempdir() which returns the globally
configured temporary directory that tempfile is using.
WARNING: Flask accepts an unlimited file size unless I limit it
Flask encourages the use of <FileStorage>.save() to save uploaded files on the server. Afterwards, I can interact with the files normally. There does
not appear to be an easy way to directly interact with a FileStorage object with such functions as open()
"""
#UPLOAD_FOLDER = './uploads'
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
app = Flask(__name__)
# Limit the file size fo 16 MB
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
# I want each user to have their own upload folder
#app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
def allowed_file(filename):
return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
"""
Upload a text file and the server will process the file by writing a single line to it and returning the modified file. The temporary directory where
the file was saved (and modified) is deleted at the end of the request. It works exactly as expected! Try stepping through it.
"""
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
# check if the post request has the file part
if 'file' not in request.files:
flash('No file part')
return redirect(request.url)
f = request.files['file']
# if the user does not select file, browser should also submit an empty part without filename
if f.filename == '':
flash('No selected file')
return redirect(request.url)
if f and allowed_file(f.filename):
"""
This code is fine because 'with' acts like a finally block. The context manager will always exit (unless the program abnormally
terminates), even if an exception is thrown or return is called within the 'with' block. Thus, I can send the processed file to the
client and then the entire directory will be deleted.
"""
filename = secure_filename(f.filename)
with TemporaryDirectory() as temp_dir:
print("temp_dir was: " + temp_dir)
path = os.path.join(temp_dir, filename)
f.save(path)
#f.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
with open(path, "r+") as my_file:
my_file.write("The server wrote this line.\n")
return send_from_directory(temp_dir, filename)
#return redirect(url_for('uploaded_file', filename=filename))
return '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new File</h1>
<form method=post enctype=multipart/form-data>
<input type=file name=file>
<input type=submit value=Upload>
</form>
'''
# Send the uploaded file right back to the user as an example. I don't do this because I process the file and spit it back to the user
"""
@app.route('/uploads/<filename>')
def uploaded_file(filename):
return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
"""
# Create a context manager to deal with automatically deleting the temporary directory when the 'with' statement exists
@contextmanager
def TemporaryDirectory():
name = tempfile.mkdtemp()
try:
yield name
finally:
shutil.rmtree(name)
@app.route("/safe", methods=["POST"])
def safe():
f = request.files["file-form-param"]
name = secure_filename(f.filename)
filepath = os.path.join(os.path.dirname(__file__), "uploads", name)
f.save(filepath)
return str({
"filename": name,
"saved at": filepath
})
@app.route("/unsafe", methods=["POST"])
def unsafe():
f = request.files["file-form-param"]
filepath = os.path.join(os.path.dirname(__file__), "uploads", f.filename)
f.save(filepath)
return str({
"filename": f.filename,
"saved at": filepath
})
@app.route("/sendfile", methods=["POST"])
def send_file_py():
filename = request.form.get("filename")
return send_file(os.path.join(os.path.dirname(__file__), "uploads", filename))
@app.route("/sendfromdirectory", methods=["POST"])
def send_from_directory_py():
filename = request.form.get("filename")
return send_from_directory(os.path.join(os.path.dirname(__file__), "uploads"), filename)
|
normal
|
{
"blob_id": "9f6cfeff9e00079715827a2887263c14a1bb51ff",
"index": 7679,
"step-1": "<mask token>\n\n\n@contextmanager\ndef TemporaryDirectory():\n name = tempfile.mkdtemp()\n try:\n yield name\n finally:\n shutil.rmtree(name)\n\n\[email protected]('/safe', methods=['POST'])\ndef safe():\n f = request.files['file-form-param']\n name = secure_filename(f.filename)\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', name)\n f.save(filepath)\n return str({'filename': name, 'saved at': filepath})\n\n\[email protected]('/unsafe', methods=['POST'])\ndef unsafe():\n f = request.files['file-form-param']\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', f.filename)\n f.save(filepath)\n return str({'filename': f.filename, 'saved at': filepath})\n\n\[email protected]('/sendfile', methods=['POST'])\ndef send_file_py():\n filename = request.form.get('filename')\n return send_file(os.path.join(os.path.dirname(__file__), 'uploads',\n filename))\n\n\[email protected]('/sendfromdirectory', methods=['POST'])\ndef send_from_directory_py():\n filename = request.form.get('filename')\n return send_from_directory(os.path.join(os.path.dirname(__file__),\n 'uploads'), filename)\n",
"step-2": "<mask token>\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1].lower(\n ) in ALLOWED_EXTENSIONS\n\n\n<mask token>\n\n\n@contextmanager\ndef TemporaryDirectory():\n name = tempfile.mkdtemp()\n try:\n yield name\n finally:\n shutil.rmtree(name)\n\n\[email protected]('/safe', methods=['POST'])\ndef safe():\n f = request.files['file-form-param']\n name = secure_filename(f.filename)\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', name)\n f.save(filepath)\n return str({'filename': name, 'saved at': filepath})\n\n\[email protected]('/unsafe', methods=['POST'])\ndef unsafe():\n f = request.files['file-form-param']\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', f.filename)\n f.save(filepath)\n return str({'filename': f.filename, 'saved at': filepath})\n\n\[email protected]('/sendfile', methods=['POST'])\ndef send_file_py():\n filename = request.form.get('filename')\n return send_file(os.path.join(os.path.dirname(__file__), 'uploads',\n filename))\n\n\[email protected]('/sendfromdirectory', methods=['POST'])\ndef send_from_directory_py():\n filename = request.form.get('filename')\n return send_from_directory(os.path.join(os.path.dirname(__file__),\n 'uploads'), filename)\n",
"step-3": "<mask token>\nALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])\napp = Flask(__name__)\napp.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1].lower(\n ) in ALLOWED_EXTENSIONS\n\n\n<mask token>\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST':\n if 'file' not in request.files:\n flash('No file part')\n return redirect(request.url)\n f = request.files['file']\n if f.filename == '':\n flash('No selected file')\n return redirect(request.url)\n if f and allowed_file(f.filename):\n \"\"\" \n This code is fine because 'with' acts like a finally block. The context manager will always exit (unless the program abnormally\n terminates), even if an exception is thrown or return is called within the 'with' block. Thus, I can send the processed file to the\n client and then the entire directory will be deleted.\n \"\"\"\n filename = secure_filename(f.filename)\n with TemporaryDirectory() as temp_dir:\n print('temp_dir was: ' + temp_dir)\n path = os.path.join(temp_dir, filename)\n f.save(path)\n with open(path, 'r+') as my_file:\n my_file.write('The server wrote this line.\\n')\n return send_from_directory(temp_dir, filename)\n return \"\"\"\n <!doctype html>\n <title>Upload new File</title>\n <h1>Upload new File</h1>\n <form method=post enctype=multipart/form-data>\n <input type=file name=file>\n <input type=submit value=Upload>\n </form>\n \"\"\"\n\n\n<mask token>\n\n\n@contextmanager\ndef TemporaryDirectory():\n name = tempfile.mkdtemp()\n try:\n yield name\n finally:\n shutil.rmtree(name)\n\n\[email protected]('/safe', methods=['POST'])\ndef safe():\n f = request.files['file-form-param']\n name = secure_filename(f.filename)\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', name)\n f.save(filepath)\n return str({'filename': name, 'saved at': filepath})\n\n\[email protected]('/unsafe', methods=['POST'])\ndef unsafe():\n f = request.files['file-form-param']\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', f.filename)\n f.save(filepath)\n return str({'filename': f.filename, 'saved at': filepath})\n\n\[email protected]('/sendfile', methods=['POST'])\ndef send_file_py():\n filename = request.form.get('filename')\n return send_file(os.path.join(os.path.dirname(__file__), 'uploads',\n filename))\n\n\[email protected]('/sendfromdirectory', methods=['POST'])\ndef send_from_directory_py():\n filename = request.form.get('filename')\n return send_from_directory(os.path.join(os.path.dirname(__file__),\n 'uploads'), filename)\n",
"step-4": "import os, tempfile, shutil\nfrom flask import Flask, flash, request, redirect, url_for, send_from_directory, send_file\nfrom werkzeug.utils import secure_filename\nfrom contextlib import contextmanager\n<mask token>\nALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])\napp = Flask(__name__)\napp.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1].lower(\n ) in ALLOWED_EXTENSIONS\n\n\n<mask token>\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST':\n if 'file' not in request.files:\n flash('No file part')\n return redirect(request.url)\n f = request.files['file']\n if f.filename == '':\n flash('No selected file')\n return redirect(request.url)\n if f and allowed_file(f.filename):\n \"\"\" \n This code is fine because 'with' acts like a finally block. The context manager will always exit (unless the program abnormally\n terminates), even if an exception is thrown or return is called within the 'with' block. Thus, I can send the processed file to the\n client and then the entire directory will be deleted.\n \"\"\"\n filename = secure_filename(f.filename)\n with TemporaryDirectory() as temp_dir:\n print('temp_dir was: ' + temp_dir)\n path = os.path.join(temp_dir, filename)\n f.save(path)\n with open(path, 'r+') as my_file:\n my_file.write('The server wrote this line.\\n')\n return send_from_directory(temp_dir, filename)\n return \"\"\"\n <!doctype html>\n <title>Upload new File</title>\n <h1>Upload new File</h1>\n <form method=post enctype=multipart/form-data>\n <input type=file name=file>\n <input type=submit value=Upload>\n </form>\n \"\"\"\n\n\n<mask token>\n\n\n@contextmanager\ndef TemporaryDirectory():\n name = tempfile.mkdtemp()\n try:\n yield name\n finally:\n shutil.rmtree(name)\n\n\[email protected]('/safe', methods=['POST'])\ndef safe():\n f = request.files['file-form-param']\n name = secure_filename(f.filename)\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', name)\n f.save(filepath)\n return str({'filename': name, 'saved at': filepath})\n\n\[email protected]('/unsafe', methods=['POST'])\ndef unsafe():\n f = request.files['file-form-param']\n filepath = os.path.join(os.path.dirname(__file__), 'uploads', f.filename)\n f.save(filepath)\n return str({'filename': f.filename, 'saved at': filepath})\n\n\[email protected]('/sendfile', methods=['POST'])\ndef send_file_py():\n filename = request.form.get('filename')\n return send_file(os.path.join(os.path.dirname(__file__), 'uploads',\n filename))\n\n\[email protected]('/sendfromdirectory', methods=['POST'])\ndef send_from_directory_py():\n filename = request.form.get('filename')\n return send_from_directory(os.path.join(os.path.dirname(__file__),\n 'uploads'), filename)\n",
"step-5": "import os, tempfile, shutil\nfrom flask import Flask, flash, request, redirect, url_for, send_from_directory, send_file\nfrom werkzeug.utils import secure_filename\nfrom contextlib import contextmanager\n\n\n\"\"\"\nFlask stores uploaded FileStorage objects in memory if they are small. Otherwise, it internally uses tempfile.gettempdir() which returns the globally\nconfigured temporary directory that tempfile is using.\n\nWARNING: Flask accepts an unlimited file size unless I limit it\n\nFlask encourages the use of <FileStorage>.save() to save uploaded files on the server. Afterwards, I can interact with the files normally. There does\nnot appear to be an easy way to directly interact with a FileStorage object with such functions as open()\n\"\"\"\n\n\n#UPLOAD_FOLDER = './uploads'\nALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])\n\n\napp = Flask(__name__)\n# Limit the file size fo 16 MB\napp.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024\n# I want each user to have their own upload folder\n#app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS\n\n\n\"\"\"\nUpload a text file and the server will process the file by writing a single line to it and returning the modified file. The temporary directory where\nthe file was saved (and modified) is deleted at the end of the request. It works exactly as expected! Try stepping through it.\n\"\"\"\[email protected]('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST':\n # check if the post request has the file part\n if 'file' not in request.files:\n flash('No file part')\n return redirect(request.url)\n f = request.files['file']\n # if the user does not select file, browser should also submit an empty part without filename\n if f.filename == '':\n flash('No selected file')\n return redirect(request.url)\n if f and allowed_file(f.filename):\n \"\"\" \n This code is fine because 'with' acts like a finally block. The context manager will always exit (unless the program abnormally\n terminates), even if an exception is thrown or return is called within the 'with' block. Thus, I can send the processed file to the\n client and then the entire directory will be deleted.\n \"\"\"\n filename = secure_filename(f.filename)\n with TemporaryDirectory() as temp_dir:\n print(\"temp_dir was: \" + temp_dir)\n path = os.path.join(temp_dir, filename)\n f.save(path)\n #f.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n with open(path, \"r+\") as my_file:\n my_file.write(\"The server wrote this line.\\n\")\n return send_from_directory(temp_dir, filename)\n #return redirect(url_for('uploaded_file', filename=filename))\n return '''\n <!doctype html>\n <title>Upload new File</title>\n <h1>Upload new File</h1>\n <form method=post enctype=multipart/form-data>\n <input type=file name=file>\n <input type=submit value=Upload>\n </form>\n '''\n\n\n# Send the uploaded file right back to the user as an example. 
I don't do this because I process the file and spit it back to the user\n\"\"\"\[email protected]('/uploads/<filename>')\ndef uploaded_file(filename):\n return send_from_directory(app.config['UPLOAD_FOLDER'], filename)\n\"\"\"\n\n\n# Create a context manager to deal with automatically deleting the temporary directory when the 'with' statement exists\n@contextmanager\ndef TemporaryDirectory():\n name = tempfile.mkdtemp()\n try:\n yield name\n finally:\n shutil.rmtree(name)\n\n\[email protected](\"/safe\", methods=[\"POST\"])\ndef safe():\n f = request.files[\"file-form-param\"]\n name = secure_filename(f.filename)\n filepath = os.path.join(os.path.dirname(__file__), \"uploads\", name)\n f.save(filepath)\n return str({\n \"filename\": name,\n \"saved at\": filepath\n })\n\n\[email protected](\"/unsafe\", methods=[\"POST\"])\ndef unsafe():\n f = request.files[\"file-form-param\"]\n filepath = os.path.join(os.path.dirname(__file__), \"uploads\", f.filename)\n f.save(filepath)\n return str({\n \"filename\": f.filename,\n \"saved at\": filepath\n })\n\n\[email protected](\"/sendfile\", methods=[\"POST\"])\ndef send_file_py():\n filename = request.form.get(\"filename\")\n return send_file(os.path.join(os.path.dirname(__file__), \"uploads\", filename))\n\n\[email protected](\"/sendfromdirectory\", methods=[\"POST\"])\ndef send_from_directory_py():\n filename = request.form.get(\"filename\")\n return send_from_directory(os.path.join(os.path.dirname(__file__), \"uploads\"), filename)\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
from CategoryReplacer.CategoryReplcaers import CountEncoder
from CategoryReplacer.CategoryReplcaers import CombinCountEncoder
from CategoryReplacer.CategoryReplcaers import FrequencyEncoder
from CategoryReplacer.CategoryReplcaers import NullCounter
from CategoryReplacer.CategoryReplcaers import AutoCalcEncoder
from CategoryReplacer.CategoryReplcaers import extract_obj_cols
__all__ = [
"CountEncoder",
"CombinCountEncoder",
"FrequencyEncoder",
"NullCounter",
"AutoCalcEncoder",
"extract_obj_cols"
]
|
normal
|
{
"blob_id": "d28e517e72c3689e973a5b1255d414648de418fb",
"index": 1658,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = ['CountEncoder', 'CombinCountEncoder', 'FrequencyEncoder',\n 'NullCounter', 'AutoCalcEncoder', 'extract_obj_cols']\n",
"step-3": "from CategoryReplacer.CategoryReplcaers import CountEncoder\nfrom CategoryReplacer.CategoryReplcaers import CombinCountEncoder\nfrom CategoryReplacer.CategoryReplcaers import FrequencyEncoder\nfrom CategoryReplacer.CategoryReplcaers import NullCounter\nfrom CategoryReplacer.CategoryReplcaers import AutoCalcEncoder\nfrom CategoryReplacer.CategoryReplcaers import extract_obj_cols\n__all__ = ['CountEncoder', 'CombinCountEncoder', 'FrequencyEncoder',\n 'NullCounter', 'AutoCalcEncoder', 'extract_obj_cols']\n",
"step-4": "from CategoryReplacer.CategoryReplcaers import CountEncoder\nfrom CategoryReplacer.CategoryReplcaers import CombinCountEncoder\nfrom CategoryReplacer.CategoryReplcaers import FrequencyEncoder\nfrom CategoryReplacer.CategoryReplcaers import NullCounter\nfrom CategoryReplacer.CategoryReplcaers import AutoCalcEncoder\nfrom CategoryReplacer.CategoryReplcaers import extract_obj_cols\n\n__all__ = [\n \"CountEncoder\",\n \"CombinCountEncoder\",\n \"FrequencyEncoder\",\n \"NullCounter\",\n \"AutoCalcEncoder\",\n \"extract_obj_cols\"\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
GoldenTemplate based on the golden-layout library.
"""
from __future__ import annotations
import pathlib
from typing import TYPE_CHECKING, Literal
import param
from ...config import config
from ...io.resources import JS_URLS
from ..base import BasicTemplate
if TYPE_CHECKING:
from ...io.resources import ResourcesType
class GoldenTemplate(BasicTemplate):
"""
GoldenTemplate is built on top of golden-layout library.
"""
sidebar_width = param.Integer(default=20, constant=True, doc="""
The width of the sidebar in percent.""")
_css = pathlib.Path(__file__).parent / 'golden.css'
_template = pathlib.Path(__file__).parent / 'golden.html'
_resources = {
'css': {
'goldenlayout': f"{config.npm_cdn}/[email protected]/src/css/goldenlayout-base.css",
'golden-theme-dark': f"{config.npm_cdn}/[email protected]/src/css/goldenlayout-dark-theme.css",
'golden-theme-light': f"{config.npm_cdn}/[email protected]/src/css/goldenlayout-light-theme.css"
},
'js': {
'jquery': JS_URLS['jQuery'],
'goldenlayout': f"{config.npm_cdn}/[email protected]/dist/goldenlayout.min.js"
}
}
def _apply_root(self, name, model, tags):
if 'main' in tags:
model.margin = (10, 15, 10, 10)
def resolve_resources(self, cdn: bool | Literal['auto'] = 'auto') -> ResourcesType:
resources = super().resolve_resources(cdn=cdn)
del_theme = 'dark' if self._design.theme._name =='default' else 'light'
del resources['css'][f'golden-theme-{del_theme}']
return resources
|
normal
|
{
"blob_id": "5bfb69d1608b397d6a19e663164a30089e4f67ad",
"index": 2859,
"step-1": "<mask token>\n\n\nclass GoldenTemplate(BasicTemplate):\n <mask token>\n sidebar_width = param.Integer(default=20, constant=True, doc=\n \"\"\"\n The width of the sidebar in percent.\"\"\")\n _css = pathlib.Path(__file__).parent / 'golden.css'\n _template = pathlib.Path(__file__).parent / 'golden.html'\n _resources = {'css': {'goldenlayout':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-base.css',\n 'golden-theme-dark':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-dark-theme.css'\n , 'golden-theme-light':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-light-theme.css'\n }, 'js': {'jquery': JS_URLS['jQuery'], 'goldenlayout':\n f'{config.npm_cdn}/[email protected]/dist/goldenlayout.min.js'}}\n\n def _apply_root(self, name, model, tags):\n if 'main' in tags:\n model.margin = 10, 15, 10, 10\n\n def resolve_resources(self, cdn: (bool | Literal['auto'])='auto'\n ) ->ResourcesType:\n resources = super().resolve_resources(cdn=cdn)\n del_theme = ('dark' if self._design.theme._name == 'default' else\n 'light')\n del resources['css'][f'golden-theme-{del_theme}']\n return resources\n",
"step-2": "<mask token>\n\n\nclass GoldenTemplate(BasicTemplate):\n \"\"\"\n GoldenTemplate is built on top of golden-layout library.\n \"\"\"\n sidebar_width = param.Integer(default=20, constant=True, doc=\n \"\"\"\n The width of the sidebar in percent.\"\"\")\n _css = pathlib.Path(__file__).parent / 'golden.css'\n _template = pathlib.Path(__file__).parent / 'golden.html'\n _resources = {'css': {'goldenlayout':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-base.css',\n 'golden-theme-dark':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-dark-theme.css'\n , 'golden-theme-light':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-light-theme.css'\n }, 'js': {'jquery': JS_URLS['jQuery'], 'goldenlayout':\n f'{config.npm_cdn}/[email protected]/dist/goldenlayout.min.js'}}\n\n def _apply_root(self, name, model, tags):\n if 'main' in tags:\n model.margin = 10, 15, 10, 10\n\n def resolve_resources(self, cdn: (bool | Literal['auto'])='auto'\n ) ->ResourcesType:\n resources = super().resolve_resources(cdn=cdn)\n del_theme = ('dark' if self._design.theme._name == 'default' else\n 'light')\n del resources['css'][f'golden-theme-{del_theme}']\n return resources\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n from ...io.resources import ResourcesType\n\n\nclass GoldenTemplate(BasicTemplate):\n \"\"\"\n GoldenTemplate is built on top of golden-layout library.\n \"\"\"\n sidebar_width = param.Integer(default=20, constant=True, doc=\n \"\"\"\n The width of the sidebar in percent.\"\"\")\n _css = pathlib.Path(__file__).parent / 'golden.css'\n _template = pathlib.Path(__file__).parent / 'golden.html'\n _resources = {'css': {'goldenlayout':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-base.css',\n 'golden-theme-dark':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-dark-theme.css'\n , 'golden-theme-light':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-light-theme.css'\n }, 'js': {'jquery': JS_URLS['jQuery'], 'goldenlayout':\n f'{config.npm_cdn}/[email protected]/dist/goldenlayout.min.js'}}\n\n def _apply_root(self, name, model, tags):\n if 'main' in tags:\n model.margin = 10, 15, 10, 10\n\n def resolve_resources(self, cdn: (bool | Literal['auto'])='auto'\n ) ->ResourcesType:\n resources = super().resolve_resources(cdn=cdn)\n del_theme = ('dark' if self._design.theme._name == 'default' else\n 'light')\n del resources['css'][f'golden-theme-{del_theme}']\n return resources\n",
"step-4": "<mask token>\nfrom __future__ import annotations\nimport pathlib\nfrom typing import TYPE_CHECKING, Literal\nimport param\nfrom ...config import config\nfrom ...io.resources import JS_URLS\nfrom ..base import BasicTemplate\nif TYPE_CHECKING:\n from ...io.resources import ResourcesType\n\n\nclass GoldenTemplate(BasicTemplate):\n \"\"\"\n GoldenTemplate is built on top of golden-layout library.\n \"\"\"\n sidebar_width = param.Integer(default=20, constant=True, doc=\n \"\"\"\n The width of the sidebar in percent.\"\"\")\n _css = pathlib.Path(__file__).parent / 'golden.css'\n _template = pathlib.Path(__file__).parent / 'golden.html'\n _resources = {'css': {'goldenlayout':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-base.css',\n 'golden-theme-dark':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-dark-theme.css'\n , 'golden-theme-light':\n f'{config.npm_cdn}/[email protected]/src/css/goldenlayout-light-theme.css'\n }, 'js': {'jquery': JS_URLS['jQuery'], 'goldenlayout':\n f'{config.npm_cdn}/[email protected]/dist/goldenlayout.min.js'}}\n\n def _apply_root(self, name, model, tags):\n if 'main' in tags:\n model.margin = 10, 15, 10, 10\n\n def resolve_resources(self, cdn: (bool | Literal['auto'])='auto'\n ) ->ResourcesType:\n resources = super().resolve_resources(cdn=cdn)\n del_theme = ('dark' if self._design.theme._name == 'default' else\n 'light')\n del resources['css'][f'golden-theme-{del_theme}']\n return resources\n",
"step-5": "\"\"\"\nGoldenTemplate based on the golden-layout library.\n\"\"\"\nfrom __future__ import annotations\n\nimport pathlib\n\nfrom typing import TYPE_CHECKING, Literal\n\nimport param\n\nfrom ...config import config\nfrom ...io.resources import JS_URLS\nfrom ..base import BasicTemplate\n\nif TYPE_CHECKING:\n from ...io.resources import ResourcesType\n\n\nclass GoldenTemplate(BasicTemplate):\n \"\"\"\n GoldenTemplate is built on top of golden-layout library.\n \"\"\"\n\n sidebar_width = param.Integer(default=20, constant=True, doc=\"\"\"\n The width of the sidebar in percent.\"\"\")\n\n _css = pathlib.Path(__file__).parent / 'golden.css'\n\n _template = pathlib.Path(__file__).parent / 'golden.html'\n\n _resources = {\n 'css': {\n 'goldenlayout': f\"{config.npm_cdn}/[email protected]/src/css/goldenlayout-base.css\",\n 'golden-theme-dark': f\"{config.npm_cdn}/[email protected]/src/css/goldenlayout-dark-theme.css\",\n 'golden-theme-light': f\"{config.npm_cdn}/[email protected]/src/css/goldenlayout-light-theme.css\"\n },\n 'js': {\n 'jquery': JS_URLS['jQuery'],\n 'goldenlayout': f\"{config.npm_cdn}/[email protected]/dist/goldenlayout.min.js\"\n }\n }\n\n def _apply_root(self, name, model, tags):\n if 'main' in tags:\n model.margin = (10, 15, 10, 10)\n\n def resolve_resources(self, cdn: bool | Literal['auto'] = 'auto') -> ResourcesType:\n resources = super().resolve_resources(cdn=cdn)\n del_theme = 'dark' if self._design.theme._name =='default' else 'light'\n del resources['css'][f'golden-theme-{del_theme}']\n return resources\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
def count_singlekey(inputDict, keyword):
    # sample input
    # inputDict = {
    #     'abName1': {'dna': 'atgc', 'protein': 'x'},
    #     'abName2': {'dna': 'ctga', 'protein': 'y'},
    # }
    # returns {value_of_keyword: [first_abName_seen, count]}
    countDict = {}
    for abName, abInfo in inputDict.items():
        if abInfo[keyword] in countDict:
            countDict[abInfo[keyword]][1] += 1
        else:
            countDict[abInfo[keyword]] = [abName, 1]
    return countDict
def count_multikey(inputDict, keywords):
    # sample input
    # inputDict = {
    #     'abName1': {'dna': 'atgc', 'protein': 'x'},
    #     'abName2': {'dna': 'ctga', 'protein': 'y'},
    # }
    # keywords is a list of keys; counts are grouped by the combined key tuple
    keywords = tuple(sorted(keywords))  # sort a copy so the caller's list is not mutated
    countDict = {}
    for abName, abInfo in inputDict.items():
        combinedKey = []
        for k in keywords:
            combinedKey.append(abInfo[k])
        combinedKey = tuple(combinedKey)
        if combinedKey in countDict:
            countDict[combinedKey][1] += 1
        else:
            countDict[combinedKey] = [abName, 1]
    return countDict
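# Illustrative usage sketch (added for clarity, not part of the original file):
# the antibody names and sequence values below are made-up placeholders used
# only to show the expected input shape and return format of the two helpers.
if __name__ == '__main__':
    sample = {
        'ab1': {'dna': 'atgc', 'protein': 'x'},
        'ab2': {'dna': 'ctga', 'protein': 'x'},
        'ab3': {'dna': 'atgc', 'protein': 'y'},
    }
    print(count_singlekey(sample, 'protein'))
    # -> {'x': ['ab1', 2], 'y': ['ab3', 1]}
    print(count_multikey(sample, ['protein', 'dna']))
    # -> {('atgc', 'x'): ['ab1', 1], ('ctga', 'x'): ['ab2', 1], ('atgc', 'y'): ['ab3', 1]}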
|
normal
|
{
"blob_id": "b164dc8183c0dc460aa20883553fc73acd1e45ec",
"index": 7828,
"step-1": "<mask token>\n",
"step-2": "def count_singlekey(inputDict, keyword):\n countDict = {}\n for abName, abInfo in inputDict.iteritems():\n if countDict.has_key(abInfo[keyword]):\n countDict[abInfo[keyword]][1] += 1\n else:\n countDict[abInfo[keyword]] = [abName, 1]\n return countDict\n\n\n<mask token>\n",
"step-3": "def count_singlekey(inputDict, keyword):\n countDict = {}\n for abName, abInfo in inputDict.iteritems():\n if countDict.has_key(abInfo[keyword]):\n countDict[abInfo[keyword]][1] += 1\n else:\n countDict[abInfo[keyword]] = [abName, 1]\n return countDict\n\n\ndef count_multikey(inputDict, keywords):\n keywords.sort()\n keywords = tuple(keywords)\n countDict = {}\n for abName, abInfo in inputDict.iteritems():\n combinedKey = []\n for k in keywords:\n combinedKey.append(abInfo[k])\n combinedKey = tuple(combinedKey)\n if countDict.has_key(combinedKey):\n countDict[combinedKey][1] += 1\n else:\n countDict[combinedKey] = [abName, 1]\n return countDict\n",
"step-4": "def count_singlekey(inputDict, keyword):\n # sample input\n # inputDict = {\n # abName1: { dna: 'atgc', protein: 'x' }\n # abName2: { dna: 'ctga', protein: 'y' }\n # }\n\n countDict = {}\n for abName, abInfo in inputDict.iteritems():\n if countDict.has_key(abInfo[keyword]):\n countDict[abInfo[keyword]][1] += 1\n else:\n countDict[abInfo[keyword]] = [abName, 1]\n return countDict\n\n\ndef count_multikey(inputDict, keywords):\n # sample input\n # inputDict = {\n # abName1: { dna: 'atgc', protein: 'x' }\n # abName2: { dna: 'ctga', protein: 'y' }\n # }\n #keywords = list(keywords)\n keywords.sort()\n keywords = tuple(keywords)\n countDict = {}\n for abName, abInfo in inputDict.iteritems():\n combinedKey = []\n for k in keywords:\n combinedKey.append(abInfo[k])\n combinedKey = tuple(combinedKey)\n if countDict.has_key(combinedKey):\n countDict[combinedKey][1] += 1\n else:\n countDict[combinedKey] = [abName, 1]\n return countDict\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TriggerPipelineReference(Model):
"""Pipeline that needs to be triggered with the given parameters.
:param pipeline_reference: Pipeline reference.
:type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference
:param parameters: Pipeline parameters.
:type parameters: dict[str, object]
"""
_attribute_map = {
'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'},
'parameters': {'key': 'parameters', 'type': '{object}'},
}
def __init__(self, pipeline_reference=None, parameters=None):
self.pipeline_reference = pipeline_reference
self.parameters = parameters
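# Hedged usage sketch (not part of the generated SDK file): PipelineReference
# is assumed to be the sibling model from the same models package, and the
# pipeline name and parameter values below are placeholders.
#
#   from azure.mgmt.datafactory.models import PipelineReference
#
#   ref = TriggerPipelineReference(
#       pipeline_reference=PipelineReference(reference_name='myPipeline'),
#       parameters={'windowStart': '2016-12-19T00:00:00Z'},
#   )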
|
normal
|
{
"blob_id": "6a954197b13c9adf9f56b82bcea830aaf44e725f",
"index": 8999,
"step-1": "<mask token>\n\n\nclass TriggerPipelineReference(Model):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TriggerPipelineReference(Model):\n <mask token>\n _attribute_map = {'pipeline_reference': {'key': 'pipelineReference',\n 'type': 'PipelineReference'}, 'parameters': {'key': 'parameters',\n 'type': '{object}'}}\n\n def __init__(self, pipeline_reference=None, parameters=None):\n self.pipeline_reference = pipeline_reference\n self.parameters = parameters\n",
"step-3": "<mask token>\n\n\nclass TriggerPipelineReference(Model):\n \"\"\"Pipeline that needs to be triggered with the given parameters.\n\n :param pipeline_reference: Pipeline reference.\n :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference\n :param parameters: Pipeline parameters.\n :type parameters: dict[str, object]\n \"\"\"\n _attribute_map = {'pipeline_reference': {'key': 'pipelineReference',\n 'type': 'PipelineReference'}, 'parameters': {'key': 'parameters',\n 'type': '{object}'}}\n\n def __init__(self, pipeline_reference=None, parameters=None):\n self.pipeline_reference = pipeline_reference\n self.parameters = parameters\n",
"step-4": "from msrest.serialization import Model\n\n\nclass TriggerPipelineReference(Model):\n \"\"\"Pipeline that needs to be triggered with the given parameters.\n\n :param pipeline_reference: Pipeline reference.\n :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference\n :param parameters: Pipeline parameters.\n :type parameters: dict[str, object]\n \"\"\"\n _attribute_map = {'pipeline_reference': {'key': 'pipelineReference',\n 'type': 'PipelineReference'}, 'parameters': {'key': 'parameters',\n 'type': '{object}'}}\n\n def __init__(self, pipeline_reference=None, parameters=None):\n self.pipeline_reference = pipeline_reference\n self.parameters = parameters\n",
"step-5": "# coding=utf-8\n# --------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See License.txt in the project root for\n# license information.\n#\n# Code generated by Microsoft (R) AutoRest Code Generator.\n# Changes may cause incorrect behavior and will be lost if the code is\n# regenerated.\n# --------------------------------------------------------------------------\n\nfrom msrest.serialization import Model\n\n\nclass TriggerPipelineReference(Model):\n \"\"\"Pipeline that needs to be triggered with the given parameters.\n\n :param pipeline_reference: Pipeline reference.\n :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference\n :param parameters: Pipeline parameters.\n :type parameters: dict[str, object]\n \"\"\"\n\n _attribute_map = {\n 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'},\n 'parameters': {'key': 'parameters', 'type': '{object}'},\n }\n\n def __init__(self, pipeline_reference=None, parameters=None):\n self.pipeline_reference = pipeline_reference\n self.parameters = parameters\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-19 15:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='OpenHumansMember',
fields=[
('oh_id', models.CharField(max_length=16, primary_key=True, serialize=False, unique=True)),
('access_token', models.CharField(max_length=256)),
('refresh_token', models.CharField(max_length=256)),
('token_expires', models.DateTimeField()),
('seeq_id', models.IntegerField(null=True)),
],
),
]
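# Hedged sketch (not part of the generated migration): the models.py definition
# that this initial migration corresponds to would look roughly as follows;
# field arguments mirror the CreateModel call above.
#
#   from django.db import models
#
#   class OpenHumansMember(models.Model):
#       oh_id = models.CharField(max_length=16, primary_key=True, unique=True)
#       access_token = models.CharField(max_length=256)
#       refresh_token = models.CharField(max_length=256)
#       token_expires = models.DateTimeField()
#       seeq_id = models.IntegerField(null=True)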
|
normal
|
{
"blob_id": "28854823b1edc7df6cf025175811c1858efd2c42",
"index": 862,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='OpenHumansMember', fields=[(\n 'oh_id', models.CharField(max_length=16, primary_key=True,\n serialize=False, unique=True)), ('access_token', models.CharField(\n max_length=256)), ('refresh_token', models.CharField(max_length=256\n )), ('token_expires', models.DateTimeField()), ('seeq_id', models.\n IntegerField(null=True))])]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='OpenHumansMember', fields=[(\n 'oh_id', models.CharField(max_length=16, primary_key=True,\n serialize=False, unique=True)), ('access_token', models.CharField(\n max_length=256)), ('refresh_token', models.CharField(max_length=256\n )), ('token_expires', models.DateTimeField()), ('seeq_id', models.\n IntegerField(null=True))])]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.4 on 2016-12-19 15:25\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='OpenHumansMember',\n fields=[\n ('oh_id', models.CharField(max_length=16, primary_key=True, serialize=False, unique=True)),\n ('access_token', models.CharField(max_length=256)),\n ('refresh_token', models.CharField(max_length=256)),\n ('token_expires', models.DateTimeField()),\n ('seeq_id', models.IntegerField(null=True)),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
This file contains the ScoreLoop which is used to show
the user up to 10 of the highest scores made by the player
"""
import pygame
from score_fetcher import fetch_scores
from entities.sprite_text import TextSprite
class ScoreLoop:
def __init__(self):
self.scores = fetch_scores()
self.sprites = pygame.sprite.Group()
self.get_score_sprites()
self.space_cooldown = True
def get_score_sprites(self):
rank = 1
for score in self.scores:
self.sprites.add(
TextSprite(str(score), 256, 100+50*rank, True)
)
rank += 1
def increment(self):
keys = pygame.key.get_pressed()
if keys[pygame.K_SPACE]:
if self.space_cooldown:
return None
return "startloop"
self.space_cooldown = False
return None
def get_sprites(self):
"""retruns sprites for the UI"""
return self.sprites
if __name__ == "__main__":
pass
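# Hedged usage sketch (not part of the original file): a minimal driver loop
# showing how ScoreLoop is typically polled once per frame. The window size and
# frame rate are placeholder values; drawing uses the standard pygame Group API.
#
#   pygame.init()
#   screen = pygame.display.set_mode((512, 512))
#   clock = pygame.time.Clock()
#   score_loop = ScoreLoop()
#   running = True
#   while running:
#       for event in pygame.event.get():
#           if event.type == pygame.QUIT:
#               running = False
#       if score_loop.increment() == "startloop":
#           running = False  # hand control back to the game's start loop
#       screen.fill((0, 0, 0))
#       score_loop.get_sprites().draw(screen)
#       pygame.display.flip()
#       clock.tick(60)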
|
normal
|
{
"blob_id": "047b3398a73c9e7d75d43eeeab85f52c05ff90c3",
"index": 4534,
"step-1": "<mask token>\n\n\nclass ScoreLoop:\n\n def __init__(self):\n self.scores = fetch_scores()\n self.sprites = pygame.sprite.Group()\n self.get_score_sprites()\n self.space_cooldown = True\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ScoreLoop:\n\n def __init__(self):\n self.scores = fetch_scores()\n self.sprites = pygame.sprite.Group()\n self.get_score_sprites()\n self.space_cooldown = True\n\n def get_score_sprites(self):\n rank = 1\n for score in self.scores:\n self.sprites.add(TextSprite(str(score), 256, 100 + 50 * rank, True)\n )\n rank += 1\n\n def increment(self):\n keys = pygame.key.get_pressed()\n if keys[pygame.K_SPACE]:\n if self.space_cooldown:\n return None\n return 'startloop'\n self.space_cooldown = False\n return None\n\n def get_sprites(self):\n \"\"\"retruns sprites for the UI\"\"\"\n return self.sprites\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ScoreLoop:\n\n def __init__(self):\n self.scores = fetch_scores()\n self.sprites = pygame.sprite.Group()\n self.get_score_sprites()\n self.space_cooldown = True\n\n def get_score_sprites(self):\n rank = 1\n for score in self.scores:\n self.sprites.add(TextSprite(str(score), 256, 100 + 50 * rank, True)\n )\n rank += 1\n\n def increment(self):\n keys = pygame.key.get_pressed()\n if keys[pygame.K_SPACE]:\n if self.space_cooldown:\n return None\n return 'startloop'\n self.space_cooldown = False\n return None\n\n def get_sprites(self):\n \"\"\"retruns sprites for the UI\"\"\"\n return self.sprites\n\n\nif __name__ == '__main__':\n pass\n",
"step-4": "<mask token>\nimport pygame\nfrom score_fetcher import fetch_scores\nfrom entities.sprite_text import TextSprite\n\n\nclass ScoreLoop:\n\n def __init__(self):\n self.scores = fetch_scores()\n self.sprites = pygame.sprite.Group()\n self.get_score_sprites()\n self.space_cooldown = True\n\n def get_score_sprites(self):\n rank = 1\n for score in self.scores:\n self.sprites.add(TextSprite(str(score), 256, 100 + 50 * rank, True)\n )\n rank += 1\n\n def increment(self):\n keys = pygame.key.get_pressed()\n if keys[pygame.K_SPACE]:\n if self.space_cooldown:\n return None\n return 'startloop'\n self.space_cooldown = False\n return None\n\n def get_sprites(self):\n \"\"\"retruns sprites for the UI\"\"\"\n return self.sprites\n\n\nif __name__ == '__main__':\n pass\n",
"step-5": "\"\"\"\nThis file contains the ScoreLoop which is used to show\nthe user thw at most 10 highest scores made by the player\n\"\"\"\nimport pygame\nfrom score_fetcher import fetch_scores\nfrom entities.sprite_text import TextSprite\n\n\nclass ScoreLoop:\n\n def __init__(self):\n\n self.scores = fetch_scores()\n self.sprites = pygame.sprite.Group()\n self.get_score_sprites()\n\n self.space_cooldown = True\n\n def get_score_sprites(self):\n\n rank = 1\n\n for score in self.scores:\n self.sprites.add(\n TextSprite(str(score), 256, 100+50*rank, True)\n )\n rank += 1\n\n def increment(self):\n\n keys = pygame.key.get_pressed()\n\n if keys[pygame.K_SPACE]:\n if self.space_cooldown:\n return None\n return \"startloop\"\n self.space_cooldown = False\n return None\n\n def get_sprites(self):\n \"\"\"retruns sprites for the UI\"\"\"\n return self.sprites\n\n\nif __name__ == \"__main__\":\n\n pass\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
decoded = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "=", "."]
encoded = ["49", "48", "4B", "4A", "4D", "4C", "4F", "4E", "41", "40", "38", "3B", "3A", "3D", "3C", "3F", "3E", "31", "30", "33", "32", "35", "34", "37", "36", "29", "28", "2B", "2A", "2D", "2C", "2F", "2E", "21", "20", "23", "18", "1B", "1A", "1D", "1C", "1F", "1E", "11", "10", "13", "12", "15", "14", "17", "16", "09", "08", "0B", "0A", "0D", "0C", "0F", "0E", "01", "00", "03", "44", "57"]
def decode(value) :
out_value = ""
char = [value[i:i+2] for i in range(0, len(value), 2)]
for i in range(0, len(char)) :
out_value += decoded[encoded.index(char[i])]
return out_value
def encode(value) :
    out_value = ""
    char = [value[i:i+1] for i in range(0, len(value))]
    for i in range(0, len(char)) :
        out_value += encoded[decoded.index(char[i])]
    return out_value
if __name__ == "__main__" :
print("By default the program will open UserCustom.ini which should be in the directory as the program.")
user_input = str(input("Would you like to encode or decode UserCustom.ini ? (encode/decode) "))
const = "+CVars="
config = open("UserCustom.ini" , "r")
out_file = open("UserCustom.ini.out", "w")
out_value = ""
lines = config.readlines()
for i in range(0, len(lines)) :
if lines[i].startswith(const) :
value = lines[i].split(const)[-1].split("\n")[0]
if user_input.lower() == "encode" or user_input.lower() == "e" :
out_value = encode(value)
elif user_input.lower() == "decode" or user_input.lower() == "d" :
out_value = decode(value)
out_file.write(const + out_value + "\n")
else :
out_file.write(lines[i])
out_file.close()
config.close()
pass
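# Illustrative round-trip check (not part of the original script): encodes a
# short made-up CVar-style string with the tables above and decodes it back.
#
#   sample = "r.Test=1"
#   assert decode(encode(sample)) == sample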
|
normal
|
{
"blob_id": "23236cd8262eb414666db88215c01d973abf1d97",
"index": 1247,
"step-1": "<mask token>\n\n\ndef decode(value):\n out_value = ''\n char = [value[i:i + 2] for i in range(0, len(value), 2)]\n for i in range(0, len(char)):\n out_value += decoded[encoded.index(char[i])]\n return out_value\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef decode(value):\n out_value = ''\n char = [value[i:i + 2] for i in range(0, len(value), 2)]\n for i in range(0, len(char)):\n out_value += decoded[encoded.index(char[i])]\n return out_value\n\n\ndef encode(char):\n out_value = ''\n char = [value[i:i + 1] for i in range(0, len(value))]\n for i in range(0, len(char)):\n out_value += encoded[decoded.index(char[i])]\n return out_value\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef decode(value):\n out_value = ''\n char = [value[i:i + 2] for i in range(0, len(value), 2)]\n for i in range(0, len(char)):\n out_value += decoded[encoded.index(char[i])]\n return out_value\n\n\ndef encode(char):\n out_value = ''\n char = [value[i:i + 1] for i in range(0, len(value))]\n for i in range(0, len(char)):\n out_value += encoded[decoded.index(char[i])]\n return out_value\n\n\nif __name__ == '__main__':\n print(\n 'By default the program will open UserCustom.ini which should be in the directory as the program.'\n )\n user_input = str(input(\n 'Would you like to encode or decode UserCustom.ini ? (encode/decode) ')\n )\n const = '+CVars='\n config = open('UserCustom.ini', 'r')\n out_file = open('UserCustom.ini.out', 'w')\n out_value = ''\n lines = config.readlines()\n for i in range(0, len(lines)):\n if lines[i].startswith(const):\n value = lines[i].split(const)[-1].split('\\n')[0]\n if user_input.lower() == 'encode' or user_input.lower() == 'e':\n out_value = encode(value)\n elif user_input.lower() == 'decode' or user_input.lower() == 'd':\n out_value = decode(value)\n out_file.write(const + out_value + '\\n')\n else:\n out_file.write(lines[i])\n out_file.close()\n config.close()\n pass\n",
"step-4": "decoded = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C',\n 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q',\n 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e',\n 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',\n 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', '.']\nencoded = ['49', '48', '4B', '4A', '4D', '4C', '4F', '4E', '41', '40', '38',\n '3B', '3A', '3D', '3C', '3F', '3E', '31', '30', '33', '32', '35', '34',\n '37', '36', '29', '28', '2B', '2A', '2D', '2C', '2F', '2E', '21', '20',\n '23', '18', '1B', '1A', '1D', '1C', '1F', '1E', '11', '10', '13', '12',\n '15', '14', '17', '16', '09', '08', '0B', '0A', '0D', '0C', '0F', '0E',\n '01', '00', '03', '44', '57']\n\n\ndef decode(value):\n out_value = ''\n char = [value[i:i + 2] for i in range(0, len(value), 2)]\n for i in range(0, len(char)):\n out_value += decoded[encoded.index(char[i])]\n return out_value\n\n\ndef encode(char):\n out_value = ''\n char = [value[i:i + 1] for i in range(0, len(value))]\n for i in range(0, len(char)):\n out_value += encoded[decoded.index(char[i])]\n return out_value\n\n\nif __name__ == '__main__':\n print(\n 'By default the program will open UserCustom.ini which should be in the directory as the program.'\n )\n user_input = str(input(\n 'Would you like to encode or decode UserCustom.ini ? (encode/decode) ')\n )\n const = '+CVars='\n config = open('UserCustom.ini', 'r')\n out_file = open('UserCustom.ini.out', 'w')\n out_value = ''\n lines = config.readlines()\n for i in range(0, len(lines)):\n if lines[i].startswith(const):\n value = lines[i].split(const)[-1].split('\\n')[0]\n if user_input.lower() == 'encode' or user_input.lower() == 'e':\n out_value = encode(value)\n elif user_input.lower() == 'decode' or user_input.lower() == 'd':\n out_value = decode(value)\n out_file.write(const + out_value + '\\n')\n else:\n out_file.write(lines[i])\n out_file.close()\n config.close()\n pass\n",
"step-5": "decoded = [\"0\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\", \"I\", \"J\", \"K\", \"L\", \"M\", \"N\", \"O\", \"P\", \"Q\", \"R\", \"S\", \"T\", \"U\", \"V\", \"W\", \"X\", \"Y\", \"Z\", \"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\", \"i\", \"j\", \"k\", \"l\", \"m\", \"n\", \"o\", \"p\", \"q\", \"r\", \"s\", \"t\", \"u\", \"v\", \"w\", \"x\", \"y\", \"z\", \"=\", \".\"]\nencoded = [\"49\", \"48\", \"4B\", \"4A\", \"4D\", \"4C\", \"4F\", \"4E\", \"41\", \"40\", \"38\", \"3B\", \"3A\", \"3D\", \"3C\", \"3F\", \"3E\", \"31\", \"30\", \"33\", \"32\", \"35\", \"34\", \"37\", \"36\", \"29\", \"28\", \"2B\", \"2A\", \"2D\", \"2C\", \"2F\", \"2E\", \"21\", \"20\", \"23\", \"18\", \"1B\", \"1A\", \"1D\", \"1C\", \"1F\", \"1E\", \"11\", \"10\", \"13\", \"12\", \"15\", \"14\", \"17\", \"16\", \"09\", \"08\", \"0B\", \"0A\", \"0D\", \"0C\", \"0F\", \"0E\", \"01\", \"00\", \"03\", \"44\", \"57\"]\n\ndef decode(value) : \n out_value = \"\"\n char = [value[i:i+2] for i in range(0, len(value), 2)]\n for i in range(0, len(char)) :\n out_value += decoded[encoded.index(char[i])]\n return out_value\n\ndef encode(char) : \n out_value = \"\"\n char = [value[i:i+1] for i in range(0, len(value))]\n for i in range(0, len(char)) :\n out_value += encoded[decoded.index(char[i])]\n return out_value\n\nif __name__ == \"__main__\" :\n print(\"By default the program will open UserCustom.ini which should be in the directory as the program.\")\n user_input = str(input(\"Would you like to encode or decode UserCustom.ini ? (encode/decode) \"))\n const = \"+CVars=\"\n config = open(\"UserCustom.ini\" , \"r\")\n out_file = open(\"UserCustom.ini.out\", \"w\")\n out_value = \"\"\n lines = config.readlines()\n for i in range(0, len(lines)) :\n if lines[i].startswith(const) :\n value = lines[i].split(const)[-1].split(\"\\n\")[0]\n if user_input.lower() == \"encode\" or user_input.lower() == \"e\" :\n out_value = encode(value)\n elif user_input.lower() == \"decode\" or user_input.lower() == \"d\" :\n out_value = decode(value)\n out_file.write(const + out_value + \"\\n\")\n else : \n out_file.write(lines[i]) \n out_file.close()\n config.close()\n pass",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import requests
url = 'https://item.jd.com/100008348550.html'
try:
r = requests.get(url)
r.raise_for_status()
print(r.encoding)
r.encoding = r.apparent_encoding
print(r.text[:1000])
print(r.apparent_encoding)
except requests.RequestException as exc:
    print('error:', exc)
|
normal
|
{
"blob_id": "0271c45a21047b948946dd76f147692bb16b8bcf",
"index": 5378,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n r = requests.get(url)\n r.raise_for_status()\n print(r.encoding)\n r.encoding = r.apparent_encoding\n print(r.text[:1000])\n print(r.apparent_encoding)\nexcept:\n print('error')\n",
"step-3": "<mask token>\nurl = 'https://item.jd.com/100008348550.html'\ntry:\n r = requests.get(url)\n r.raise_for_status()\n print(r.encoding)\n r.encoding = r.apparent_encoding\n print(r.text[:1000])\n print(r.apparent_encoding)\nexcept:\n print('error')\n",
"step-4": "import requests\nurl = 'https://item.jd.com/100008348550.html'\ntry:\n r = requests.get(url)\n r.raise_for_status()\n print(r.encoding)\n r.encoding = r.apparent_encoding\n print(r.text[:1000])\n print(r.apparent_encoding)\nexcept:\n print('error')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
list_angle_list = RmList()
variable_flag = 0
variable_i = 0
def user_defined_shoot():
global variable_flag
global variable_i
global list_angle_list
variable_i = 1
for count in range(3):
gimbal_ctrl.angle_ctrl(list_angle_list[1], list_angle_list[2])
gun_ctrl.fire_once()
variable_i = variable_i + 2
time.sleep(0.2)
def user_defined_storage_angle():
global variable_flag
global variable_i
global list_angle_list
led_ctrl.gun_led_on()
list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.
gimbal_axis_yaw))
list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.
gimbal_axis_pitch))
time.sleep(5)
led_ctrl.gun_led_off()
def start():
global variable_flag
global variable_i
global list_angle_list
robot_ctrl.set_mode(rm_define.robot_mode_free)
gimbal_ctrl.set_rotate_speed(180)
vision_ctrl.enable_detection(rm_define.vision_detection_marker)
vision_ctrl.detect_marker_and_aim(rm_define.marker_trans_red_heart)
time.sleep(5)
user_defined_storage_angle()
vision_ctrl.detect_marker_and_aim(rm_define.marker_number_three)
time.sleep(3)
user_defined_storage_angle()
user_defined_shoot()
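# Hedged refactoring sketch (not part of the original script): the aim-then-
# store sequence in start() repeats twice and could be folded into one helper.
# Only API calls already used above are assumed here.
#
#   def user_defined_aim_and_store(marker, wait_s):
#       vision_ctrl.detect_marker_and_aim(marker)
#       time.sleep(wait_s)
#       user_defined_storage_angle()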
|
normal
|
{
"blob_id": "012e4112970a07559f27fa2127cdffcc557a1566",
"index": 4638,
"step-1": "<mask token>\n\n\ndef user_defined_shoot():\n global variable_flag\n global variable_i\n global list_angle_list\n variable_i = 1\n for count in range(3):\n gimbal_ctrl.angle_ctrl(list_angle_list[1], list_angle_list[2])\n gun_ctrl.fire_once()\n variable_i = variable_i + 2\n time.sleep(0.2)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef user_defined_shoot():\n global variable_flag\n global variable_i\n global list_angle_list\n variable_i = 1\n for count in range(3):\n gimbal_ctrl.angle_ctrl(list_angle_list[1], list_angle_list[2])\n gun_ctrl.fire_once()\n variable_i = variable_i + 2\n time.sleep(0.2)\n\n\ndef user_defined_storage_angle():\n global variable_flag\n global variable_i\n global list_angle_list\n led_ctrl.gun_led_on()\n list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.\n gimbal_axis_yaw))\n list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.\n gimbal_axis_pitch))\n time.sleep(5)\n led_ctrl.gun_led_off()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef user_defined_shoot():\n global variable_flag\n global variable_i\n global list_angle_list\n variable_i = 1\n for count in range(3):\n gimbal_ctrl.angle_ctrl(list_angle_list[1], list_angle_list[2])\n gun_ctrl.fire_once()\n variable_i = variable_i + 2\n time.sleep(0.2)\n\n\ndef user_defined_storage_angle():\n global variable_flag\n global variable_i\n global list_angle_list\n led_ctrl.gun_led_on()\n list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.\n gimbal_axis_yaw))\n list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.\n gimbal_axis_pitch))\n time.sleep(5)\n led_ctrl.gun_led_off()\n\n\ndef start():\n global variable_flag\n global variable_i\n global list_angle_list\n robot_ctrl.set_mode(rm_define.robot_mode_free)\n gimbal_ctrl.set_rotate_speed(180)\n vision_ctrl.enable_detection(rm_define.vision_detection_marker)\n vision_ctrl.detect_marker_and_aim(rm_define.marker_trans_red_heart)\n time.sleep(5)\n user_defined_storage_angle()\n vision_ctrl.detect_marker_and_aim(rm_define.marker_number_three)\n time.sleep(3)\n user_defined_storage_angle()\n user_defined_shoot()\n",
"step-4": "list_angle_list = RmList()\nvariable_flag = 0\nvariable_i = 0\n\n\ndef user_defined_shoot():\n global variable_flag\n global variable_i\n global list_angle_list\n variable_i = 1\n for count in range(3):\n gimbal_ctrl.angle_ctrl(list_angle_list[1], list_angle_list[2])\n gun_ctrl.fire_once()\n variable_i = variable_i + 2\n time.sleep(0.2)\n\n\ndef user_defined_storage_angle():\n global variable_flag\n global variable_i\n global list_angle_list\n led_ctrl.gun_led_on()\n list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.\n gimbal_axis_yaw))\n list_angle_list.append(gimbal_ctrl.get_axis_angle(rm_define.\n gimbal_axis_pitch))\n time.sleep(5)\n led_ctrl.gun_led_off()\n\n\ndef start():\n global variable_flag\n global variable_i\n global list_angle_list\n robot_ctrl.set_mode(rm_define.robot_mode_free)\n gimbal_ctrl.set_rotate_speed(180)\n vision_ctrl.enable_detection(rm_define.vision_detection_marker)\n vision_ctrl.detect_marker_and_aim(rm_define.marker_trans_red_heart)\n time.sleep(5)\n user_defined_storage_angle()\n vision_ctrl.detect_marker_and_aim(rm_define.marker_number_three)\n time.sleep(3)\n user_defined_storage_angle()\n user_defined_shoot()\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
# coding: utf8
from __future__ import absolute_import
import numpy as np
def arr2str(arr, sep=", ", fmt="{}"):
"""
    Make a string from a list separated by ``sep`` and each item formatted
with ``fmt``.
"""
return sep.join([fmt.format(v) for v in arr])
def indent_wrap(s, indent=0, wrap=80):
"""
Wraps and indents a string ``s``.
Parameters
----------
s : str
The string to wrap.
indent : int
How far to indent each new line.
    wrap : int
Number of character after which to wrap the string.
Returns
-------
s : str
Indented and wrapped string, each line has length ``wrap``, except the
last one, which may have less than ``wrap`` characters.
Example
-------
>>> s = 2 * "abcdefghijklmnopqrstuvwxyz"
>>> indent_wrap(s, indent=0, wrap=26)
'abcdefghijklmnopqrstuvwxyz\nabcdefghijklmnopqrstuvwxyz'
>>> indent_wrap(s, indent=2, wrap=26)
' abcdefghijklmnopqrstuvwx\n yzabcdefghijklmnopqrstuv\n wxyz'
"""
split = wrap - indent
chunks = [indent * " " + s[i:i + split] for i in range(0, len(s), split)]
return "\n".join(chunks)
def serialize_ndarrays(d):
"""
    Recursively traverse through iterable object ``d`` and convert all occurring
ndarrays to lists to make it JSON serializable.
    Note: Works for 1D dicts with ndarrays at the first level. It is not tested
    against, and not meant to cover, every use case.
Made with code from: http://code.activestate.com/recipes/577504/
Parameters
----------
d : iterable
Can be dict, list, set, tuple or frozenset.
Returns
-------
d : iterable
Same as input, but all ndarrays replaced by lists.
"""
def dict_handler(d):
return d.items()
handlers = {list: enumerate, tuple: enumerate,
set: enumerate, frozenset: enumerate,
dict: dict_handler}
def serialize(o):
for typ, handler in handlers.items():
if isinstance(o, typ):
for key, val in handler(o):
if isinstance(val, np.ndarray):
o[key] = val.tolist()
else:
o[key] = serialize_ndarrays(o[key])
return o
return serialize(d)
def fill_dict_defaults(d, required_keys=None, opt_keys=None, noleft=True):
"""
Populate dictionary with data from a given dict ``d``, and check if ``d``
has required and optional keys. Set optionals with default if not present.
If input ``d`` is None and ``required_keys`` is empty, just return
``opt_keys``.
Parameters
----------
d : dict or None
        Input dictionary containing the data to be checked. If it is ``None``,
        then a copy of ``opt_keys`` is returned. If ``opt_keys`` is ``None``, a
        ``TypeError`` is raised. If ``d`` is ``None`` and ``required_keys`` is
        not, then a ``ValueError`` is raised.
required_keys : list or None, optional
Keys that must be present and set in ``d``. (default: None)
opt_keys : dict or None, optional
Keys that are optional. ``opt_keys`` provides optional keys and default
values ``d`` is filled with if not present in ``d``. (default: None)
noleft : bool, optional
        If True, raises a ``KeyError`` when ``d`` contains extra keys other
than those given in ``required_keys`` and ``opt_keys``. (default: True)
Returns
-------
out : dict
Contains all required and optional keys, using default values, where
optional keys were missing. If ``d`` was None, a copy of ``opt_keys`` is
returned, if ``opt_keys`` was not ``None``.
"""
if required_keys is None:
required_keys = []
if opt_keys is None:
opt_keys = {}
if d is None:
if not required_keys:
if opt_keys is None:
                raise TypeError("`d` and `opt_keys` are both None.")
return opt_keys.copy()
else:
raise ValueError("`d` is None, but `required_keys` is not empty.")
d = d.copy()
out = {}
# Set required keys
for key in required_keys:
if key in d:
out[key] = d.pop(key)
else:
raise KeyError("Dict is missing required key '{}'.".format(key))
# Set optional values, if key not given
for key, val in opt_keys.items():
out[key] = d.pop(key, val)
# Complain when extra keys are left and noleft is True
if d and noleft:
raise KeyError("Leftover keys ['{}'].".format(
"', '".join(list(d.keys()))))
return out
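# Illustrative usage sketch (added for clarity, not part of the original
# module): all keys and values below are made-up placeholders showing how the
# helpers fit together.
if __name__ == '__main__':
    cfg = fill_dict_defaults(
        {"nbins": 10, "range": np.array([0., 1.])},
        required_keys=["nbins"],
        opt_keys={"range": None, "density": False})
    # cfg -> {'nbins': 10, 'range': array([0., 1.]), 'density': False}
    print(arr2str(cfg["range"], sep=" | ", fmt="{:.1f}"))  # 0.0 | 1.0
    print(serialize_ndarrays(cfg))  # the ndarray is replaced by a plain list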
|
normal
|
{
"blob_id": "3b4799f43ec497978bea3ac7ecf8c6aaeb2180b4",
"index": 3867,
"step-1": "<mask token>\n\n\ndef indent_wrap(s, indent=0, wrap=80):\n \"\"\"\n Wraps and indents a string ``s``.\n\n Parameters\n ----------\n s : str\n The string to wrap.\n indent : int\n How far to indent each new line.\n wrape : int\n Number of character after which to wrap the string.\n\n Returns\n -------\n s : str\n Indented and wrapped string, each line has length ``wrap``, except the\n last one, which may have less than ``wrap`` characters.\n\n Example\n -------\n >>> s = 2 * \"abcdefghijklmnopqrstuvwxyz\"\n >>> indent_wrap(s, indent=0, wrap=26)\n 'abcdefghijklmnopqrstuvwxyz\nabcdefghijklmnopqrstuvwxyz'\n >>> indent_wrap(s, indent=2, wrap=26)\n ' abcdefghijklmnopqrstuvwx\n yzabcdefghijklmnopqrstuv\n wxyz'\n \"\"\"\n split = wrap - indent\n chunks = [(indent * ' ' + s[i:i + split]) for i in range(0, len(s), split)]\n return '\\n'.join(chunks)\n\n\ndef serialize_ndarrays(d):\n \"\"\"\n Recursively traverse through iterable object ``d`` and convert all occuring\n ndarrays to lists to make it JSON serializable.\n\n Note: Works for 1D dicts with ndarrays at first level. Certainly not tested\n and meant to work for all use cases.\n Made with code from: http://code.activestate.com/recipes/577504/\n\n Parameters\n ----------\n d : iterable\n Can be dict, list, set, tuple or frozenset.\n\n Returns\n -------\n d : iterable\n Same as input, but all ndarrays replaced by lists.\n \"\"\"\n\n def dict_handler(d):\n return d.items()\n handlers = {list: enumerate, tuple: enumerate, set: enumerate,\n frozenset: enumerate, dict: dict_handler}\n\n def serialize(o):\n for typ, handler in handlers.items():\n if isinstance(o, typ):\n for key, val in handler(o):\n if isinstance(val, np.ndarray):\n o[key] = val.tolist()\n else:\n o[key] = serialize_ndarrays(o[key])\n return o\n return serialize(d)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef arr2str(arr, sep=', ', fmt='{}'):\n \"\"\"\n Make a string from a list seperated by ``sep`` and each item formatted\n with ``fmt``.\n \"\"\"\n return sep.join([fmt.format(v) for v in arr])\n\n\ndef indent_wrap(s, indent=0, wrap=80):\n \"\"\"\n Wraps and indents a string ``s``.\n\n Parameters\n ----------\n s : str\n The string to wrap.\n indent : int\n How far to indent each new line.\n wrape : int\n Number of character after which to wrap the string.\n\n Returns\n -------\n s : str\n Indented and wrapped string, each line has length ``wrap``, except the\n last one, which may have less than ``wrap`` characters.\n\n Example\n -------\n >>> s = 2 * \"abcdefghijklmnopqrstuvwxyz\"\n >>> indent_wrap(s, indent=0, wrap=26)\n 'abcdefghijklmnopqrstuvwxyz\nabcdefghijklmnopqrstuvwxyz'\n >>> indent_wrap(s, indent=2, wrap=26)\n ' abcdefghijklmnopqrstuvwx\n yzabcdefghijklmnopqrstuv\n wxyz'\n \"\"\"\n split = wrap - indent\n chunks = [(indent * ' ' + s[i:i + split]) for i in range(0, len(s), split)]\n return '\\n'.join(chunks)\n\n\ndef serialize_ndarrays(d):\n \"\"\"\n Recursively traverse through iterable object ``d`` and convert all occuring\n ndarrays to lists to make it JSON serializable.\n\n Note: Works for 1D dicts with ndarrays at first level. Certainly not tested\n and meant to work for all use cases.\n Made with code from: http://code.activestate.com/recipes/577504/\n\n Parameters\n ----------\n d : iterable\n Can be dict, list, set, tuple or frozenset.\n\n Returns\n -------\n d : iterable\n Same as input, but all ndarrays replaced by lists.\n \"\"\"\n\n def dict_handler(d):\n return d.items()\n handlers = {list: enumerate, tuple: enumerate, set: enumerate,\n frozenset: enumerate, dict: dict_handler}\n\n def serialize(o):\n for typ, handler in handlers.items():\n if isinstance(o, typ):\n for key, val in handler(o):\n if isinstance(val, np.ndarray):\n o[key] = val.tolist()\n else:\n o[key] = serialize_ndarrays(o[key])\n return o\n return serialize(d)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef arr2str(arr, sep=', ', fmt='{}'):\n \"\"\"\n Make a string from a list seperated by ``sep`` and each item formatted\n with ``fmt``.\n \"\"\"\n return sep.join([fmt.format(v) for v in arr])\n\n\ndef indent_wrap(s, indent=0, wrap=80):\n \"\"\"\n Wraps and indents a string ``s``.\n\n Parameters\n ----------\n s : str\n The string to wrap.\n indent : int\n How far to indent each new line.\n wrape : int\n Number of character after which to wrap the string.\n\n Returns\n -------\n s : str\n Indented and wrapped string, each line has length ``wrap``, except the\n last one, which may have less than ``wrap`` characters.\n\n Example\n -------\n >>> s = 2 * \"abcdefghijklmnopqrstuvwxyz\"\n >>> indent_wrap(s, indent=0, wrap=26)\n 'abcdefghijklmnopqrstuvwxyz\nabcdefghijklmnopqrstuvwxyz'\n >>> indent_wrap(s, indent=2, wrap=26)\n ' abcdefghijklmnopqrstuvwx\n yzabcdefghijklmnopqrstuv\n wxyz'\n \"\"\"\n split = wrap - indent\n chunks = [(indent * ' ' + s[i:i + split]) for i in range(0, len(s), split)]\n return '\\n'.join(chunks)\n\n\ndef serialize_ndarrays(d):\n \"\"\"\n Recursively traverse through iterable object ``d`` and convert all occuring\n ndarrays to lists to make it JSON serializable.\n\n Note: Works for 1D dicts with ndarrays at first level. Certainly not tested\n and meant to work for all use cases.\n Made with code from: http://code.activestate.com/recipes/577504/\n\n Parameters\n ----------\n d : iterable\n Can be dict, list, set, tuple or frozenset.\n\n Returns\n -------\n d : iterable\n Same as input, but all ndarrays replaced by lists.\n \"\"\"\n\n def dict_handler(d):\n return d.items()\n handlers = {list: enumerate, tuple: enumerate, set: enumerate,\n frozenset: enumerate, dict: dict_handler}\n\n def serialize(o):\n for typ, handler in handlers.items():\n if isinstance(o, typ):\n for key, val in handler(o):\n if isinstance(val, np.ndarray):\n o[key] = val.tolist()\n else:\n o[key] = serialize_ndarrays(o[key])\n return o\n return serialize(d)\n\n\ndef fill_dict_defaults(d, required_keys=None, opt_keys=None, noleft=True):\n \"\"\"\n Populate dictionary with data from a given dict ``d``, and check if ``d``\n has required and optional keys. Set optionals with default if not present.\n\n If input ``d`` is None and ``required_keys`` is empty, just return\n ``opt_keys``.\n\n Parameters\n ----------\n d : dict or None\n Input dictionary containing the data to be checked. If is ``None``, then\n a copy of ``opt_keys`` is returned. If ``opt_keys`` is ``None``, a\n ``TypeError`` is raised. If ``d``is ``None`` and ``required_keys`` is\n not, then a ``ValueError`` israised.\n required_keys : list or None, optional\n Keys that must be present and set in ``d``. (default: None)\n opt_keys : dict or None, optional\n Keys that are optional. ``opt_keys`` provides optional keys and default\n values ``d`` is filled with if not present in ``d``. (default: None)\n noleft : bool, optional\n If True, raises a ``KeyError``, when ``d`` contains etxra keys, other\n than those given in ``required_keys`` and ``opt_keys``. (default: True)\n\n Returns\n -------\n out : dict\n Contains all required and optional keys, using default values, where\n optional keys were missing. 
If ``d`` was None, a copy of ``opt_keys`` is\n returned, if ``opt_keys`` was not ``None``.\n \"\"\"\n if required_keys is None:\n required_keys = []\n if opt_keys is None:\n opt_keys = {}\n if d is None:\n if not required_keys:\n if opt_keys is None:\n raise TypeError('`d` and òpt_keys` are both None.')\n return opt_keys.copy()\n else:\n raise ValueError('`d` is None, but `required_keys` is not empty.')\n d = d.copy()\n out = {}\n for key in required_keys:\n if key in d:\n out[key] = d.pop(key)\n else:\n raise KeyError(\"Dict is missing required key '{}'.\".format(key))\n for key, val in opt_keys.items():\n out[key] = d.pop(key, val)\n if d and noleft:\n raise KeyError(\"Leftover keys ['{}'].\".format(\"', '\".join(list(d.\n keys()))))\n return out\n",
"step-4": "from __future__ import absolute_import\nimport numpy as np\n\n\ndef arr2str(arr, sep=', ', fmt='{}'):\n \"\"\"\n Make a string from a list seperated by ``sep`` and each item formatted\n with ``fmt``.\n \"\"\"\n return sep.join([fmt.format(v) for v in arr])\n\n\ndef indent_wrap(s, indent=0, wrap=80):\n \"\"\"\n Wraps and indents a string ``s``.\n\n Parameters\n ----------\n s : str\n The string to wrap.\n indent : int\n How far to indent each new line.\n wrape : int\n Number of character after which to wrap the string.\n\n Returns\n -------\n s : str\n Indented and wrapped string, each line has length ``wrap``, except the\n last one, which may have less than ``wrap`` characters.\n\n Example\n -------\n >>> s = 2 * \"abcdefghijklmnopqrstuvwxyz\"\n >>> indent_wrap(s, indent=0, wrap=26)\n 'abcdefghijklmnopqrstuvwxyz\nabcdefghijklmnopqrstuvwxyz'\n >>> indent_wrap(s, indent=2, wrap=26)\n ' abcdefghijklmnopqrstuvwx\n yzabcdefghijklmnopqrstuv\n wxyz'\n \"\"\"\n split = wrap - indent\n chunks = [(indent * ' ' + s[i:i + split]) for i in range(0, len(s), split)]\n return '\\n'.join(chunks)\n\n\ndef serialize_ndarrays(d):\n \"\"\"\n Recursively traverse through iterable object ``d`` and convert all occuring\n ndarrays to lists to make it JSON serializable.\n\n Note: Works for 1D dicts with ndarrays at first level. Certainly not tested\n and meant to work for all use cases.\n Made with code from: http://code.activestate.com/recipes/577504/\n\n Parameters\n ----------\n d : iterable\n Can be dict, list, set, tuple or frozenset.\n\n Returns\n -------\n d : iterable\n Same as input, but all ndarrays replaced by lists.\n \"\"\"\n\n def dict_handler(d):\n return d.items()\n handlers = {list: enumerate, tuple: enumerate, set: enumerate,\n frozenset: enumerate, dict: dict_handler}\n\n def serialize(o):\n for typ, handler in handlers.items():\n if isinstance(o, typ):\n for key, val in handler(o):\n if isinstance(val, np.ndarray):\n o[key] = val.tolist()\n else:\n o[key] = serialize_ndarrays(o[key])\n return o\n return serialize(d)\n\n\ndef fill_dict_defaults(d, required_keys=None, opt_keys=None, noleft=True):\n \"\"\"\n Populate dictionary with data from a given dict ``d``, and check if ``d``\n has required and optional keys. Set optionals with default if not present.\n\n If input ``d`` is None and ``required_keys`` is empty, just return\n ``opt_keys``.\n\n Parameters\n ----------\n d : dict or None\n Input dictionary containing the data to be checked. If is ``None``, then\n a copy of ``opt_keys`` is returned. If ``opt_keys`` is ``None``, a\n ``TypeError`` is raised. If ``d``is ``None`` and ``required_keys`` is\n not, then a ``ValueError`` israised.\n required_keys : list or None, optional\n Keys that must be present and set in ``d``. (default: None)\n opt_keys : dict or None, optional\n Keys that are optional. ``opt_keys`` provides optional keys and default\n values ``d`` is filled with if not present in ``d``. (default: None)\n noleft : bool, optional\n If True, raises a ``KeyError``, when ``d`` contains etxra keys, other\n than those given in ``required_keys`` and ``opt_keys``. (default: True)\n\n Returns\n -------\n out : dict\n Contains all required and optional keys, using default values, where\n optional keys were missing. 
If ``d`` was None, a copy of ``opt_keys`` is\n returned, if ``opt_keys`` was not ``None``.\n \"\"\"\n if required_keys is None:\n required_keys = []\n if opt_keys is None:\n opt_keys = {}\n if d is None:\n if not required_keys:\n if opt_keys is None:\n raise TypeError('`d` and òpt_keys` are both None.')\n return opt_keys.copy()\n else:\n raise ValueError('`d` is None, but `required_keys` is not empty.')\n d = d.copy()\n out = {}\n for key in required_keys:\n if key in d:\n out[key] = d.pop(key)\n else:\n raise KeyError(\"Dict is missing required key '{}'.\".format(key))\n for key, val in opt_keys.items():\n out[key] = d.pop(key, val)\n if d and noleft:\n raise KeyError(\"Leftover keys ['{}'].\".format(\"', '\".join(list(d.\n keys()))))\n return out\n",
"step-5": "# coding: utf8\n\nfrom __future__ import absolute_import\n\nimport numpy as np\n\n\ndef arr2str(arr, sep=\", \", fmt=\"{}\"):\n \"\"\"\n Make a string from a list seperated by ``sep`` and each item formatted\n with ``fmt``.\n \"\"\"\n return sep.join([fmt.format(v) for v in arr])\n\n\ndef indent_wrap(s, indent=0, wrap=80):\n \"\"\"\n Wraps and indents a string ``s``.\n\n Parameters\n ----------\n s : str\n The string to wrap.\n indent : int\n How far to indent each new line.\n wrape : int\n Number of character after which to wrap the string.\n\n Returns\n -------\n s : str\n Indented and wrapped string, each line has length ``wrap``, except the\n last one, which may have less than ``wrap`` characters.\n\n Example\n -------\n >>> s = 2 * \"abcdefghijklmnopqrstuvwxyz\"\n >>> indent_wrap(s, indent=0, wrap=26)\n 'abcdefghijklmnopqrstuvwxyz\\nabcdefghijklmnopqrstuvwxyz'\n >>> indent_wrap(s, indent=2, wrap=26)\n ' abcdefghijklmnopqrstuvwx\\n yzabcdefghijklmnopqrstuv\\n wxyz'\n \"\"\"\n split = wrap - indent\n chunks = [indent * \" \" + s[i:i + split] for i in range(0, len(s), split)]\n return \"\\n\".join(chunks)\n\n\ndef serialize_ndarrays(d):\n \"\"\"\n Recursively traverse through iterable object ``d`` and convert all occuring\n ndarrays to lists to make it JSON serializable.\n\n Note: Works for 1D dicts with ndarrays at first level. Certainly not tested\n and meant to work for all use cases.\n Made with code from: http://code.activestate.com/recipes/577504/\n\n Parameters\n ----------\n d : iterable\n Can be dict, list, set, tuple or frozenset.\n\n Returns\n -------\n d : iterable\n Same as input, but all ndarrays replaced by lists.\n \"\"\"\n def dict_handler(d):\n return d.items()\n\n handlers = {list: enumerate, tuple: enumerate,\n set: enumerate, frozenset: enumerate,\n dict: dict_handler}\n\n def serialize(o):\n for typ, handler in handlers.items():\n if isinstance(o, typ):\n for key, val in handler(o):\n if isinstance(val, np.ndarray):\n o[key] = val.tolist()\n else:\n o[key] = serialize_ndarrays(o[key])\n return o\n\n return serialize(d)\n\n\ndef fill_dict_defaults(d, required_keys=None, opt_keys=None, noleft=True):\n \"\"\"\n Populate dictionary with data from a given dict ``d``, and check if ``d``\n has required and optional keys. Set optionals with default if not present.\n\n If input ``d`` is None and ``required_keys`` is empty, just return\n ``opt_keys``.\n\n Parameters\n ----------\n d : dict or None\n Input dictionary containing the data to be checked. If is ``None``, then\n a copy of ``opt_keys`` is returned. If ``opt_keys`` is ``None``, a\n ``TypeError`` is raised. If ``d``is ``None`` and ``required_keys`` is\n not, then a ``ValueError`` israised.\n required_keys : list or None, optional\n Keys that must be present and set in ``d``. (default: None)\n opt_keys : dict or None, optional\n Keys that are optional. ``opt_keys`` provides optional keys and default\n values ``d`` is filled with if not present in ``d``. (default: None)\n noleft : bool, optional\n If True, raises a ``KeyError``, when ``d`` contains etxra keys, other\n than those given in ``required_keys`` and ``opt_keys``. (default: True)\n\n Returns\n -------\n out : dict\n Contains all required and optional keys, using default values, where\n optional keys were missing. 
If ``d`` was None, a copy of ``opt_keys`` is\n returned, if ``opt_keys`` was not ``None``.\n \"\"\"\n if required_keys is None:\n required_keys = []\n if opt_keys is None:\n opt_keys = {}\n if d is None:\n if not required_keys:\n if opt_keys is None:\n raise TypeError(\"`d` and òpt_keys` are both None.\")\n return opt_keys.copy()\n else:\n raise ValueError(\"`d` is None, but `required_keys` is not empty.\")\n\n d = d.copy()\n out = {}\n # Set required keys\n for key in required_keys:\n if key in d:\n out[key] = d.pop(key)\n else:\n raise KeyError(\"Dict is missing required key '{}'.\".format(key))\n # Set optional values, if key not given\n for key, val in opt_keys.items():\n out[key] = d.pop(key, val)\n # Complain when extra keys are left and noleft is True\n if d and noleft:\n raise KeyError(\"Leftover keys ['{}'].\".format(\n \"', '\".join(list(d.keys()))))\n return out\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
"""Calculator is built using "ping pong" algorithm, without eval() etc.
Main final function: calculate_expression().
calculate_expression() uses two functions in utils.py: clear_and_convert() and calculator_without_parentheses().
calculator_without_parentheses() uses the two remaining functions:
it calls ping_calculate_pong(), which in turn performs each step with math_operation().
Allowed operations: +, -, *, /, **, use of parentheses. Spaces don't matter.
Negative numbers should be written as: (-34), float numbers: 3.4
Expression example: ((-2.3) + 3 ** (2 - 2)) * 2.2 + (6/(3 + 3)* (-2)) ** 2
"""
def math_operation(expression):
"""Simple calculator for two numbers in expression like 3 + 3."""
if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():
        # avoid a false error for float and negative numbers: strip '.' and '-' before the digit check
if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit() or \
not str(expression[2]).replace('.', '1').replace('-', '1').isdigit():
raise ValueError(f'{expression} - check this fragment, something wrong.')
if expression[2] == 0 and expression[1] == '/':
raise ValueError(f'{expression} - division by zero.')
operator = expression[1]
if operator == '**':
return expression[0]**expression[2]
elif operator == '*':
return expression[0]*expression[2]
elif operator == '/':
return expression[0]/expression[2]
elif operator == '+':
return expression[0]+expression[2]
elif operator == '-':
return expression[0]-expression[2]
def ping_calculate_pong(expression, operator_index):
"""The function takes two arguments.
Argument 1: an expression from which we will extract one subexpression.
Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.
The function:
1. takes the expression and extract one subexpression around math operator (like 2 + 2) - ping;
2. calculates subexpression result using function math_operation();
3. replaces in expression: subexpression to subexpression result - pong.
"""
if len(expression) < 3 or operator_index == len(expression)-1 or operator_index == 0:
raise ValueError(f'{expression} - check this fragment, something wrong.')
sub_expression = expression[operator_index - 1:operator_index + 2]
sub_result = math_operation(sub_expression)
expression[operator_index+1] = sub_result
del expression[operator_index-1:operator_index+1]
def calculator_without_parentheses(expression):
"""The function:
1. prioritizes mathematical operations in expression without any parentheses;
2. transfers expression and indexes of math operators to the function ping_calculate_pong();
3. returns result of calculations.
"""
j = 1
while len(expression) > j:
if "**" in expression:
ping_calculate_pong(expression, expression.index('**'))
elif '*' in expression or '/' in expression:
if '*' in expression and '/' in expression:
if expression.index('*') < expression.index('/'):
ping_calculate_pong(expression, expression.index('*'))
else:
ping_calculate_pong(expression, expression.index('/'))
elif '/' not in expression:
ping_calculate_pong(expression, expression.index('*'))
elif '*' not in expression:
ping_calculate_pong(expression, expression.index('/'))
elif '+' in expression or '-' in expression:
if '+' in expression and '-' in expression:
if expression.index('+') < expression.index('-'):
ping_calculate_pong(expression, expression.index('+'))
else:
ping_calculate_pong(expression, expression.index('-'))
elif '-' not in expression:
ping_calculate_pong(expression, expression.index('+'))
elif '+' not in expression:
ping_calculate_pong(expression, expression.index('-'))
else:
            j += 1  # protection against a possible infinite loop when an incorrect expression is entered
return expression
def clear_and_convert(string_math_expression):
"""This function takes string expression and converts it to list with int, float, and 'math signs'."""
# clear the expression of spaces and convert it to the list
cleared_expression = list(filter(lambda x: x != ' ', string_math_expression))
# check characters in the expression for correctness
check_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '-', '*', '/', '(', ')', '.']
for element in cleared_expression:
if element not in check_list:
raise ValueError(f'Houston, we have a problem. Element "{element}" in expression is not correct.')
# find multi-digit numbers and create new list num_exp with int
num_exp = []
number = ''
len_cleared_expression = len(cleared_expression)
for i, element in enumerate(cleared_expression):
if element.isdigit():
number += element
if i == len_cleared_expression - 1 or not cleared_expression[i+1].isdigit():
num_exp.append(int(number))
number = ''
else:
num_exp.append(element)
# find float numbers and update list num_exp
while '.' in num_exp:
i = num_exp.index('.')
if (i != 0 and i != len(num_exp) - 1
and isinstance(num_exp[i-1], int)
and isinstance(num_exp[i+1], int)):
float_number = float(str(num_exp[i-1]) + num_exp[i] + str(num_exp[i+1]))
num_exp[i+1] = float_number
del num_exp[i-1:i+1]
else:
raise ValueError('Something wrong with ".".')
# find negative numbers and create new list with negative numbers
neg_exp = []
excluded_index = None
neg_check_list = ['+', '-', '*', '/', '(']
len_num_exp = len(num_exp)
for i, element in enumerate(num_exp):
if element == '-':
if i == len_num_exp - 1:
raise ValueError('Something wrong with "-".')
elif isinstance(num_exp[i+1], int) and (i == 0 or num_exp[i-1] in neg_check_list):
n_number = int('-' + str(num_exp[i+1]))
neg_exp.append(n_number)
excluded_index = i + 1
elif isinstance(num_exp[i+1], float) and (i == 0 or num_exp[i-1] in neg_check_list):
n_number = float('-' + str(num_exp[i+1]))
neg_exp.append(n_number)
excluded_index = i + 1
else:
neg_exp.append(element)
elif i != excluded_index:
neg_exp.append(element)
# find exponent operator and create new list with final converted expression
converted_expression = []
i = 0
len_neg_exp = len(neg_exp)
while i < len_neg_exp:
if (i == 0 or i == len_neg_exp - 1) and neg_exp[i] == '*':
raise ValueError('Something wrong with "*".')
elif neg_exp[i] == '*' and neg_exp[i+1] == '*':
converted_expression.append('**')
i += 2
else:
converted_expression.append(neg_exp[i])
i += 1
return converted_expression
def calculate_expression(str_math_expression):
"""This function:
1. uses clear_and_convert() to prepare the string math expression for further calculations;
2. finds all subexpressions inside parentheses (if there are such);
3. transfers subexpression to calculator_without_parentheses() for further calculations;
4. replaces subexpression with the result;
5. returns final result of all calculations.
"""
expression = clear_and_convert(str_math_expression)
for element in expression.copy():
if ')' in expression:
if '(' in expression:
if expression.index(')') > expression.index('('):
z = expression.index(')')
a = z
while expression[a] != '(':
a -= 1
fragment = expression[a+1:z]
fr_result = calculator_without_parentheses(fragment)
if len(fr_result) != 1: # checking for an input error in a fragment of the expression like ((()))
raise ValueError(f'{fr_result} - check this fragment, something wrong.')
expression[z] = fr_result[0]
del expression[a:z]
else:
raise ValueError('Something wrong with parentheses.')
else:
raise ValueError('Something wrong with parentheses.')
else:
expression = calculator_without_parentheses(expression)
if len(expression) != 1:
raise ValueError('Something wrong in your expression.')
if len(expression) == 1:
return str(round(expression[0], 5))
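# Illustrative usage (added for clarity, not part of the original module):
# evaluates the example expression from the module docstring; it should print
# '1.14'.
if __name__ == '__main__':
    expr = '((-2.3) + 3 ** (2 - 2)) * 2.2 + (6/(3 + 3)* (-2)) ** 2'
    print(calculate_expression(expr))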
|
normal
|
{
"blob_id": "c336bb6cdadfb836ab68ebd5bbb210f63af3d084",
"index": 2287,
"step-1": "<mask token>\n\n\ndef ping_calculate_pong(expression, operator_index):\n \"\"\"The function takes two arguments.\n Argument 1: an expression from which we will extract one subexpression.\n Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.\n The function:\n 1. takes the expression and extract one subexpression around math operator (like 2 + 2) - ping;\n 2. calculates subexpression result using function math_operation();\n 3. replaces in expression: subexpression to subexpression result - pong.\n \"\"\"\n if len(expression) < 3 or operator_index == len(expression\n ) - 1 or operator_index == 0:\n raise ValueError(\n f'{expression} - check this fragment, something wrong.')\n sub_expression = expression[operator_index - 1:operator_index + 2]\n sub_result = math_operation(sub_expression)\n expression[operator_index + 1] = sub_result\n del expression[operator_index - 1:operator_index + 1]\n\n\ndef calculator_without_parentheses(expression):\n \"\"\"The function:\n 1. prioritizes mathematical operations in expression without any parentheses;\n 2. transfers expression and indexes of math operators to the function ping_calculate_pong();\n 3. returns result of calculations.\n \"\"\"\n j = 1\n while len(expression) > j:\n if '**' in expression:\n ping_calculate_pong(expression, expression.index('**'))\n elif '*' in expression or '/' in expression:\n if '*' in expression and '/' in expression:\n if expression.index('*') < expression.index('/'):\n ping_calculate_pong(expression, expression.index('*'))\n else:\n ping_calculate_pong(expression, expression.index('/'))\n elif '/' not in expression:\n ping_calculate_pong(expression, expression.index('*'))\n elif '*' not in expression:\n ping_calculate_pong(expression, expression.index('/'))\n elif '+' in expression or '-' in expression:\n if '+' in expression and '-' in expression:\n if expression.index('+') < expression.index('-'):\n ping_calculate_pong(expression, expression.index('+'))\n else:\n ping_calculate_pong(expression, expression.index('-'))\n elif '-' not in expression:\n ping_calculate_pong(expression, expression.index('+'))\n elif '+' not in expression:\n ping_calculate_pong(expression, expression.index('-'))\n else:\n j += 1\n return expression\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef math_operation(expression):\n \"\"\"Simple calculator for two numbers in expression like 3 + 3.\"\"\"\n if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():\n if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit(\n ) or not str(expression[2]).replace('.', '1').replace('-', '1'\n ).isdigit():\n raise ValueError(\n f'{expression} - check this fragment, something wrong.')\n if expression[2] == 0 and expression[1] == '/':\n raise ValueError(f'{expression} - division by zero.')\n operator = expression[1]\n if operator == '**':\n return expression[0] ** expression[2]\n elif operator == '*':\n return expression[0] * expression[2]\n elif operator == '/':\n return expression[0] / expression[2]\n elif operator == '+':\n return expression[0] + expression[2]\n elif operator == '-':\n return expression[0] - expression[2]\n\n\ndef ping_calculate_pong(expression, operator_index):\n \"\"\"The function takes two arguments.\n Argument 1: an expression from which we will extract one subexpression.\n Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.\n The function:\n 1. takes the expression and extract one subexpression around math operator (like 2 + 2) - ping;\n 2. calculates subexpression result using function math_operation();\n 3. replaces in expression: subexpression to subexpression result - pong.\n \"\"\"\n if len(expression) < 3 or operator_index == len(expression\n ) - 1 or operator_index == 0:\n raise ValueError(\n f'{expression} - check this fragment, something wrong.')\n sub_expression = expression[operator_index - 1:operator_index + 2]\n sub_result = math_operation(sub_expression)\n expression[operator_index + 1] = sub_result\n del expression[operator_index - 1:operator_index + 1]\n\n\ndef calculator_without_parentheses(expression):\n \"\"\"The function:\n 1. prioritizes mathematical operations in expression without any parentheses;\n 2. transfers expression and indexes of math operators to the function ping_calculate_pong();\n 3. returns result of calculations.\n \"\"\"\n j = 1\n while len(expression) > j:\n if '**' in expression:\n ping_calculate_pong(expression, expression.index('**'))\n elif '*' in expression or '/' in expression:\n if '*' in expression and '/' in expression:\n if expression.index('*') < expression.index('/'):\n ping_calculate_pong(expression, expression.index('*'))\n else:\n ping_calculate_pong(expression, expression.index('/'))\n elif '/' not in expression:\n ping_calculate_pong(expression, expression.index('*'))\n elif '*' not in expression:\n ping_calculate_pong(expression, expression.index('/'))\n elif '+' in expression or '-' in expression:\n if '+' in expression and '-' in expression:\n if expression.index('+') < expression.index('-'):\n ping_calculate_pong(expression, expression.index('+'))\n else:\n ping_calculate_pong(expression, expression.index('-'))\n elif '-' not in expression:\n ping_calculate_pong(expression, expression.index('+'))\n elif '+' not in expression:\n ping_calculate_pong(expression, expression.index('-'))\n else:\n j += 1\n return expression\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef math_operation(expression):\n \"\"\"Simple calculator for two numbers in expression like 3 + 3.\"\"\"\n if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():\n if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit(\n ) or not str(expression[2]).replace('.', '1').replace('-', '1'\n ).isdigit():\n raise ValueError(\n f'{expression} - check this fragment, something wrong.')\n if expression[2] == 0 and expression[1] == '/':\n raise ValueError(f'{expression} - division by zero.')\n operator = expression[1]\n if operator == '**':\n return expression[0] ** expression[2]\n elif operator == '*':\n return expression[0] * expression[2]\n elif operator == '/':\n return expression[0] / expression[2]\n elif operator == '+':\n return expression[0] + expression[2]\n elif operator == '-':\n return expression[0] - expression[2]\n\n\ndef ping_calculate_pong(expression, operator_index):\n \"\"\"The function takes two arguments.\n Argument 1: an expression from which we will extract one subexpression.\n Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.\n The function:\n 1. takes the expression and extract one subexpression around math operator (like 2 + 2) - ping;\n 2. calculates subexpression result using function math_operation();\n 3. replaces in expression: subexpression to subexpression result - pong.\n \"\"\"\n if len(expression) < 3 or operator_index == len(expression\n ) - 1 or operator_index == 0:\n raise ValueError(\n f'{expression} - check this fragment, something wrong.')\n sub_expression = expression[operator_index - 1:operator_index + 2]\n sub_result = math_operation(sub_expression)\n expression[operator_index + 1] = sub_result\n del expression[operator_index - 1:operator_index + 1]\n\n\ndef calculator_without_parentheses(expression):\n \"\"\"The function:\n 1. prioritizes mathematical operations in expression without any parentheses;\n 2. transfers expression and indexes of math operators to the function ping_calculate_pong();\n 3. 
returns result of calculations.\n \"\"\"\n j = 1\n while len(expression) > j:\n if '**' in expression:\n ping_calculate_pong(expression, expression.index('**'))\n elif '*' in expression or '/' in expression:\n if '*' in expression and '/' in expression:\n if expression.index('*') < expression.index('/'):\n ping_calculate_pong(expression, expression.index('*'))\n else:\n ping_calculate_pong(expression, expression.index('/'))\n elif '/' not in expression:\n ping_calculate_pong(expression, expression.index('*'))\n elif '*' not in expression:\n ping_calculate_pong(expression, expression.index('/'))\n elif '+' in expression or '-' in expression:\n if '+' in expression and '-' in expression:\n if expression.index('+') < expression.index('-'):\n ping_calculate_pong(expression, expression.index('+'))\n else:\n ping_calculate_pong(expression, expression.index('-'))\n elif '-' not in expression:\n ping_calculate_pong(expression, expression.index('+'))\n elif '+' not in expression:\n ping_calculate_pong(expression, expression.index('-'))\n else:\n j += 1\n return expression\n\n\ndef clear_and_convert(string_math_expression):\n \"\"\"This function takes string expression and converts it to list with int, float, and 'math signs'.\"\"\"\n cleared_expression = list(filter(lambda x: x != ' ',\n string_math_expression))\n check_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+',\n '-', '*', '/', '(', ')', '.']\n for element in cleared_expression:\n if element not in check_list:\n raise ValueError(\n f'Houston, we have a problem. Element \"{element}\" in expression is not correct.'\n )\n num_exp = []\n number = ''\n len_cleared_expression = len(cleared_expression)\n for i, element in enumerate(cleared_expression):\n if element.isdigit():\n number += element\n if i == len_cleared_expression - 1 or not cleared_expression[i + 1\n ].isdigit():\n num_exp.append(int(number))\n number = ''\n else:\n num_exp.append(element)\n while '.' in num_exp:\n i = num_exp.index('.')\n if i != 0 and i != len(num_exp) - 1 and isinstance(num_exp[i - 1], int\n ) and isinstance(num_exp[i + 1], int):\n float_number = float(str(num_exp[i - 1]) + num_exp[i] + str(\n num_exp[i + 1]))\n num_exp[i + 1] = float_number\n del num_exp[i - 1:i + 1]\n else:\n raise ValueError('Something wrong with \".\".')\n neg_exp = []\n excluded_index = None\n neg_check_list = ['+', '-', '*', '/', '(']\n len_num_exp = len(num_exp)\n for i, element in enumerate(num_exp):\n if element == '-':\n if i == len_num_exp - 1:\n raise ValueError('Something wrong with \"-\".')\n elif isinstance(num_exp[i + 1], int) and (i == 0 or num_exp[i -\n 1] in neg_check_list):\n n_number = int('-' + str(num_exp[i + 1]))\n neg_exp.append(n_number)\n excluded_index = i + 1\n elif isinstance(num_exp[i + 1], float) and (i == 0 or num_exp[i -\n 1] in neg_check_list):\n n_number = float('-' + str(num_exp[i + 1]))\n neg_exp.append(n_number)\n excluded_index = i + 1\n else:\n neg_exp.append(element)\n elif i != excluded_index:\n neg_exp.append(element)\n converted_expression = []\n i = 0\n len_neg_exp = len(neg_exp)\n while i < len_neg_exp:\n if (i == 0 or i == len_neg_exp - 1) and neg_exp[i] == '*':\n raise ValueError('Something wrong with \"*\".')\n elif neg_exp[i] == '*' and neg_exp[i + 1] == '*':\n converted_expression.append('**')\n i += 2\n else:\n converted_expression.append(neg_exp[i])\n i += 1\n return converted_expression\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef math_operation(expression):\n \"\"\"Simple calculator for two numbers in expression like 3 + 3.\"\"\"\n if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():\n if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit(\n ) or not str(expression[2]).replace('.', '1').replace('-', '1'\n ).isdigit():\n raise ValueError(\n f'{expression} - check this fragment, something wrong.')\n if expression[2] == 0 and expression[1] == '/':\n raise ValueError(f'{expression} - division by zero.')\n operator = expression[1]\n if operator == '**':\n return expression[0] ** expression[2]\n elif operator == '*':\n return expression[0] * expression[2]\n elif operator == '/':\n return expression[0] / expression[2]\n elif operator == '+':\n return expression[0] + expression[2]\n elif operator == '-':\n return expression[0] - expression[2]\n\n\ndef ping_calculate_pong(expression, operator_index):\n \"\"\"The function takes two arguments.\n Argument 1: an expression from which we will extract one subexpression.\n Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.\n The function:\n 1. takes the expression and extract one subexpression around math operator (like 2 + 2) - ping;\n 2. calculates subexpression result using function math_operation();\n 3. replaces in expression: subexpression to subexpression result - pong.\n \"\"\"\n if len(expression) < 3 or operator_index == len(expression\n ) - 1 or operator_index == 0:\n raise ValueError(\n f'{expression} - check this fragment, something wrong.')\n sub_expression = expression[operator_index - 1:operator_index + 2]\n sub_result = math_operation(sub_expression)\n expression[operator_index + 1] = sub_result\n del expression[operator_index - 1:operator_index + 1]\n\n\ndef calculator_without_parentheses(expression):\n \"\"\"The function:\n 1. prioritizes mathematical operations in expression without any parentheses;\n 2. transfers expression and indexes of math operators to the function ping_calculate_pong();\n 3. 
returns result of calculations.\n \"\"\"\n j = 1\n while len(expression) > j:\n if '**' in expression:\n ping_calculate_pong(expression, expression.index('**'))\n elif '*' in expression or '/' in expression:\n if '*' in expression and '/' in expression:\n if expression.index('*') < expression.index('/'):\n ping_calculate_pong(expression, expression.index('*'))\n else:\n ping_calculate_pong(expression, expression.index('/'))\n elif '/' not in expression:\n ping_calculate_pong(expression, expression.index('*'))\n elif '*' not in expression:\n ping_calculate_pong(expression, expression.index('/'))\n elif '+' in expression or '-' in expression:\n if '+' in expression and '-' in expression:\n if expression.index('+') < expression.index('-'):\n ping_calculate_pong(expression, expression.index('+'))\n else:\n ping_calculate_pong(expression, expression.index('-'))\n elif '-' not in expression:\n ping_calculate_pong(expression, expression.index('+'))\n elif '+' not in expression:\n ping_calculate_pong(expression, expression.index('-'))\n else:\n j += 1\n return expression\n\n\ndef clear_and_convert(string_math_expression):\n \"\"\"This function takes string expression and converts it to list with int, float, and 'math signs'.\"\"\"\n cleared_expression = list(filter(lambda x: x != ' ',\n string_math_expression))\n check_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+',\n '-', '*', '/', '(', ')', '.']\n for element in cleared_expression:\n if element not in check_list:\n raise ValueError(\n f'Houston, we have a problem. Element \"{element}\" in expression is not correct.'\n )\n num_exp = []\n number = ''\n len_cleared_expression = len(cleared_expression)\n for i, element in enumerate(cleared_expression):\n if element.isdigit():\n number += element\n if i == len_cleared_expression - 1 or not cleared_expression[i + 1\n ].isdigit():\n num_exp.append(int(number))\n number = ''\n else:\n num_exp.append(element)\n while '.' in num_exp:\n i = num_exp.index('.')\n if i != 0 and i != len(num_exp) - 1 and isinstance(num_exp[i - 1], int\n ) and isinstance(num_exp[i + 1], int):\n float_number = float(str(num_exp[i - 1]) + num_exp[i] + str(\n num_exp[i + 1]))\n num_exp[i + 1] = float_number\n del num_exp[i - 1:i + 1]\n else:\n raise ValueError('Something wrong with \".\".')\n neg_exp = []\n excluded_index = None\n neg_check_list = ['+', '-', '*', '/', '(']\n len_num_exp = len(num_exp)\n for i, element in enumerate(num_exp):\n if element == '-':\n if i == len_num_exp - 1:\n raise ValueError('Something wrong with \"-\".')\n elif isinstance(num_exp[i + 1], int) and (i == 0 or num_exp[i -\n 1] in neg_check_list):\n n_number = int('-' + str(num_exp[i + 1]))\n neg_exp.append(n_number)\n excluded_index = i + 1\n elif isinstance(num_exp[i + 1], float) and (i == 0 or num_exp[i -\n 1] in neg_check_list):\n n_number = float('-' + str(num_exp[i + 1]))\n neg_exp.append(n_number)\n excluded_index = i + 1\n else:\n neg_exp.append(element)\n elif i != excluded_index:\n neg_exp.append(element)\n converted_expression = []\n i = 0\n len_neg_exp = len(neg_exp)\n while i < len_neg_exp:\n if (i == 0 or i == len_neg_exp - 1) and neg_exp[i] == '*':\n raise ValueError('Something wrong with \"*\".')\n elif neg_exp[i] == '*' and neg_exp[i + 1] == '*':\n converted_expression.append('**')\n i += 2\n else:\n converted_expression.append(neg_exp[i])\n i += 1\n return converted_expression\n\n\ndef calculate_expression(str_math_expression):\n \"\"\"This function:\n 1. 
uses clear_and_convert() to prepare the string math expression for further calculations;\n 2. finds all subexpressions inside parentheses (if there are such);\n 3. transfers subexpression to calculator_without_parentheses() for further calculations;\n 4. replaces subexpression with the result;\n 5. returns final result of all calculations.\n \"\"\"\n expression = clear_and_convert(str_math_expression)\n for element in expression.copy():\n if ')' in expression:\n if '(' in expression:\n if expression.index(')') > expression.index('('):\n z = expression.index(')')\n a = z\n while expression[a] != '(':\n a -= 1\n fragment = expression[a + 1:z]\n fr_result = calculator_without_parentheses(fragment)\n if len(fr_result) != 1:\n raise ValueError(\n f'{fr_result} - check this fragment, something wrong.'\n )\n expression[z] = fr_result[0]\n del expression[a:z]\n else:\n raise ValueError('Something wrong with parentheses.')\n else:\n raise ValueError('Something wrong with parentheses.')\n else:\n expression = calculator_without_parentheses(expression)\n if len(expression) != 1:\n raise ValueError('Something wrong in your expression.')\n if len(expression) == 1:\n return str(round(expression[0], 5))\n",
"step-5": "\"\"\"Calculator is built using \"ping pong\" algorithm, without eval() etc.\nMain final function: calculate_expression().\ncalculate_expression() uses two functions in utils.py: clear_and_convert() and calculator_without_parentheses().\ncalculator_without_parentheses() uses two remaining functions:\nmath_operation() -> ping_calculate_pong() -> calculator_without_parentheses().\n\nAllowed operations: +, -, *, /, **, use of parentheses. Spaces don't matter.\nNegative numbers should be written as: (-34), float numbers: 3.4\nExpression example: ((-2.3) + 3 ** (2 - 2)) * 2.2 + (6/(3 + 3)* (-2)) ** 2\n\"\"\"\n\n\ndef math_operation(expression):\n \"\"\"Simple calculator for two numbers in expression like 3 + 3.\"\"\"\n if not str(expression[0]).isdigit() or not str(expression[2]).isdigit():\n # eliminates the error call for float and negative numbers\n if not str(expression[0]).replace('.', '1').replace('-', '1').isdigit() or \\\n not str(expression[2]).replace('.', '1').replace('-', '1').isdigit():\n raise ValueError(f'{expression} - check this fragment, something wrong.')\n if expression[2] == 0 and expression[1] == '/':\n raise ValueError(f'{expression} - division by zero.')\n operator = expression[1]\n if operator == '**':\n return expression[0]**expression[2]\n elif operator == '*':\n return expression[0]*expression[2]\n elif operator == '/':\n return expression[0]/expression[2]\n elif operator == '+':\n return expression[0]+expression[2]\n elif operator == '-':\n return expression[0]-expression[2]\n\n\ndef ping_calculate_pong(expression, operator_index):\n \"\"\"The function takes two arguments.\n Argument 1: an expression from which we will extract one subexpression.\n Argument 2: the index of the mathematical operator around which function takes the subexpression to extract.\n The function:\n 1. takes the expression and extract one subexpression around math operator (like 2 + 2) - ping;\n 2. calculates subexpression result using function math_operation();\n 3. replaces in expression: subexpression to subexpression result - pong.\n \"\"\"\n if len(expression) < 3 or operator_index == len(expression)-1 or operator_index == 0:\n raise ValueError(f'{expression} - check this fragment, something wrong.')\n sub_expression = expression[operator_index - 1:operator_index + 2]\n sub_result = math_operation(sub_expression)\n expression[operator_index+1] = sub_result\n del expression[operator_index-1:operator_index+1]\n\n\ndef calculator_without_parentheses(expression):\n \"\"\"The function:\n 1. prioritizes mathematical operations in expression without any parentheses;\n 2. transfers expression and indexes of math operators to the function ping_calculate_pong();\n 3. 
returns result of calculations.\n \"\"\"\n j = 1\n while len(expression) > j:\n if \"**\" in expression:\n ping_calculate_pong(expression, expression.index('**'))\n elif '*' in expression or '/' in expression:\n if '*' in expression and '/' in expression:\n if expression.index('*') < expression.index('/'):\n ping_calculate_pong(expression, expression.index('*'))\n else:\n ping_calculate_pong(expression, expression.index('/'))\n elif '/' not in expression:\n ping_calculate_pong(expression, expression.index('*'))\n elif '*' not in expression:\n ping_calculate_pong(expression, expression.index('/'))\n elif '+' in expression or '-' in expression:\n if '+' in expression and '-' in expression:\n if expression.index('+') < expression.index('-'):\n ping_calculate_pong(expression, expression.index('+'))\n else:\n ping_calculate_pong(expression, expression.index('-'))\n elif '-' not in expression:\n ping_calculate_pong(expression, expression.index('+'))\n elif '+' not in expression:\n ping_calculate_pong(expression, expression.index('-'))\n else:\n j += 1 # protection against a possible eternal loop when an incorrect expression is entered\n return expression\n\n\ndef clear_and_convert(string_math_expression):\n \"\"\"This function takes string expression and converts it to list with int, float, and 'math signs'.\"\"\"\n # clear the expression of spaces and convert it to the list\n cleared_expression = list(filter(lambda x: x != ' ', string_math_expression))\n # check characters in the expression for correctness\n check_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '-', '*', '/', '(', ')', '.']\n for element in cleared_expression:\n if element not in check_list:\n raise ValueError(f'Houston, we have a problem. Element \"{element}\" in expression is not correct.')\n # find multi-digit numbers and create new list num_exp with int\n num_exp = []\n number = ''\n len_cleared_expression = len(cleared_expression)\n for i, element in enumerate(cleared_expression):\n if element.isdigit():\n number += element\n if i == len_cleared_expression - 1 or not cleared_expression[i+1].isdigit():\n num_exp.append(int(number))\n number = ''\n else:\n num_exp.append(element)\n # find float numbers and update list num_exp\n while '.' 
in num_exp:\n i = num_exp.index('.')\n if (i != 0 and i != len(num_exp) - 1\n and isinstance(num_exp[i-1], int)\n and isinstance(num_exp[i+1], int)):\n float_number = float(str(num_exp[i-1]) + num_exp[i] + str(num_exp[i+1]))\n num_exp[i+1] = float_number\n del num_exp[i-1:i+1]\n else:\n raise ValueError('Something wrong with \".\".')\n # find negative numbers and create new list with negative numbers\n neg_exp = []\n excluded_index = None\n neg_check_list = ['+', '-', '*', '/', '(']\n len_num_exp = len(num_exp)\n for i, element in enumerate(num_exp):\n if element == '-':\n if i == len_num_exp - 1:\n raise ValueError('Something wrong with \"-\".')\n elif isinstance(num_exp[i+1], int) and (i == 0 or num_exp[i-1] in neg_check_list):\n n_number = int('-' + str(num_exp[i+1]))\n neg_exp.append(n_number)\n excluded_index = i + 1\n elif isinstance(num_exp[i+1], float) and (i == 0 or num_exp[i-1] in neg_check_list):\n n_number = float('-' + str(num_exp[i+1]))\n neg_exp.append(n_number)\n excluded_index = i + 1\n else:\n neg_exp.append(element)\n elif i != excluded_index:\n neg_exp.append(element)\n # find exponent operator and create new list with final converted expression\n converted_expression = []\n i = 0\n len_neg_exp = len(neg_exp)\n while i < len_neg_exp:\n if (i == 0 or i == len_neg_exp - 1) and neg_exp[i] == '*':\n raise ValueError('Something wrong with \"*\".')\n elif neg_exp[i] == '*' and neg_exp[i+1] == '*':\n converted_expression.append('**')\n i += 2\n else:\n converted_expression.append(neg_exp[i])\n i += 1\n return converted_expression\n\n\ndef calculate_expression(str_math_expression):\n \"\"\"This function:\n 1. uses clear_and_convert() to prepare the string math expression for further calculations;\n 2. finds all subexpressions inside parentheses (if there are such);\n 3. transfers subexpression to calculator_without_parentheses() for further calculations;\n 4. replaces subexpression with the result;\n 5. returns final result of all calculations.\n \"\"\"\n expression = clear_and_convert(str_math_expression)\n for element in expression.copy():\n if ')' in expression:\n if '(' in expression:\n if expression.index(')') > expression.index('('):\n z = expression.index(')')\n a = z\n while expression[a] != '(':\n a -= 1\n fragment = expression[a+1:z]\n fr_result = calculator_without_parentheses(fragment)\n if len(fr_result) != 1: # checking for an input error in a fragment of the expression like ((()))\n raise ValueError(f'{fr_result} - check this fragment, something wrong.')\n expression[z] = fr_result[0]\n del expression[a:z]\n else:\n raise ValueError('Something wrong with parentheses.')\n else:\n raise ValueError('Something wrong with parentheses.')\n else:\n expression = calculator_without_parentheses(expression)\n if len(expression) != 1:\n raise ValueError('Something wrong in your expression.')\n if len(expression) == 1:\n return str(round(expression[0], 5))\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.shortcuts import render, HttpResponse
from django.views.generic import TemplateView
from .models import Person, Stock_history
from django.http import Http404, HttpResponseRedirect
from .forms import NameForm, UploadFileForm
from .back import handle_uploaded_file, read_file
class IndexView(TemplateView):
def get(self, request):
price_history = Stock_history.objects.all()
context = {
'entry': price_history
}
return render(request, 'budget/index.html', context)
class DetailView(TemplateView):
def get(self, request, person_id):
try:
persons = Person.objects.all()
person = Person.objects.get(id=person_id)
except Person.DoesNotExist:
raise Http404("Person does not exist")
context = {
'persons': persons,
'person': person,
'first_name': person.first_name,
'last_name': person.last_name,
'income': person.income,
}
return render(request, 'budget/detail.html', context)
class PersonView(TemplateView):
def get(self, request):
persons = Person.objects.all()
context = {
'persons': persons,
}
return render(request, 'budget/person.html', context)
class AddView(TemplateView):
template = 'budget/add.html'
def get(self, request):
form = NameForm
context = {'form': form}
return render(request, self.template, context)
def post(self, request):
form = NameForm(request.POST)
if form.is_valid():
text = form.cleaned_data
form = NameForm()
p = Person(first_name=text['first_name'], last_name=text['last_name'], income = text['income'])
p.save()
context = {
'form': form,
'text': text,
}
return render(request, self.template, context)
class UploadView(TemplateView):
template_name = 'budget/upload.html'
def get(self, request):
form = UploadFileForm()
return render(request, self.template_name, {'form': form})
def post(self, request):
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
if form.is_valid():
handle_uploaded_file(request.FILES['file'])
read_file(request.FILES['file'])
return HttpResponseRedirect('/upload')
        # falling through re-renders the bound form so any validation errors are displayed
return render(request, self.template_name, {'form': form})
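

# Illustrative sketch (an assumption, not part of this file): one way these class-based views
# could be routed in the project's urls.py; the URL patterns and the 'budget' app name are
# guesses based only on the templates and redirects used above.
#
# from django.urls import path
# from budget import views
#
# urlpatterns = [
#     path('', views.IndexView.as_view(), name='index'),
#     path('person/', views.PersonView.as_view(), name='person'),
#     path('person/<int:person_id>/', views.DetailView.as_view(), name='detail'),
#     path('add/', views.AddView.as_view(), name='add'),
#     path('upload/', views.UploadView.as_view(), name='upload'),
# ]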
|
normal
|
{
"blob_id": "2d65ffa3fc8a5360702337d749884903b2cb0423",
"index": 2353,
"step-1": "<mask token>\n\n\nclass PersonView(TemplateView):\n\n def get(self, request):\n persons = Person.objects.all()\n context = {'persons': persons}\n return render(request, 'budget/person.html', context)\n\n\nclass AddView(TemplateView):\n template = 'budget/add.html'\n\n def get(self, request):\n form = NameForm\n context = {'form': form}\n return render(request, self.template, context)\n\n def post(self, request):\n form = NameForm(request.POST)\n if form.is_valid():\n text = form.cleaned_data\n form = NameForm()\n p = Person(first_name=text['first_name'], last_name=text[\n 'last_name'], income=text['income'])\n p.save()\n context = {'form': form, 'text': text}\n return render(request, self.template, context)\n\n\nclass UploadView(TemplateView):\n template_name = 'budget/upload.html'\n\n def get(self, request):\n form = UploadFileForm()\n return render(request, self.template_name, {'form': form})\n\n def post(self, request):\n if request.method == 'POST':\n form = UploadFileForm(request.POST, request.FILES)\n if form.is_valid():\n handle_uploaded_file(request.FILES['file'])\n read_file(request.FILES['file'])\n return HttpResponseRedirect('/upload')\n return render(request, self.template_name, {'form': form})\n",
"step-2": "<mask token>\n\n\nclass DetailView(TemplateView):\n <mask token>\n\n\nclass PersonView(TemplateView):\n\n def get(self, request):\n persons = Person.objects.all()\n context = {'persons': persons}\n return render(request, 'budget/person.html', context)\n\n\nclass AddView(TemplateView):\n template = 'budget/add.html'\n\n def get(self, request):\n form = NameForm\n context = {'form': form}\n return render(request, self.template, context)\n\n def post(self, request):\n form = NameForm(request.POST)\n if form.is_valid():\n text = form.cleaned_data\n form = NameForm()\n p = Person(first_name=text['first_name'], last_name=text[\n 'last_name'], income=text['income'])\n p.save()\n context = {'form': form, 'text': text}\n return render(request, self.template, context)\n\n\nclass UploadView(TemplateView):\n template_name = 'budget/upload.html'\n\n def get(self, request):\n form = UploadFileForm()\n return render(request, self.template_name, {'form': form})\n\n def post(self, request):\n if request.method == 'POST':\n form = UploadFileForm(request.POST, request.FILES)\n if form.is_valid():\n handle_uploaded_file(request.FILES['file'])\n read_file(request.FILES['file'])\n return HttpResponseRedirect('/upload')\n return render(request, self.template_name, {'form': form})\n",
"step-3": "<mask token>\n\n\nclass IndexView(TemplateView):\n <mask token>\n\n\nclass DetailView(TemplateView):\n\n def get(self, request, person_id):\n try:\n persons = Person.objects.all()\n person = Person.objects.get(id=person_id)\n except Person.DoesNotExist:\n raise Http404('Person does not exist')\n context = {'persons': persons, 'person': person, 'first_name':\n person.first_name, 'last_name': person.last_name, 'income':\n person.income}\n return render(request, 'budget/detail.html', context)\n\n\nclass PersonView(TemplateView):\n\n def get(self, request):\n persons = Person.objects.all()\n context = {'persons': persons}\n return render(request, 'budget/person.html', context)\n\n\nclass AddView(TemplateView):\n template = 'budget/add.html'\n\n def get(self, request):\n form = NameForm\n context = {'form': form}\n return render(request, self.template, context)\n\n def post(self, request):\n form = NameForm(request.POST)\n if form.is_valid():\n text = form.cleaned_data\n form = NameForm()\n p = Person(first_name=text['first_name'], last_name=text[\n 'last_name'], income=text['income'])\n p.save()\n context = {'form': form, 'text': text}\n return render(request, self.template, context)\n\n\nclass UploadView(TemplateView):\n template_name = 'budget/upload.html'\n\n def get(self, request):\n form = UploadFileForm()\n return render(request, self.template_name, {'form': form})\n\n def post(self, request):\n if request.method == 'POST':\n form = UploadFileForm(request.POST, request.FILES)\n if form.is_valid():\n handle_uploaded_file(request.FILES['file'])\n read_file(request.FILES['file'])\n return HttpResponseRedirect('/upload')\n return render(request, self.template_name, {'form': form})\n",
"step-4": "<mask token>\n\n\nclass IndexView(TemplateView):\n\n def get(self, request):\n price_history = Stock_history.objects.all()\n context = {'entry': price_history}\n return render(request, 'budget/index.html', context)\n\n\nclass DetailView(TemplateView):\n\n def get(self, request, person_id):\n try:\n persons = Person.objects.all()\n person = Person.objects.get(id=person_id)\n except Person.DoesNotExist:\n raise Http404('Person does not exist')\n context = {'persons': persons, 'person': person, 'first_name':\n person.first_name, 'last_name': person.last_name, 'income':\n person.income}\n return render(request, 'budget/detail.html', context)\n\n\nclass PersonView(TemplateView):\n\n def get(self, request):\n persons = Person.objects.all()\n context = {'persons': persons}\n return render(request, 'budget/person.html', context)\n\n\nclass AddView(TemplateView):\n template = 'budget/add.html'\n\n def get(self, request):\n form = NameForm\n context = {'form': form}\n return render(request, self.template, context)\n\n def post(self, request):\n form = NameForm(request.POST)\n if form.is_valid():\n text = form.cleaned_data\n form = NameForm()\n p = Person(first_name=text['first_name'], last_name=text[\n 'last_name'], income=text['income'])\n p.save()\n context = {'form': form, 'text': text}\n return render(request, self.template, context)\n\n\nclass UploadView(TemplateView):\n template_name = 'budget/upload.html'\n\n def get(self, request):\n form = UploadFileForm()\n return render(request, self.template_name, {'form': form})\n\n def post(self, request):\n if request.method == 'POST':\n form = UploadFileForm(request.POST, request.FILES)\n if form.is_valid():\n handle_uploaded_file(request.FILES['file'])\n read_file(request.FILES['file'])\n return HttpResponseRedirect('/upload')\n return render(request, self.template_name, {'form': form})\n",
"step-5": "from django.shortcuts import render, HttpResponse\nfrom django.views.generic import TemplateView\nfrom .models import Person, Stock_history\nfrom django.http import Http404, HttpResponseRedirect\nfrom .forms import NameForm, UploadFileForm\n\nfrom .back import handle_uploaded_file, read_file\n\nclass IndexView(TemplateView):\n def get(self, request):\n price_history = Stock_history.objects.all()\n context = {\n 'entry': price_history\n }\n return render(request, 'budget/index.html', context)\n\nclass DetailView(TemplateView):\n def get(self, request, person_id):\n try:\n persons = Person.objects.all()\n person = Person.objects.get(id=person_id)\n except Person.DoesNotExist:\n raise Http404(\"Person does not exist\")\n \n context = {\n 'persons': persons,\n 'person': person,\n 'first_name': person.first_name, \n 'last_name': person.last_name,\n 'income': person.income,\n\n }\n return render(request, 'budget/detail.html', context)\n\n\nclass PersonView(TemplateView):\n def get(self, request):\n persons = Person.objects.all()\n\n context = {\n 'persons': persons,\n }\n return render(request, 'budget/person.html', context)\n\nclass AddView(TemplateView):\n template = 'budget/add.html'\n\n def get(self, request):\n form = NameForm\n context = {'form': form}\n return render(request, self.template, context)\n\n\n def post(self, request):\n form = NameForm(request.POST)\n if form.is_valid():\n text = form.cleaned_data\n form = NameForm()\n p = Person(first_name=text['first_name'], last_name=text['last_name'], income = text['income'])\n p.save()\n context = {\n 'form': form,\n 'text': text,\n }\n\n return render(request, self.template, context)\n\n\nclass UploadView(TemplateView):\n template_name = 'budget/upload.html'\n\n def get(self, request):\n form = UploadFileForm()\n return render(request, self.template_name, {'form': form})\n\n def post(self, request):\n if request.method == 'POST':\n form = UploadFileForm(request.POST, request.FILES)\n if form.is_valid():\n handle_uploaded_file(request.FILES['file'])\n read_file(request.FILES['file'])\n return HttpResponseRedirect('/upload')\n #else:\n # form = UploadFileForm()\n return render(request, self.template_name, {'form': form})\n",
"step-ids": [
10,
11,
13,
14,
16
]
}
|
[
10,
11,
13,
14,
16
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 11 14:55:12 2019
@author: Furankyyy
"""
import numpy as np
import matplotlib.pyplot as plt
import timeit
###worst sort function###
#define the function that checks whether the list is in ascending order
def right_permutation(arr):
if len(arr)==1: #if there's only one element, then the order is always correct
return True
for i in range(len(arr)-1): #check every elements from the first to the second to the last
if arr[i+1]<arr[i]: #if the i+1th element is smaller than ith, the order is wrong, break the loop and return false
break
else:
if i == len(arr)-2: #if the i+1th element is greater than/equal to ith, check if we have already checked all the elements
return True #if we've already checked all the elements (i.e. i==len(arr)-2), return true; if not, continue the loop
return False
#define the worst sort function
def worstsort(arr):
sort_arr=[] #initialize output
check = False #initialize the result of right_permutation()
    while check == False: #while the order is wrong, generate a new permutation and check if its order is correct
sort_arr = np.random.permutation(arr)
check = right_permutation(sort_arr)
return sort_arr
#test cases
test1=[5,4,3,2,1]
test2=[1,2,3,4,5] #best case
test3=[2,2,2,2,2] #best case as well!
test4=[2] #only one element
print(worstsort(test1))
print(worstsort(test2))
print(worstsort(test3))
print(worstsort(test4))
#the best case is when the input list is already sorted; in this case, we only need to run right_permutation once
#we have a for loop in right_permutation, so the best case complexity is O(n)
#given a random input of size n, the chance that the input x_k is correctly sorted is Pr(x_k) = 1/P_n = 1/n!
#since in this worst algorithm, we do not "remember" the permutations that we've already checked
#so each time, the Pr(sorted) remains the same 1/n!
#Then we would expect to have n! times to have the corrected sorted list
#the reason is that we have E[Pr(x1)+Pr(x2)+...+Pr(x_k)]=1, since Pr(x_k)=1/n!, we would expect k = n!
#this reasoning is the same as the random indicator variable in the book, where we have the pr(I) for each choice (permutation) and we sum them to find the expected value
#so the average case complexity is O(n!)
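
#an optional empirical check (added illustration, not in the original analysis): counting how many
#random permutations are drawn before a sorted one appears should average close to n! for inputs
#with distinct elements, matching the expectation argument above
def worstsort_attempts(arr):
    attempts = 0
    while True:
        attempts += 1
        candidate = np.random.permutation(arr)
        if right_permutation(candidate):
            return attempts
#e.g. np.mean([worstsort_attempts([3, 1, 4, 2]) for _ in range(1000)]) should land near 4! = 24
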
#to calculate what n is best for this function
def factorial(n):
result=1
for i in range(n):
result=result*(i+1)
return result
x=np.arange(0,7,1)
y_factorial=list(map(factorial,x))
y_compare=x*x
plt.plot(x,y_factorial,label="Factorial of n")
plt.plot(x,y_compare,label="n square")
plt.title("Complexity comparison")
plt.legend()
#from the plot we can see that for algorithms with complexity of O(n^2) and O(n!), the difference comes when n=5
#when n=4, the two algorithms do not vary that much, but when n=5, they have a >100 times difference
#therefore, this method is feasible when n<=4
#p.s. constants are discounted (they are relatively unimportant)
###median finder###
#the worst case for the median finder is that the elements in the input list are unique
#the best case is that all elements are the same --> no matter which we choose, it is the median
#to consider the times we try before stopping, we need to consider the worst case --> all elements are different
#then the chance to find the exact median is 1/n
#the number of elements lying in the input deviation range x is x//(100/n)+1 for this worst case
#explanation: divide the 100% to n parts, if all elements are different then each element takes the 1 part, the x//(range for 1 part)+1 is the num of elements lying in the range
#therefore, the probability of choosing the element in the range given by x is (x//(100/n)+1)/n
#I want to try the expected times of choosing the correct element(s) for the worst case
#Pr(failure) for 1 try is 1-(x//(100/n)+1)/n
#Pr(failure) for the first k try is (1-(x//(100/n)+1)/n)^k, which scales with x and n.
#so the Pr(at least one success) for the first k try is 1-Pr(failure)=1-(1-(x//(100/n)+1)/n)^k
#we want to find a k that makes this Pr large enough
#so we want to find a small k minimizing Pr(failure) for the first k try
#to simplify the problem, we regard x as constant and assume the "//" is "/"
#(1-(x//(100/n)+1)/n)^k = ((n-xn/100-1)/n)^k =(1-x/100-1/n)^k
#x/100 is a constant
#-->(1-1/n)^k
#when n is sufficiently large, (1-1/n) is nearly 1
#it is extremely hard to succeed if n is very large; I set the limit of k at 10000 simply because of my laptop's limited computational ability
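
#numeric illustration (added, not original code) of the failure probability derived above for the
#worst case where all elements are distinct: k independent picks all miss the band with probability
#(1 - (x//(100/n)+1)/n)**k; the sample values of n, x and k below are arbitrary
def miss_probability(n, x, k):
    hits_in_band = x // (100 / n) + 1 #elements whose percentile falls inside the allowed band
    return (1 - hits_in_band / n) ** k
#e.g. miss_probability(200, 0.5, 10000) is essentially 0, while miss_probability(10**6, 0, 10000)
#stays near 0.99, which is why the 10000-try cap can fail for an exact median on huge inputs
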
def median_finder(arr,x):
tried = 0 #record the number of times of choosing the random element
if abs(x) <= 0.5: #when x is valid
lower=np.percentile(arr,50-x/2)
upper=np.percentile(arr,50+x/2)
while tried <10000:
find = np.random.randint(0,len(arr)) #find a new element
if lower<=arr[find] and arr[find]<=upper: #if the chosen element is in the range, return it
return arr[find]
else:
tried += 1
return "Tried enough times, still cannot find the value"
else:
return "x not in the domain"
#test cases
test1=list(np.random.permutation(200))
test2=[4]*100
test3=[5]*1000
test4=test2+test3
print(median_finder(test1,0.5)) #worst case, exactly 2 elements in the range
print(median_finder(test2,0.5)) #best case
print(median_finder(test2,0)) #best case
print(median_finder(test3,0.5)) #best case
print(median_finder(test4,0)) #1000/1100 probability
print(median_finder(test4,0.5)) #same as above.
#time complexity
#best case running time is O(1)
#the time complexity of the worst case running time is E[k]=Sum(E[ki])
#E[ki]=Pr(correct)=(x//(100/n)+1)/n
#sum is from 1 to the limit tried k
#since x is between 0 and 0.5, we simply regard it as constant
#we also assume the "//" is "/"
#then the expression becomes: E[k]= k*(xn/100+1)/n
#as n goes to infinity, we can solve this by trying to use L'Hopital's rule
#the result is kx/100, which is a constant
#O(1)
data=np.empty((1,2))
for i in range(200,1200,50):
testlist=list(np.random.permutation(i))
time=timeit.timeit(stmt="median_finder(testlist,0.5)",setup="from __main__ import median_finder,testlist",number=100)
time=time/100
stack=np.array((time,i))
data=np.vstack((data,stack))
data=data[1:]
plt.figure()
plt.ylim(0,0.01)
plt.scatter(x=data[:,1],y=data[:,0])
plt.xlabel("Inputsize")
plt.ylabel("Running time")
plt.title("Median finder running time")
#from the plot we can see that the running time is almost constant --> O(1)
#space complexity is O(n), because each time we just store the (sorted) list of length n
|
normal
|
{
"blob_id": "7c82565a4184b2e779e2bb6ba70b497cc287af35",
"index": 5285,
"step-1": "<mask token>\n\n\ndef right_permutation(arr):\n if len(arr) == 1:\n return True\n for i in range(len(arr) - 1):\n if arr[i + 1] < arr[i]:\n break\n elif i == len(arr) - 2:\n return True\n return False\n\n\ndef worstsort(arr):\n sort_arr = []\n check = False\n while check == False:\n sort_arr = np.random.permutation(arr)\n check = right_permutation(sort_arr)\n return sort_arr\n\n\n<mask token>\n\n\ndef factorial(n):\n result = 1\n for i in range(n):\n result = result * (i + 1)\n return result\n\n\n<mask token>\n\n\ndef median_finder(arr, x):\n tried = 0\n if abs(x) <= 0.5:\n lower = np.percentile(arr, 50 - x / 2)\n upper = np.percentile(arr, 50 + x / 2)\n while tried < 10000:\n find = np.random.randint(0, len(arr))\n if lower <= arr[find] and arr[find] <= upper:\n return arr[find]\n else:\n tried += 1\n return 'Tried enough times, still cannot find the value'\n else:\n return 'x not in the domain'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef right_permutation(arr):\n if len(arr) == 1:\n return True\n for i in range(len(arr) - 1):\n if arr[i + 1] < arr[i]:\n break\n elif i == len(arr) - 2:\n return True\n return False\n\n\ndef worstsort(arr):\n sort_arr = []\n check = False\n while check == False:\n sort_arr = np.random.permutation(arr)\n check = right_permutation(sort_arr)\n return sort_arr\n\n\n<mask token>\nprint(worstsort(test1))\nprint(worstsort(test2))\nprint(worstsort(test3))\nprint(worstsort(test4))\n\n\ndef factorial(n):\n result = 1\n for i in range(n):\n result = result * (i + 1)\n return result\n\n\n<mask token>\nplt.plot(x, y_factorial, label='Factorial of n')\nplt.plot(x, y_compare, label='n square')\nplt.title('Complexity comparison')\nplt.legend()\n\n\ndef median_finder(arr, x):\n tried = 0\n if abs(x) <= 0.5:\n lower = np.percentile(arr, 50 - x / 2)\n upper = np.percentile(arr, 50 + x / 2)\n while tried < 10000:\n find = np.random.randint(0, len(arr))\n if lower <= arr[find] and arr[find] <= upper:\n return arr[find]\n else:\n tried += 1\n return 'Tried enough times, still cannot find the value'\n else:\n return 'x not in the domain'\n\n\n<mask token>\nprint(median_finder(test1, 0.5))\nprint(median_finder(test2, 0.5))\nprint(median_finder(test2, 0))\nprint(median_finder(test3, 0.5))\nprint(median_finder(test4, 0))\nprint(median_finder(test4, 0.5))\n<mask token>\nfor i in range(200, 1200, 50):\n testlist = list(np.random.permutation(i))\n time = timeit.timeit(stmt='median_finder(testlist,0.5)', setup=\n 'from __main__ import median_finder,testlist', number=100)\n time = time / 100\n stack = np.array((time, i))\n data = np.vstack((data, stack))\n<mask token>\nplt.figure()\nplt.ylim(0, 0.01)\nplt.scatter(x=data[:, 1], y=data[:, 0])\nplt.xlabel('Inputsize')\nplt.ylabel('Running time')\nplt.title('Median finder running time')\n",
"step-3": "<mask token>\n\n\ndef right_permutation(arr):\n if len(arr) == 1:\n return True\n for i in range(len(arr) - 1):\n if arr[i + 1] < arr[i]:\n break\n elif i == len(arr) - 2:\n return True\n return False\n\n\ndef worstsort(arr):\n sort_arr = []\n check = False\n while check == False:\n sort_arr = np.random.permutation(arr)\n check = right_permutation(sort_arr)\n return sort_arr\n\n\ntest1 = [5, 4, 3, 2, 1]\ntest2 = [1, 2, 3, 4, 5]\ntest3 = [2, 2, 2, 2, 2]\ntest4 = [2]\nprint(worstsort(test1))\nprint(worstsort(test2))\nprint(worstsort(test3))\nprint(worstsort(test4))\n\n\ndef factorial(n):\n result = 1\n for i in range(n):\n result = result * (i + 1)\n return result\n\n\nx = np.arange(0, 7, 1)\ny_factorial = list(map(factorial, x))\ny_compare = x * x\nplt.plot(x, y_factorial, label='Factorial of n')\nplt.plot(x, y_compare, label='n square')\nplt.title('Complexity comparison')\nplt.legend()\n\n\ndef median_finder(arr, x):\n tried = 0\n if abs(x) <= 0.5:\n lower = np.percentile(arr, 50 - x / 2)\n upper = np.percentile(arr, 50 + x / 2)\n while tried < 10000:\n find = np.random.randint(0, len(arr))\n if lower <= arr[find] and arr[find] <= upper:\n return arr[find]\n else:\n tried += 1\n return 'Tried enough times, still cannot find the value'\n else:\n return 'x not in the domain'\n\n\ntest1 = list(np.random.permutation(200))\ntest2 = [4] * 100\ntest3 = [5] * 1000\ntest4 = test2 + test3\nprint(median_finder(test1, 0.5))\nprint(median_finder(test2, 0.5))\nprint(median_finder(test2, 0))\nprint(median_finder(test3, 0.5))\nprint(median_finder(test4, 0))\nprint(median_finder(test4, 0.5))\ndata = np.empty((1, 2))\nfor i in range(200, 1200, 50):\n testlist = list(np.random.permutation(i))\n time = timeit.timeit(stmt='median_finder(testlist,0.5)', setup=\n 'from __main__ import median_finder,testlist', number=100)\n time = time / 100\n stack = np.array((time, i))\n data = np.vstack((data, stack))\ndata = data[1:]\nplt.figure()\nplt.ylim(0, 0.01)\nplt.scatter(x=data[:, 1], y=data[:, 0])\nplt.xlabel('Inputsize')\nplt.ylabel('Running time')\nplt.title('Median finder running time')\n",
"step-4": "<mask token>\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport timeit\n\n\ndef right_permutation(arr):\n if len(arr) == 1:\n return True\n for i in range(len(arr) - 1):\n if arr[i + 1] < arr[i]:\n break\n elif i == len(arr) - 2:\n return True\n return False\n\n\ndef worstsort(arr):\n sort_arr = []\n check = False\n while check == False:\n sort_arr = np.random.permutation(arr)\n check = right_permutation(sort_arr)\n return sort_arr\n\n\ntest1 = [5, 4, 3, 2, 1]\ntest2 = [1, 2, 3, 4, 5]\ntest3 = [2, 2, 2, 2, 2]\ntest4 = [2]\nprint(worstsort(test1))\nprint(worstsort(test2))\nprint(worstsort(test3))\nprint(worstsort(test4))\n\n\ndef factorial(n):\n result = 1\n for i in range(n):\n result = result * (i + 1)\n return result\n\n\nx = np.arange(0, 7, 1)\ny_factorial = list(map(factorial, x))\ny_compare = x * x\nplt.plot(x, y_factorial, label='Factorial of n')\nplt.plot(x, y_compare, label='n square')\nplt.title('Complexity comparison')\nplt.legend()\n\n\ndef median_finder(arr, x):\n tried = 0\n if abs(x) <= 0.5:\n lower = np.percentile(arr, 50 - x / 2)\n upper = np.percentile(arr, 50 + x / 2)\n while tried < 10000:\n find = np.random.randint(0, len(arr))\n if lower <= arr[find] and arr[find] <= upper:\n return arr[find]\n else:\n tried += 1\n return 'Tried enough times, still cannot find the value'\n else:\n return 'x not in the domain'\n\n\ntest1 = list(np.random.permutation(200))\ntest2 = [4] * 100\ntest3 = [5] * 1000\ntest4 = test2 + test3\nprint(median_finder(test1, 0.5))\nprint(median_finder(test2, 0.5))\nprint(median_finder(test2, 0))\nprint(median_finder(test3, 0.5))\nprint(median_finder(test4, 0))\nprint(median_finder(test4, 0.5))\ndata = np.empty((1, 2))\nfor i in range(200, 1200, 50):\n testlist = list(np.random.permutation(i))\n time = timeit.timeit(stmt='median_finder(testlist,0.5)', setup=\n 'from __main__ import median_finder,testlist', number=100)\n time = time / 100\n stack = np.array((time, i))\n data = np.vstack((data, stack))\ndata = data[1:]\nplt.figure()\nplt.ylim(0, 0.01)\nplt.scatter(x=data[:, 1], y=data[:, 0])\nplt.xlabel('Inputsize')\nplt.ylabel('Running time')\nplt.title('Median finder running time')\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Feb 11 14:55:12 2019\n\n@author: Furankyyy\n\"\"\"\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport timeit\n\n###worst sort function###\n\n#define the function that checks whether the list is in ascending order\ndef right_permutation(arr):\n if len(arr)==1: #if there's only one element, then the order is always correct\n return True\n for i in range(len(arr)-1): #check every elements from the first to the second to the last\n if arr[i+1]<arr[i]: #if the i+1th element is smaller than ith, the order is wrong, break the loop and return false\n break\n else:\n if i == len(arr)-2: #if the i+1th element is greater than/equal to ith, check if we have already checked all the elements\n return True #if we've already checked all the elements (i.e. i==len(arr)-2), return true; if not, continue the loop\n return False\n \n \n#define the worst sort function\ndef worstsort(arr):\n sort_arr=[] #initialize output\n check = False #initialize the result of right_permutation()\n while check == False: #while the order is wrong, generate a new permyutation and check if its order is correct\n sort_arr = np.random.permutation(arr)\n check = right_permutation(sort_arr)\n return sort_arr\n\n#test cases\ntest1=[5,4,3,2,1]\ntest2=[1,2,3,4,5] #best case\ntest3=[2,2,2,2,2] #best case as well!\ntest4=[2] #only one element\nprint(worstsort(test1))\nprint(worstsort(test2))\nprint(worstsort(test3))\nprint(worstsort(test4))\n\n\n#the best case is when the input list is already sorted, in this case, we only need to run the right_permutation once\n#we have a for loop in right_permutation, so the best case complexity is O(n)\n\n\n#given a random input of size n, the chance that the input x_k is correctly sorted is Pr(x_k) = 1/P_n = 1/n! \n#since in this worst algorithm, we do not \"remember\" the permutations that we've already checked\n#so each time, the Pr(sorted) remains the same 1/n!\n#Then we would expect to have n! times to have the corrected sorted list \n#the reason is that we have E[Pr(x1)+Pr(x2)+...+Pr(x_k)]=1, since Pr(x_k)=1/n!, we would expect k = n!\n#this reasoning is the same as the random indicator variable in the book, where we have the pr(I) for each choice (permutation) and we sum them to find the expected value\n#so the averaage case complexity is O(n!)\n\n\n#to calculate what n is best for this function\n\ndef factorial(n):\n result=1\n for i in range(n):\n result=result*(i+1)\n return result\n\nx=np.arange(0,7,1)\ny_factorial=list(map(factorial,x))\ny_compare=x*x\n\nplt.plot(x,y_factorial,label=\"Factorial of n\")\nplt.plot(x,y_compare,label=\"n square\")\nplt.title(\"Complexity comparison\")\nplt.legend()\n\n#from the plot we can see that for algorithms with comlexity of O(n^2) and O(n!), the difference comes when n=5\n#when n=4, the two algorithms do not vary that much, but when n=5, they have a >100 times difference\n#therefore, this method is feasible when n<=4\n#p.s. 
constants are discounted (they are relatively unimportant)\n\n\n\n\n###median finder###\n\n#the worst case for the median finder is that the elements in the input list are unique\n#the best case is that all elements are the same --> no matter which we choose, it is the median\n\n#to consider the times we try before stopping, we need to consider the worst case --> all elements are different\n#then the chance to find the exact median is 1/n\n#the number of elements lying in the input deviation range x is x//(100/n)+1 for this worst case\n#explanation: divide the 100% to n parts, if all elements are different then each element takes the 1 part, the x//(range for 1 part)+1 is the num of elements lying in the range\n#therefore, the probability of choosing the element in the range given by x is (x//(100/n)+1)/n\n#I want to try the expected times of choosing the correct element(s) for the worst case\n\n#Pr(failure) for 1 try is 1-(x//(100/n)+1)/n\n#Pr(failure) for the first k try is (1-(x//(100/n)+1)/n)^k, which scales with x and n.\n\n#so the Pr(at least one success) for the first k try is 1-Pr(failure)=1-(1-(x//(100/n)+1)/n)^k\n#we want to find a k taht makes this Pr large enough\n#so we want to find a small k minimizing Pr(failure) for the first k try\n#to simplify the problem, we regard x as constant and assume the \"//\" is \"/\"\n#(1-(x//(100/n)+1)/n)^k = ((n-xn/100-1)/n)^k =(1-x/100-1/n)^k\n#x/100 is a constant\n#-->(1-1/n)^k\n#when n is sufficiently large, (1-1/n) is nearly 1\n#it is extremely hard to succeed if n is very large, I set the limit of k at 10000, simply because my laptop's computational ability\n\ndef median_finder(arr,x):\n tried = 0 #record the number of times of choosing the random element\n if abs(x) <= 0.5: #when x is valid\n lower=np.percentile(arr,50-x/2)\n upper=np.percentile(arr,50+x/2)\n while tried <10000:\n find = np.random.randint(0,len(arr)) #find a new element\n if lower<=arr[find] and arr[find]<=upper: #if the chosen element is in the range, return it\n return arr[find]\n else: \n tried += 1 \n return \"Tried enough times, still cannot find the value\"\n else:\n return \"x not in the domain\"\n\n#test cases\ntest1=list(np.random.permutation(200))\ntest2=[4]*100\ntest3=[5]*1000\ntest4=test2+test3\n\nprint(median_finder(test1,0.5)) #worst case, exactly 2 elements in the range\nprint(median_finder(test2,0.5)) #best case\nprint(median_finder(test2,0)) #best case\nprint(median_finder(test3,0.5)) #best case\nprint(median_finder(test4,0)) #1000/1100 probability \nprint(median_finder(test4,0.5)) #same as above.\n\n\n#time complexity\n\n#best case running time is O(1)\n\n#the time complexity of the worst case running time is E[k]=Sum(E[ki])\n#E[ki]=Pr(correct)=(x//(100/n)+1)/n\n#sum is from 1 to the limit tried k\n#since x is between 0 and 0.5, we simply regard it as constant\n#we also assume the \"//\" is \"/\"\n#then the expression becomes: E[k]= k*(xn/100+1)/n\n#as n goes to infinity, we can solve this by trying to use L'Hopital's rule\n#the result is kx/100, which is a constant\n#O(1)\n\n\ndata=np.empty((1,2))\n\nfor i in range(200,1200,50): \n testlist=list(np.random.permutation(i))\n time=timeit.timeit(stmt=\"median_finder(testlist,0.5)\",setup=\"from __main__ import median_finder,testlist\",number=100)\n time=time/100\n stack=np.array((time,i))\n\n data=np.vstack((data,stack))\n\ndata=data[1:]\n\nplt.figure()\nplt.ylim(0,0.01)\nplt.scatter(x=data[:,1],y=data[:,0])\nplt.xlabel(\"Inputsize\")\nplt.ylabel(\"Running time\")\nplt.title(\"Median finder running 
time\")\n\n#from the plot we can see that the running time is almost constant --> O(1)\n\n\n#space complexity is O(n), because each time we just store the (sorted) list of length n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__copyright__ = """
This code is licensed under the MIT license.
Copyright University Innsbruck, Institute for General, Inorganic, and Theoretical Chemistry, Podewitz Group
See LICENSE for details
"""
from scipy.signal import argrelextrema
from typing import List, Tuple
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import os
class ButtonActions(object):
def __init__(self):
self.axs = []
self.integrals = []
self.scs = []
self.annots = []
def plot_rdf(self, display):
matplotlib.rcParams.update({'font.size': 10})
self.fig = plt.figure(figsize=(display.width, display.height))
self.display = display
rows, cols = self._get_rows_and_cols(display)
count = 0 # only count existing -> not enumerate
for existing, (symbol, name) in zip(display.existing_elements, display.rdf_names.items()):
if existing:
count += 1
if os.path.exists('rdf-' + str(name) + '.dat'):
arr = np.loadtxt("rdf-" + str(name) + ".dat")
else:
print("ERROR: RDF analysis for " + str(name) + " was not performed in this directory!")
ax = self.fig.add_subplot(rows, cols, count)
txt = ax.text(0.1, 0.5, '', transform=ax.transAxes)
txt.set_text("ERROR: RDF analysis for " + str(name) + "\nwas not performed in this directory!")
plt.plot()
continue
x = arr[:, 0]
y = arr[:, 1]
ax = self.fig.add_subplot(rows, cols, count)
self.axs.append(ax)
# determine integrals
sc_x, sc_y, integrals = self._find_local_minima_and_maxima(x, y, name)
sc = plt.scatter(sc_x, sc_y, s=10, c=display.colors['mark'])
self.integrals.append(integrals)
self.scs.append(sc)
annot = ax.annotate("", xy=(0, 0), xytext=(20, 20), textcoords="offset points",
bbox=dict(boxstyle="round", fc="w"), arrowprops=dict(arrowstyle="->"))
annot.set_visible(False)
self.annots.append(annot)
# title and label specifications
plt.xlabel("Distance of " + str(name) + ' to oxygen atoms in water / \u00c5')
plt.ylabel('RDF')
plt.xticks(np.arange(0, np.max(x) + 0.5, step=0.5))
ax.set_xlim([0, np.max(x)])
ax.axhline(y=1, ls='--', color=display.colors['mark'])
plt.plot(x, y, linestyle="-", color='#80b1d3')
plt.ion() # avoids 'The event loop is already running' error message
self.fig.canvas.mpl_connect('motion_notify_event', lambda event: self._hover(event))
plt.show()
def _get_rows_and_cols(self, display) -> Tuple[int, int]:
true_count = sum(display.existing_elements)
if true_count % 2 == 0:
rows = int(round(true_count / 2))
cols = int(round(true_count / 2))
if true_count == 2:
rows = 2
else:
rows = int(round(true_count / 2 + 0.5))
cols = int(round(true_count / 2 + 0.5))
if true_count == 5:
cols = 2
return rows, cols
def _find_local_minima_and_maxima(self, distances: np.array, values: np.array, name: str) -> Tuple[List[float],
List[float],
List[float]]:
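        """Locate local extrema of the RDF and return their distances, values, and integrals."""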
n_local = 5
maxima = argrelextrema(values, np.greater, order=n_local)[0]
minima = argrelextrema(values, np.less, order=n_local)[0]
extrema = np.asarray(list(maxima) + list(minima))
ext_distances = [distances[x] for x in extrema]
ext_values = [values[x] for x in extrema]
integrals = self._get_integrals(extrema, name)
return ext_distances, ext_values, integrals
def _get_integrals(self, indices: np.array, name: str) -> List[float]:
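        """Read the integrated RDF file and return its values at the given extremum indices."""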
arr = np.loadtxt("int-rdf-" + str(name) + ".dat")
return [arr[:, 1][i] for i in indices]
def _update_annot(self, ind, subplot_number: int):
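        """Move the annotation to the hovered extremum and display its integral as a water count."""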
index = ind['ind'][0]
integral = self.integrals[subplot_number][index]
text = "{0:.2f} waters".format(integral)
annot = self.annots[subplot_number]
annot.xy = self.scs[subplot_number].get_offsets()[index]
annot.set_text(text)
annot.get_bbox_patch().set_facecolor(self.display.colors['mark'])
annot.get_bbox_patch().set_alpha(0.4)
def _hover(self, event):
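        """Show the annotation for the scatter point under the cursor and hide it otherwise."""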
for i, a in enumerate(self.axs):
if event.inaxes == a:
contains, ind = self.scs[i].contains(event)
annot = self.annots[i]
visible = annot.get_visible()
if contains:
self._update_annot(ind, i)
annot.set_visible(True)
self.fig.canvas.draw_idle()
else:
if visible:
annot.set_visible(False)
self.fig.canvas.draw_idle()
|
normal
|
{
"blob_id": "8c42e06fd92f0110b3ba8c4e7cc0ac45b9e44378",
"index": 3150,
"step-1": "<mask token>\n\n\nclass ButtonActions(object):\n <mask token>\n\n def plot_rdf(self, display):\n matplotlib.rcParams.update({'font.size': 10})\n self.fig = plt.figure(figsize=(display.width, display.height))\n self.display = display\n rows, cols = self._get_rows_and_cols(display)\n count = 0\n for existing, (symbol, name) in zip(display.existing_elements,\n display.rdf_names.items()):\n if existing:\n count += 1\n if os.path.exists('rdf-' + str(name) + '.dat'):\n arr = np.loadtxt('rdf-' + str(name) + '.dat')\n else:\n print('ERROR: RDF analysis for ' + str(name) +\n ' was not performed in this directory!')\n ax = self.fig.add_subplot(rows, cols, count)\n txt = ax.text(0.1, 0.5, '', transform=ax.transAxes)\n txt.set_text('ERROR: RDF analysis for ' + str(name) +\n \"\"\"\nwas not performed in this directory!\"\"\")\n plt.plot()\n continue\n x = arr[:, 0]\n y = arr[:, 1]\n ax = self.fig.add_subplot(rows, cols, count)\n self.axs.append(ax)\n sc_x, sc_y, integrals = self._find_local_minima_and_maxima(x,\n y, name)\n sc = plt.scatter(sc_x, sc_y, s=10, c=display.colors['mark'])\n self.integrals.append(integrals)\n self.scs.append(sc)\n annot = ax.annotate('', xy=(0, 0), xytext=(20, 20),\n textcoords='offset points', bbox=dict(boxstyle='round',\n fc='w'), arrowprops=dict(arrowstyle='->'))\n annot.set_visible(False)\n self.annots.append(annot)\n plt.xlabel('Distance of ' + str(name) +\n ' to oxygen atoms in water / Å')\n plt.ylabel('RDF')\n plt.xticks(np.arange(0, np.max(x) + 0.5, step=0.5))\n ax.set_xlim([0, np.max(x)])\n ax.axhline(y=1, ls='--', color=display.colors['mark'])\n plt.plot(x, y, linestyle='-', color='#80b1d3')\n plt.ion()\n self.fig.canvas.mpl_connect('motion_notify_event', lambda event:\n self._hover(event))\n plt.show()\n <mask token>\n <mask token>\n <mask token>\n\n def _update_annot(self, ind, subplot_number: int):\n index = ind['ind'][0]\n integral = self.integrals[subplot_number][index]\n text = '{0:.2f} waters'.format(integral)\n annot = self.annots[subplot_number]\n annot.xy = self.scs[subplot_number].get_offsets()[index]\n annot.set_text(text)\n annot.get_bbox_patch().set_facecolor(self.display.colors['mark'])\n annot.get_bbox_patch().set_alpha(0.4)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ButtonActions(object):\n\n def __init__(self):\n self.axs = []\n self.integrals = []\n self.scs = []\n self.annots = []\n\n def plot_rdf(self, display):\n matplotlib.rcParams.update({'font.size': 10})\n self.fig = plt.figure(figsize=(display.width, display.height))\n self.display = display\n rows, cols = self._get_rows_and_cols(display)\n count = 0\n for existing, (symbol, name) in zip(display.existing_elements,\n display.rdf_names.items()):\n if existing:\n count += 1\n if os.path.exists('rdf-' + str(name) + '.dat'):\n arr = np.loadtxt('rdf-' + str(name) + '.dat')\n else:\n print('ERROR: RDF analysis for ' + str(name) +\n ' was not performed in this directory!')\n ax = self.fig.add_subplot(rows, cols, count)\n txt = ax.text(0.1, 0.5, '', transform=ax.transAxes)\n txt.set_text('ERROR: RDF analysis for ' + str(name) +\n \"\"\"\nwas not performed in this directory!\"\"\")\n plt.plot()\n continue\n x = arr[:, 0]\n y = arr[:, 1]\n ax = self.fig.add_subplot(rows, cols, count)\n self.axs.append(ax)\n sc_x, sc_y, integrals = self._find_local_minima_and_maxima(x,\n y, name)\n sc = plt.scatter(sc_x, sc_y, s=10, c=display.colors['mark'])\n self.integrals.append(integrals)\n self.scs.append(sc)\n annot = ax.annotate('', xy=(0, 0), xytext=(20, 20),\n textcoords='offset points', bbox=dict(boxstyle='round',\n fc='w'), arrowprops=dict(arrowstyle='->'))\n annot.set_visible(False)\n self.annots.append(annot)\n plt.xlabel('Distance of ' + str(name) +\n ' to oxygen atoms in water / Å')\n plt.ylabel('RDF')\n plt.xticks(np.arange(0, np.max(x) + 0.5, step=0.5))\n ax.set_xlim([0, np.max(x)])\n ax.axhline(y=1, ls='--', color=display.colors['mark'])\n plt.plot(x, y, linestyle='-', color='#80b1d3')\n plt.ion()\n self.fig.canvas.mpl_connect('motion_notify_event', lambda event:\n self._hover(event))\n plt.show()\n\n def _get_rows_and_cols(self, display) ->Tuple[int, int]:\n true_count = sum(display.existing_elements)\n if true_count % 2 == 0:\n rows = int(round(true_count / 2))\n cols = int(round(true_count / 2))\n if true_count == 2:\n rows = 2\n else:\n rows = int(round(true_count / 2 + 0.5))\n cols = int(round(true_count / 2 + 0.5))\n if true_count == 5:\n cols = 2\n return rows, cols\n\n def _find_local_minima_and_maxima(self, distances: np.array, values: np\n .array, name: str) ->Tuple[List[float], List[float], List[float]]:\n n_local = 5\n maxima = argrelextrema(values, np.greater, order=n_local)[0]\n minima = argrelextrema(values, np.less, order=n_local)[0]\n extrema = np.asarray(list(maxima) + list(minima))\n ext_distances = [distances[x] for x in extrema]\n ext_values = [values[x] for x in extrema]\n integrals = self._get_integrals(extrema, name)\n return ext_distances, ext_values, integrals\n\n def _get_integrals(self, indices: np.array, name: str) ->List[float]:\n arr = np.loadtxt('int-rdf-' + str(name) + '.dat')\n return [arr[:, 1][i] for i in indices]\n\n def _update_annot(self, ind, subplot_number: int):\n index = ind['ind'][0]\n integral = self.integrals[subplot_number][index]\n text = '{0:.2f} waters'.format(integral)\n annot = self.annots[subplot_number]\n annot.xy = self.scs[subplot_number].get_offsets()[index]\n annot.set_text(text)\n annot.get_bbox_patch().set_facecolor(self.display.colors['mark'])\n annot.get_bbox_patch().set_alpha(0.4)\n\n def _hover(self, event):\n for i, a in enumerate(self.axs):\n if event.inaxes == a:\n contains, ind = self.scs[i].contains(event)\n annot = self.annots[i]\n visible = annot.get_visible()\n if contains:\n 
self._update_annot(ind, i)\n annot.set_visible(True)\n self.fig.canvas.draw_idle()\n elif visible:\n annot.set_visible(False)\n self.fig.canvas.draw_idle()\n",
"step-3": "__copyright__ = \"\"\"\nThis code is licensed under the MIT license.\nCopyright University Innsbruck, Institute for General, Inorganic, and Theoretical Chemistry, Podewitz Group\nSee LICENSE for details\n\"\"\"\n<mask token>\n\n\nclass ButtonActions(object):\n\n def __init__(self):\n self.axs = []\n self.integrals = []\n self.scs = []\n self.annots = []\n\n def plot_rdf(self, display):\n matplotlib.rcParams.update({'font.size': 10})\n self.fig = plt.figure(figsize=(display.width, display.height))\n self.display = display\n rows, cols = self._get_rows_and_cols(display)\n count = 0\n for existing, (symbol, name) in zip(display.existing_elements,\n display.rdf_names.items()):\n if existing:\n count += 1\n if os.path.exists('rdf-' + str(name) + '.dat'):\n arr = np.loadtxt('rdf-' + str(name) + '.dat')\n else:\n print('ERROR: RDF analysis for ' + str(name) +\n ' was not performed in this directory!')\n ax = self.fig.add_subplot(rows, cols, count)\n txt = ax.text(0.1, 0.5, '', transform=ax.transAxes)\n txt.set_text('ERROR: RDF analysis for ' + str(name) +\n \"\"\"\nwas not performed in this directory!\"\"\")\n plt.plot()\n continue\n x = arr[:, 0]\n y = arr[:, 1]\n ax = self.fig.add_subplot(rows, cols, count)\n self.axs.append(ax)\n sc_x, sc_y, integrals = self._find_local_minima_and_maxima(x,\n y, name)\n sc = plt.scatter(sc_x, sc_y, s=10, c=display.colors['mark'])\n self.integrals.append(integrals)\n self.scs.append(sc)\n annot = ax.annotate('', xy=(0, 0), xytext=(20, 20),\n textcoords='offset points', bbox=dict(boxstyle='round',\n fc='w'), arrowprops=dict(arrowstyle='->'))\n annot.set_visible(False)\n self.annots.append(annot)\n plt.xlabel('Distance of ' + str(name) +\n ' to oxygen atoms in water / Å')\n plt.ylabel('RDF')\n plt.xticks(np.arange(0, np.max(x) + 0.5, step=0.5))\n ax.set_xlim([0, np.max(x)])\n ax.axhline(y=1, ls='--', color=display.colors['mark'])\n plt.plot(x, y, linestyle='-', color='#80b1d3')\n plt.ion()\n self.fig.canvas.mpl_connect('motion_notify_event', lambda event:\n self._hover(event))\n plt.show()\n\n def _get_rows_and_cols(self, display) ->Tuple[int, int]:\n true_count = sum(display.existing_elements)\n if true_count % 2 == 0:\n rows = int(round(true_count / 2))\n cols = int(round(true_count / 2))\n if true_count == 2:\n rows = 2\n else:\n rows = int(round(true_count / 2 + 0.5))\n cols = int(round(true_count / 2 + 0.5))\n if true_count == 5:\n cols = 2\n return rows, cols\n\n def _find_local_minima_and_maxima(self, distances: np.array, values: np\n .array, name: str) ->Tuple[List[float], List[float], List[float]]:\n n_local = 5\n maxima = argrelextrema(values, np.greater, order=n_local)[0]\n minima = argrelextrema(values, np.less, order=n_local)[0]\n extrema = np.asarray(list(maxima) + list(minima))\n ext_distances = [distances[x] for x in extrema]\n ext_values = [values[x] for x in extrema]\n integrals = self._get_integrals(extrema, name)\n return ext_distances, ext_values, integrals\n\n def _get_integrals(self, indices: np.array, name: str) ->List[float]:\n arr = np.loadtxt('int-rdf-' + str(name) + '.dat')\n return [arr[:, 1][i] for i in indices]\n\n def _update_annot(self, ind, subplot_number: int):\n index = ind['ind'][0]\n integral = self.integrals[subplot_number][index]\n text = '{0:.2f} waters'.format(integral)\n annot = self.annots[subplot_number]\n annot.xy = self.scs[subplot_number].get_offsets()[index]\n annot.set_text(text)\n annot.get_bbox_patch().set_facecolor(self.display.colors['mark'])\n annot.get_bbox_patch().set_alpha(0.4)\n\n def 
_hover(self, event):\n for i, a in enumerate(self.axs):\n if event.inaxes == a:\n contains, ind = self.scs[i].contains(event)\n annot = self.annots[i]\n visible = annot.get_visible()\n if contains:\n self._update_annot(ind, i)\n annot.set_visible(True)\n self.fig.canvas.draw_idle()\n elif visible:\n annot.set_visible(False)\n self.fig.canvas.draw_idle()\n",
"step-4": "__copyright__ = \"\"\"\nThis code is licensed under the MIT license.\nCopyright University Innsbruck, Institute for General, Inorganic, and Theoretical Chemistry, Podewitz Group\nSee LICENSE for details\n\"\"\"\nfrom scipy.signal import argrelextrema\nfrom typing import List, Tuple\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\n\n\nclass ButtonActions(object):\n\n def __init__(self):\n self.axs = []\n self.integrals = []\n self.scs = []\n self.annots = []\n\n def plot_rdf(self, display):\n matplotlib.rcParams.update({'font.size': 10})\n self.fig = plt.figure(figsize=(display.width, display.height))\n self.display = display\n rows, cols = self._get_rows_and_cols(display)\n count = 0\n for existing, (symbol, name) in zip(display.existing_elements,\n display.rdf_names.items()):\n if existing:\n count += 1\n if os.path.exists('rdf-' + str(name) + '.dat'):\n arr = np.loadtxt('rdf-' + str(name) + '.dat')\n else:\n print('ERROR: RDF analysis for ' + str(name) +\n ' was not performed in this directory!')\n ax = self.fig.add_subplot(rows, cols, count)\n txt = ax.text(0.1, 0.5, '', transform=ax.transAxes)\n txt.set_text('ERROR: RDF analysis for ' + str(name) +\n \"\"\"\nwas not performed in this directory!\"\"\")\n plt.plot()\n continue\n x = arr[:, 0]\n y = arr[:, 1]\n ax = self.fig.add_subplot(rows, cols, count)\n self.axs.append(ax)\n sc_x, sc_y, integrals = self._find_local_minima_and_maxima(x,\n y, name)\n sc = plt.scatter(sc_x, sc_y, s=10, c=display.colors['mark'])\n self.integrals.append(integrals)\n self.scs.append(sc)\n annot = ax.annotate('', xy=(0, 0), xytext=(20, 20),\n textcoords='offset points', bbox=dict(boxstyle='round',\n fc='w'), arrowprops=dict(arrowstyle='->'))\n annot.set_visible(False)\n self.annots.append(annot)\n plt.xlabel('Distance of ' + str(name) +\n ' to oxygen atoms in water / Å')\n plt.ylabel('RDF')\n plt.xticks(np.arange(0, np.max(x) + 0.5, step=0.5))\n ax.set_xlim([0, np.max(x)])\n ax.axhline(y=1, ls='--', color=display.colors['mark'])\n plt.plot(x, y, linestyle='-', color='#80b1d3')\n plt.ion()\n self.fig.canvas.mpl_connect('motion_notify_event', lambda event:\n self._hover(event))\n plt.show()\n\n def _get_rows_and_cols(self, display) ->Tuple[int, int]:\n true_count = sum(display.existing_elements)\n if true_count % 2 == 0:\n rows = int(round(true_count / 2))\n cols = int(round(true_count / 2))\n if true_count == 2:\n rows = 2\n else:\n rows = int(round(true_count / 2 + 0.5))\n cols = int(round(true_count / 2 + 0.5))\n if true_count == 5:\n cols = 2\n return rows, cols\n\n def _find_local_minima_and_maxima(self, distances: np.array, values: np\n .array, name: str) ->Tuple[List[float], List[float], List[float]]:\n n_local = 5\n maxima = argrelextrema(values, np.greater, order=n_local)[0]\n minima = argrelextrema(values, np.less, order=n_local)[0]\n extrema = np.asarray(list(maxima) + list(minima))\n ext_distances = [distances[x] for x in extrema]\n ext_values = [values[x] for x in extrema]\n integrals = self._get_integrals(extrema, name)\n return ext_distances, ext_values, integrals\n\n def _get_integrals(self, indices: np.array, name: str) ->List[float]:\n arr = np.loadtxt('int-rdf-' + str(name) + '.dat')\n return [arr[:, 1][i] for i in indices]\n\n def _update_annot(self, ind, subplot_number: int):\n index = ind['ind'][0]\n integral = self.integrals[subplot_number][index]\n text = '{0:.2f} waters'.format(integral)\n annot = self.annots[subplot_number]\n annot.xy = self.scs[subplot_number].get_offsets()[index]\n 
annot.set_text(text)\n annot.get_bbox_patch().set_facecolor(self.display.colors['mark'])\n annot.get_bbox_patch().set_alpha(0.4)\n\n def _hover(self, event):\n for i, a in enumerate(self.axs):\n if event.inaxes == a:\n contains, ind = self.scs[i].contains(event)\n annot = self.annots[i]\n visible = annot.get_visible()\n if contains:\n self._update_annot(ind, i)\n annot.set_visible(True)\n self.fig.canvas.draw_idle()\n elif visible:\n annot.set_visible(False)\n self.fig.canvas.draw_idle()\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n__copyright__ = \"\"\"\nThis code is licensed under the MIT license.\nCopyright University Innsbruck, Institute for General, Inorganic, and Theoretical Chemistry, Podewitz Group\nSee LICENSE for details\n\"\"\"\n\nfrom scipy.signal import argrelextrema\nfrom typing import List, Tuple\nimport matplotlib\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\n\n\nclass ButtonActions(object):\n def __init__(self):\n self.axs = []\n self.integrals = []\n self.scs = []\n self.annots = []\n\n def plot_rdf(self, display):\n matplotlib.rcParams.update({'font.size': 10})\n self.fig = plt.figure(figsize=(display.width, display.height))\n self.display = display\n\n rows, cols = self._get_rows_and_cols(display)\n\n count = 0 # only count existing -> not enumerate\n for existing, (symbol, name) in zip(display.existing_elements, display.rdf_names.items()):\n if existing:\n count += 1\n if os.path.exists('rdf-' + str(name) + '.dat'):\n arr = np.loadtxt(\"rdf-\" + str(name) + \".dat\")\n else:\n print(\"ERROR: RDF analysis for \" + str(name) + \" was not performed in this directory!\")\n ax = self.fig.add_subplot(rows, cols, count)\n txt = ax.text(0.1, 0.5, '', transform=ax.transAxes)\n txt.set_text(\"ERROR: RDF analysis for \" + str(name) + \"\\nwas not performed in this directory!\")\n plt.plot()\n continue\n\n x = arr[:, 0]\n y = arr[:, 1]\n ax = self.fig.add_subplot(rows, cols, count)\n self.axs.append(ax)\n\n # determine integrals\n sc_x, sc_y, integrals = self._find_local_minima_and_maxima(x, y, name)\n sc = plt.scatter(sc_x, sc_y, s=10, c=display.colors['mark'])\n self.integrals.append(integrals)\n self.scs.append(sc)\n annot = ax.annotate(\"\", xy=(0, 0), xytext=(20, 20), textcoords=\"offset points\",\n bbox=dict(boxstyle=\"round\", fc=\"w\"), arrowprops=dict(arrowstyle=\"->\"))\n annot.set_visible(False)\n self.annots.append(annot)\n\n # title and label specifications\n plt.xlabel(\"Distance of \" + str(name) + ' to oxygen atoms in water / \\u00c5')\n plt.ylabel('RDF')\n plt.xticks(np.arange(0, np.max(x) + 0.5, step=0.5))\n ax.set_xlim([0, np.max(x)])\n ax.axhline(y=1, ls='--', color=display.colors['mark'])\n plt.plot(x, y, linestyle=\"-\", color='#80b1d3')\n\n plt.ion() # avoids 'The event loop is already running' error message\n self.fig.canvas.mpl_connect('motion_notify_event', lambda event: self._hover(event))\n plt.show()\n\n def _get_rows_and_cols(self, display) -> Tuple[int, int]:\n true_count = sum(display.existing_elements)\n if true_count % 2 == 0:\n rows = int(round(true_count / 2))\n cols = int(round(true_count / 2))\n if true_count == 2:\n rows = 2\n else:\n rows = int(round(true_count / 2 + 0.5))\n cols = int(round(true_count / 2 + 0.5))\n if true_count == 5:\n cols = 2\n return rows, cols\n\n def _find_local_minima_and_maxima(self, distances: np.array, values: np.array, name: str) -> Tuple[List[float],\n List[float],\n List[float]]:\n n_local = 5\n maxima = argrelextrema(values, np.greater, order=n_local)[0]\n minima = argrelextrema(values, np.less, order=n_local)[0]\n extrema = np.asarray(list(maxima) + list(minima))\n ext_distances = [distances[x] for x in extrema]\n ext_values = [values[x] for x in extrema]\n integrals = self._get_integrals(extrema, name)\n return ext_distances, ext_values, integrals\n\n def _get_integrals(self, indices: np.array, name: str) -> List[float]:\n arr = np.loadtxt(\"int-rdf-\" + str(name) + \".dat\")\n return [arr[:, 1][i] for i in indices]\n\n def _update_annot(self, ind, 
subplot_number: int):\n index = ind['ind'][0]\n integral = self.integrals[subplot_number][index]\n text = \"{0:.2f} waters\".format(integral)\n annot = self.annots[subplot_number]\n annot.xy = self.scs[subplot_number].get_offsets()[index]\n annot.set_text(text)\n annot.get_bbox_patch().set_facecolor(self.display.colors['mark'])\n annot.get_bbox_patch().set_alpha(0.4)\n\n def _hover(self, event):\n for i, a in enumerate(self.axs):\n if event.inaxes == a:\n contains, ind = self.scs[i].contains(event)\n annot = self.annots[i]\n visible = annot.get_visible()\n if contains:\n self._update_annot(ind, i)\n annot.set_visible(True)\n self.fig.canvas.draw_idle()\n else:\n if visible:\n annot.set_visible(False)\n self.fig.canvas.draw_idle()\n",
"step-ids": [
3,
8,
9,
10,
11
]
}
|
[
3,
8,
9,
10,
11
] |
skipped = 0
class Node(object):
"""docstring for Node"""
def __init__(self, value, indentifier):
super(Node, self).__init__()
self.value = value
self.identifier = indentifier
self.next = None
class Graph(object):
"""docstring for Graph"""
def __init__(self, values, edges):
super(Graph, self).__init__()
self.node_values = values
self.vertices = len(values)
self.edges = edges
self.graph = [None] * self.vertices
# self.edges.sort()
self.grand_sum = sum(self.node_values)
def build_adjacency_list(self):
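        """Build the undirected adjacency list from the 1-indexed edge list."""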
for edge in self.edges:
fro = edge[0] - 1
to = edge[1]- 1
# Adding the node to the source node
node = Node(self.node_values[to], to)
node.next = self.graph[fro]
self.graph[fro] = node
            # Adding the source node to the destination as well,
            # since the graph is undirected
node = Node(self.node_values[fro], fro)
node.next = self.graph[to]
self.graph[to] = node
def print_graph(self):
for i in range(self.vertices):
node = self.graph[i]
print("Vertex:", i)
while(node!=None):
print(node.value, node.identifier)
node = node.next
print("<<"*20)
def get_tree_nodes(self, start_node, nodes, edge, total):
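        """Collect vertices reachable from start_node while avoiding the cut vertices edge[0] and edge[2]; their value sum accumulates in total[0]."""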
if(start_node==None):
return nodes
while(start_node!=None):
if(start_node.identifier==edge[0] or start_node.identifier==edge[2] or (start_node.identifier in nodes)):
print("skipping ", start_node.identifier)
else:
print("adding ", start_node.identifier)
nodes.append(start_node.identifier)
total[0] += start_node.value
next_n = self.graph[start_node.identifier]
self.get_tree_nodes(next_n, nodes, edge, total)
start_node = start_node.next
return nodes
def split_and_compute_tree_sum(self, t1_nodes = [], t2_nodes = [], edge=[], ton = False):
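        """Cut at the given edge and return the value sums of the remaining tree and of the subtree rooted at edge[1]."""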
t1_total = 0
t2_total = 0
total = [0]
start_node = self.graph[edge[1]]
if(start_node.next != None):
t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)
if(len(t2_nodes)==0 and edge[1]!=edge[2]):
t2_nodes.append(edge[1])
total[0] += self.node_values[edge[1]]
t2_total = total[0]
if(not ton and t2_total < self.grand_sum/2):
for i in range(self.vertices):
if(i not in t2_nodes):
t1_nodes.append(i)
t1_total = self.grand_sum - t2_total
print("t2_nodes", t2_nodes)
print("t2_total", t2_total)
return t1_total, t2_total
def check(self, tree1_total, tree2_total, tree3_total):
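        """If the largest of the three sums occurs at least twice, return (max - min, True); otherwise (-1, False)."""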
print("###"*10)
print("FINAL tree1_total: ", tree1_total)
print("FINAL tree2_total: ", tree2_total)
print("FINAL tree3_total: ", tree3_total)
print("###"*10)
if (tree1_total == tree2_total) or (tree1_total == tree3_total) or (tree2_total == tree3_total):
mx = max(tree1_total, tree2_total, tree3_total)
if([tree1_total, tree2_total, tree3_total].count(mx) >= 2):
ret = mx - min(tree1_total, tree2_total, tree3_total)
return ret, True
return -1, False
def split_tree_into_two(self):
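        """Try every edge as the first cut and search the heavier side for a second cut that balances the forest."""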
ret = -1
found = False
global skipped
for entry in range(self.vertices):
tree1_nodes = []
tree2_nodes = []
tree3_nodes = []
temp_nodes = []
n = self.graph[entry]
while(n!=None):
edge = [entry, n.identifier, -1]
if(n.identifier <= entry):
n = n.next
skipped += 1
continue
print("##MAIN##. SPLIT POINT EDGE: ", edge)
tree1_nodes = []
tree2_nodes = []
tree1_total, tree2_total = self.split_and_compute_tree_sum(tree1_nodes, tree2_nodes, edge)
print("ORIGINALS: ", tree1_total, tree2_total)
if(min(tree1_total, tree2_total) < self.grand_sum/3 or (max(tree1_total, tree2_total) > (2*self.grand_sum)/3)):
n = n.next
continue
if(tree1_total > tree2_total):
ret, found = self.find_third_tree(tree1_total, tree2_total,tree1_nodes, 1, edge[1])
elif(tree2_total > tree1_total):
ret, found = self.find_third_tree(tree1_total, tree2_total,tree2_nodes, 2, edge[0])
elif (tree1_total == tree2_total):
ret = tree1_total
found = True
else:
found = True
if(found):
break
n = n.next
if(found):
break
return ret
def find_third_tree(self, tree1_total, tree2_total, nodes, t = 1, m=0):
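        """Try second cuts among the given tree's nodes (t marks which original tree they belong to) and check whether the three sums can be balanced."""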
ret , found = -1, False
global skipped
consumed = []
for i in range(len(nodes)):
skip_n = nodes[i]
consumed.append(skip_n)
n = self.graph[skip_n]
while(n!=None):
if(n.identifier in consumed):
n = n.next
skipped += 1
continue
edge = [skip_n, n.identifier, m]
print("2. SPLIT POINT EDGE: ", edge)
print("tree1_total",tree1_total)
tree3_nodes = []
temp_nodes = []
_,tree3_total = self.split_and_compute_tree_sum(temp_nodes, tree3_nodes, edge, True)
if(t==1):
ret , found = self.check(tree1_total - tree3_total, tree2_total, tree3_total)
elif(t==2):
ret , found = self.check(tree1_total, tree2_total - tree3_total, tree3_total)
if(found):
break
n = n.next
if(found):
break
return ret, found
def balancedForest(values, edges):
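    """Return the smallest value that can be added to balance the tree into three equal-sum trees, or -1 if impossible."""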
mygraph = Graph(values, edges)
mygraph.build_adjacency_list()
mygraph.print_graph()
return mygraph.split_tree_into_two()
import unittest
class BalancedForestTest(unittest.TestCase):
def test1(self):
expected = 10
c = [1, 1, 1, 18, 10, 11, 5, 6]
edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]
self.assertEqual(balancedForest(c, edges), expected)
def test2(self):
expected = 13
c = [12, 7, 11, 17, 20, 10]
edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test3(self):
expected = 19
c = [15, 12, 8, 14, 13]
edges = [[4,5],[1,2],[1,3],[1,4]]
self.assertEqual(balancedForest(c, edges), expected)
def test4(self):
expected = 2
c = [1,2,2,1,1]
edges = [[1,2],[1,3],[3,5],[1,4]]
self.assertEqual(balancedForest(c, edges), expected)
def test5(self):
expected = -1
c = [1,3,5]
edges = [[1,3],[1,2]]
self.assertEqual(balancedForest(c, edges), expected)
def test6(self):
expected = -1
c = [7, 7, 4, 1, 1, 1]
edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]
self.assertEqual(balancedForest(c, edges), expected)
def test7(self):
expected = 0
c = [1, 3, 4, 4]
edges = [(1, 2), (1, 3), (1, 4)]
self.assertEqual(balancedForest(c, edges), expected)
def test8(self):
expected = 297
c = [100, 99, 98, 100, 99, 98]
edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]
self.assertEqual(balancedForest(c, edges), expected)
def test9(self):
expected = 4
c = [12, 10, 8, 12, 14, 12]
edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]
self.assertEqual(balancedForest(c, edges), expected)
print("SKIPPED", skipped)
if __name__ == '__main__':
unittest.main()
|
normal
|
{
"blob_id": "e361215c44305f1ecc1cbe9e19345ee08bdd30f5",
"index": 2393,
"step-1": "<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n <mask token>\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n <mask token>\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Node(object):\n <mask token>\n\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1] - 1\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print('Vertex:', i)\n while node != None:\n print(node.value, node.identifier)\n node = node.next\n print('<<' * 20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n if start_node == None:\n return nodes\n while start_node != None:\n if start_node.identifier == edge[0\n ] or start_node.identifier == edge[2\n ] or start_node.identifier in nodes:\n print('skipping ', start_node.identifier)\n else:\n print('adding ', start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n def split_and_compute_tree_sum(self, t1_nodes=[], t2_nodes=[], edge=[],\n ton=False):\n t1_total = 0\n t2_total = 0\n total = [0]\n start_node = self.graph[edge[1]]\n if start_node.next != None:\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n if len(t2_nodes) == 0 and edge[1] != edge[2]:\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n t2_total = total[0]\n if not ton and t2_total < self.grand_sum / 2:\n for i in range(self.vertices):\n if i not in t2_nodes:\n t1_nodes.append(i)\n t1_total = self.grand_sum - t2_total\n print('t2_nodes', t2_nodes)\n print('t2_total', t2_total)\n return t1_total, t2_total\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print('###' * 10)\n print('FINAL tree1_total: ', tree1_total)\n print('FINAL tree2_total: ', tree2_total)\n print('FINAL tree3_total: ', tree3_total)\n print('###' * 10)\n if (tree1_total == tree2_total or tree1_total == tree3_total or \n tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n n = self.graph[entry]\n while n != None:\n edge = [entry, n.identifier, -1]\n if n.identifier <= entry:\n n = n.next\n skipped += 1\n continue\n print('##MAIN##. 
SPLIT POINT EDGE: ', edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(\n tree1_nodes, tree2_nodes, edge)\n print('ORIGINALS: ', tree1_total, tree2_total)\n if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(\n tree1_total, tree2_total) > 2 * self.grand_sum / 3:\n n = n.next\n continue\n if tree1_total > tree2_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree1_nodes, 1, edge[1])\n elif tree2_total > tree1_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree2_nodes, 2, edge[0])\n elif tree1_total == tree2_total:\n ret = tree1_total\n found = True\n else:\n found = True\n if found:\n break\n n = n.next\n if found:\n break\n return ret\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):\n ret, found = -1, False\n global skipped\n consumed = []\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while n != None:\n if n.identifier in consumed:\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print('2. SPLIT POINT EDGE: ', edge)\n print('tree1_total', tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _, tree3_total = self.split_and_compute_tree_sum(temp_nodes,\n tree3_nodes, edge, True)\n if t == 1:\n ret, found = self.check(tree1_total - tree3_total,\n tree2_total, tree3_total)\n elif t == 2:\n ret, found = self.check(tree1_total, tree2_total -\n tree3_total, tree3_total)\n if found:\n break\n n = n.next\n if found:\n break\n return ret, found\n\n\n<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1, 2, 2, 1, 1]\n edges = [[1, 2], [1, 3], [3, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test9(self):\n expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), expected)\n print('SKIPPED', skipped)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Node(object):\n \"\"\"docstring for Node\"\"\"\n\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1] - 1\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print('Vertex:', i)\n while node != None:\n print(node.value, node.identifier)\n node = node.next\n print('<<' * 20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n if start_node == None:\n return nodes\n while start_node != None:\n if start_node.identifier == edge[0\n ] or start_node.identifier == edge[2\n ] or start_node.identifier in nodes:\n print('skipping ', start_node.identifier)\n else:\n print('adding ', start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n def split_and_compute_tree_sum(self, t1_nodes=[], t2_nodes=[], edge=[],\n ton=False):\n t1_total = 0\n t2_total = 0\n total = [0]\n start_node = self.graph[edge[1]]\n if start_node.next != None:\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n if len(t2_nodes) == 0 and edge[1] != edge[2]:\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n t2_total = total[0]\n if not ton and t2_total < self.grand_sum / 2:\n for i in range(self.vertices):\n if i not in t2_nodes:\n t1_nodes.append(i)\n t1_total = self.grand_sum - t2_total\n print('t2_nodes', t2_nodes)\n print('t2_total', t2_total)\n return t1_total, t2_total\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print('###' * 10)\n print('FINAL tree1_total: ', tree1_total)\n print('FINAL tree2_total: ', tree2_total)\n print('FINAL tree3_total: ', tree3_total)\n print('###' * 10)\n if (tree1_total == tree2_total or tree1_total == tree3_total or \n tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n n = self.graph[entry]\n while n != None:\n edge = [entry, n.identifier, -1]\n if n.identifier <= entry:\n n = n.next\n skipped += 1\n continue\n print('##MAIN##. 
SPLIT POINT EDGE: ', edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(\n tree1_nodes, tree2_nodes, edge)\n print('ORIGINALS: ', tree1_total, tree2_total)\n if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(\n tree1_total, tree2_total) > 2 * self.grand_sum / 3:\n n = n.next\n continue\n if tree1_total > tree2_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree1_nodes, 1, edge[1])\n elif tree2_total > tree1_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree2_nodes, 2, edge[0])\n elif tree1_total == tree2_total:\n ret = tree1_total\n found = True\n else:\n found = True\n if found:\n break\n n = n.next\n if found:\n break\n return ret\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):\n ret, found = -1, False\n global skipped\n consumed = []\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while n != None:\n if n.identifier in consumed:\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print('2. SPLIT POINT EDGE: ', edge)\n print('tree1_total', tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _, tree3_total = self.split_and_compute_tree_sum(temp_nodes,\n tree3_nodes, edge, True)\n if t == 1:\n ret, found = self.check(tree1_total - tree3_total,\n tree2_total, tree3_total)\n elif t == 2:\n ret, found = self.check(tree1_total, tree2_total -\n tree3_total, tree3_total)\n if found:\n break\n n = n.next\n if found:\n break\n return ret, found\n\n\ndef balancedForest(values, edges):\n mygraph = Graph(values, edges)\n mygraph.build_adjacency_list()\n mygraph.print_graph()\n return mygraph.split_tree_into_two()\n\n\n<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1, 2, 2, 1, 1]\n edges = [[1, 2], [1, 3], [3, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test9(self):\n expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), expected)\n print('SKIPPED', skipped)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Node(object):\n \"\"\"docstring for Node\"\"\"\n\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1] - 1\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print('Vertex:', i)\n while node != None:\n print(node.value, node.identifier)\n node = node.next\n print('<<' * 20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n if start_node == None:\n return nodes\n while start_node != None:\n if start_node.identifier == edge[0\n ] or start_node.identifier == edge[2\n ] or start_node.identifier in nodes:\n print('skipping ', start_node.identifier)\n else:\n print('adding ', start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n def split_and_compute_tree_sum(self, t1_nodes=[], t2_nodes=[], edge=[],\n ton=False):\n t1_total = 0\n t2_total = 0\n total = [0]\n start_node = self.graph[edge[1]]\n if start_node.next != None:\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n if len(t2_nodes) == 0 and edge[1] != edge[2]:\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n t2_total = total[0]\n if not ton and t2_total < self.grand_sum / 2:\n for i in range(self.vertices):\n if i not in t2_nodes:\n t1_nodes.append(i)\n t1_total = self.grand_sum - t2_total\n print('t2_nodes', t2_nodes)\n print('t2_total', t2_total)\n return t1_total, t2_total\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print('###' * 10)\n print('FINAL tree1_total: ', tree1_total)\n print('FINAL tree2_total: ', tree2_total)\n print('FINAL tree3_total: ', tree3_total)\n print('###' * 10)\n if (tree1_total == tree2_total or tree1_total == tree3_total or \n tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if [tree1_total, tree2_total, tree3_total].count(mx) >= 2:\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n n = self.graph[entry]\n while n != None:\n edge = [entry, n.identifier, -1]\n if n.identifier <= entry:\n n = n.next\n skipped += 1\n continue\n print('##MAIN##. 
SPLIT POINT EDGE: ', edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(\n tree1_nodes, tree2_nodes, edge)\n print('ORIGINALS: ', tree1_total, tree2_total)\n if min(tree1_total, tree2_total) < self.grand_sum / 3 or max(\n tree1_total, tree2_total) > 2 * self.grand_sum / 3:\n n = n.next\n continue\n if tree1_total > tree2_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree1_nodes, 1, edge[1])\n elif tree2_total > tree1_total:\n ret, found = self.find_third_tree(tree1_total,\n tree2_total, tree2_nodes, 2, edge[0])\n elif tree1_total == tree2_total:\n ret = tree1_total\n found = True\n else:\n found = True\n if found:\n break\n n = n.next\n if found:\n break\n return ret\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t=1, m=0):\n ret, found = -1, False\n global skipped\n consumed = []\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while n != None:\n if n.identifier in consumed:\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print('2. SPLIT POINT EDGE: ', edge)\n print('tree1_total', tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _, tree3_total = self.split_and_compute_tree_sum(temp_nodes,\n tree3_nodes, edge, True)\n if t == 1:\n ret, found = self.check(tree1_total - tree3_total,\n tree2_total, tree3_total)\n elif t == 2:\n ret, found = self.check(tree1_total, tree2_total -\n tree3_total, tree3_total)\n if found:\n break\n n = n.next\n if found:\n break\n return ret, found\n\n\ndef balancedForest(values, edges):\n mygraph = Graph(values, edges)\n mygraph.build_adjacency_list()\n mygraph.print_graph()\n return mygraph.split_tree_into_two()\n\n\n<mask token>\n\n\nclass BalancedForestTest(unittest.TestCase):\n\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4, 5], [1, 2], [1, 3], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1, 2, 2, 1, 1]\n edges = [[1, 2], [1, 3], [3, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1, 3, 5]\n edges = [[1, 3], [1, 2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test9(self):\n expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), expected)\n print('SKIPPED', skipped)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "skipped = 0\n\nclass Node(object):\n \"\"\"docstring for Node\"\"\"\n def __init__(self, value, indentifier):\n super(Node, self).__init__()\n self.value = value\n self.identifier = indentifier\n self.next = None\n\n\nclass Graph(object):\n \"\"\"docstring for Graph\"\"\"\n def __init__(self, values, edges):\n super(Graph, self).__init__()\n self.node_values = values\n self.vertices = len(values)\n self.edges = edges\n self.graph = [None] * self.vertices\n # self.edges.sort()\n self.grand_sum = sum(self.node_values)\n\n def build_adjacency_list(self):\n for edge in self.edges:\n fro = edge[0] - 1\n to = edge[1]- 1\n\n # Adding the node to the source node\n node = Node(self.node_values[to], to)\n node.next = self.graph[fro]\n self.graph[fro] = node\n\n # Adding the source node to the destination as \n # it is the undirected graph \n node = Node(self.node_values[fro], fro)\n node.next = self.graph[to]\n self.graph[to] = node\n\n \n def print_graph(self):\n for i in range(self.vertices):\n node = self.graph[i]\n print(\"Vertex:\", i)\n while(node!=None):\n print(node.value, node.identifier)\n node = node.next\n print(\"<<\"*20)\n\n def get_tree_nodes(self, start_node, nodes, edge, total):\n\n if(start_node==None):\n return nodes\n\n while(start_node!=None):\n if(start_node.identifier==edge[0] or start_node.identifier==edge[2] or (start_node.identifier in nodes)):\n print(\"skipping \", start_node.identifier)\n else:\n print(\"adding \", start_node.identifier)\n nodes.append(start_node.identifier)\n total[0] += start_node.value\n next_n = self.graph[start_node.identifier]\n self.get_tree_nodes(next_n, nodes, edge, total)\n start_node = start_node.next\n return nodes\n\n\n def split_and_compute_tree_sum(self, t1_nodes = [], t2_nodes = [], edge=[], ton = False):\n t1_total = 0\n t2_total = 0\n total = [0]\n \n start_node = self.graph[edge[1]]\n if(start_node.next != None):\n t2_nodes = self.get_tree_nodes(start_node, t2_nodes, edge, total)\n\n if(len(t2_nodes)==0 and edge[1]!=edge[2]):\n t2_nodes.append(edge[1])\n total[0] += self.node_values[edge[1]]\n\n t2_total = total[0]\n if(not ton and t2_total < self.grand_sum/2):\n for i in range(self.vertices):\n if(i not in t2_nodes):\n t1_nodes.append(i)\n\n t1_total = self.grand_sum - t2_total\n\n print(\"t2_nodes\", t2_nodes)\n print(\"t2_total\", t2_total)\n\n return t1_total, t2_total\n\n\n def check(self, tree1_total, tree2_total, tree3_total):\n print(\"###\"*10)\n print(\"FINAL tree1_total: \", tree1_total)\n print(\"FINAL tree2_total: \", tree2_total)\n print(\"FINAL tree3_total: \", tree3_total)\n print(\"###\"*10)\n\n if (tree1_total == tree2_total) or (tree1_total == tree3_total) or (tree2_total == tree3_total):\n mx = max(tree1_total, tree2_total, tree3_total)\n if([tree1_total, tree2_total, tree3_total].count(mx) >= 2):\n ret = mx - min(tree1_total, tree2_total, tree3_total)\n return ret, True\n return -1, False\n\n def split_tree_into_two(self):\n ret = -1\n found = False\n global skipped\n\n for entry in range(self.vertices):\n tree1_nodes = []\n tree2_nodes = []\n tree3_nodes = []\n temp_nodes = []\n\n n = self.graph[entry]\n while(n!=None):\n edge = [entry, n.identifier, -1]\n if(n.identifier <= entry):\n n = n.next\n skipped += 1\n continue\n print(\"##MAIN##. 
SPLIT POINT EDGE: \", edge)\n tree1_nodes = []\n tree2_nodes = []\n tree1_total, tree2_total = self.split_and_compute_tree_sum(tree1_nodes, tree2_nodes, edge)\n print(\"ORIGINALS: \", tree1_total, tree2_total)\n if(min(tree1_total, tree2_total) < self.grand_sum/3 or (max(tree1_total, tree2_total) > (2*self.grand_sum)/3)):\n n = n.next\n continue\n\n if(tree1_total > tree2_total):\n ret, found = self.find_third_tree(tree1_total, tree2_total,tree1_nodes, 1, edge[1])\n elif(tree2_total > tree1_total):\n ret, found = self.find_third_tree(tree1_total, tree2_total,tree2_nodes, 2, edge[0])\n elif (tree1_total == tree2_total):\n ret = tree1_total\n found = True\n else:\n found = True\n if(found):\n break\n n = n.next\n if(found):\n break\n return ret\n\n\n def find_third_tree(self, tree1_total, tree2_total, nodes, t = 1, m=0):\n\n ret , found = -1, False\n global skipped\n consumed = []\n\n for i in range(len(nodes)):\n skip_n = nodes[i]\n consumed.append(skip_n)\n n = self.graph[skip_n]\n while(n!=None):\n if(n.identifier in consumed):\n n = n.next\n skipped += 1\n continue\n edge = [skip_n, n.identifier, m]\n print(\"2. SPLIT POINT EDGE: \", edge)\n print(\"tree1_total\",tree1_total)\n tree3_nodes = []\n temp_nodes = []\n _,tree3_total = self.split_and_compute_tree_sum(temp_nodes, tree3_nodes, edge, True)\n if(t==1):\n ret , found = self.check(tree1_total - tree3_total, tree2_total, tree3_total)\n elif(t==2):\n ret , found = self.check(tree1_total, tree2_total - tree3_total, tree3_total)\n if(found):\n break\n n = n.next\n if(found):\n break\n\n return ret, found\n\n\ndef balancedForest(values, edges):\n mygraph = Graph(values, edges)\n mygraph.build_adjacency_list()\n mygraph.print_graph()\n return mygraph.split_tree_into_two()\n\nimport unittest\n\nclass BalancedForestTest(unittest.TestCase):\n def test1(self):\n expected = 10\n c = [1, 1, 1, 18, 10, 11, 5, 6]\n edges = [[1, 2], [1, 4], [2, 3], [1, 8], [8, 7], [7, 6], [5, 7]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test2(self):\n expected = 13\n c = [12, 7, 11, 17, 20, 10]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test3(self):\n expected = 19\n c = [15, 12, 8, 14, 13]\n edges = [[4,5],[1,2],[1,3],[1,4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test4(self):\n expected = 2\n c = [1,2,2,1,1]\n edges = [[1,2],[1,3],[3,5],[1,4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test5(self):\n expected = -1\n c = [1,3,5]\n edges = [[1,3],[1,2]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test6(self):\n expected = -1\n c = [7, 7, 4, 1, 1, 1]\n edges = [(1, 2), (3, 1), (2, 4), (2, 5), (2, 6)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test7(self):\n expected = 0\n c = [1, 3, 4, 4]\n edges = [(1, 2), (1, 3), (1, 4)]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test8(self):\n expected = 297\n c = [100, 99, 98, 100, 99, 98]\n edges = [[1, 2], [2, 3], [4, 5], [6, 5], [1, 4]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n def test9(self):\n expected = 4\n c = [12, 10, 8, 12, 14, 12]\n edges = [[1, 2], [1, 3], [1, 4], [2, 5], [4, 6]]\n self.assertEqual(balancedForest(c, edges), expected)\n\n print(\"SKIPPED\", skipped)\n\n\nif __name__ == '__main__':\n unittest.main()",
"step-ids": [
7,
22,
24,
25,
28
]
}
|
[
7,
22,
24,
25,
28
] |
import random
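# Note: raw_input() below is Python 2 syntax; under Python 3 this would be input().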
x = int(raw_input('Please supply a number: '))
y = int(raw_input('Please supply a second number: '))
z = random.randint(x, y)
print(z)
|
normal
|
{
"blob_id": "104c49941a79948749b27217a0c728f19435f77a",
"index": 643,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(z)\n",
"step-3": "<mask token>\nx = int(raw_input('Please supply a number: '))\ny = int(raw_input('Please supply a second number: '))\nz = random.randint(x, y)\nprint(z)\n",
"step-4": "import random\nx = int(raw_input('Please supply a number: '))\ny = int(raw_input('Please supply a second number: '))\nz = random.randint(x, y)\nprint(z)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# author: MSJ
# date: 2021/3/11
# desc: bubble sort
def bubble_sort(arr):
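    """Sort arr in place using bubble sort and return it."""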
for i in range(1, len(arr)):
for j in range(0, len(arr) - i):
if arr[j] > arr[j + 1]:
tmp = arr[j]
arr[j] = arr[j + 1]
arr[j + 1] = tmp
return arr
if __name__ == '__main__':
r1 = bubble_sort([0, 5, 3, 2, 9, 20, 6, 7, 3])
print(r1)
|
normal
|
{
"blob_id": "6682c864a3da6f2c894a3a40359726b4eb97d040",
"index": 6109,
"step-1": "<mask token>\n",
"step-2": "def bubble_sort(arr):\n for i in range(1, len(arr)):\n for j in range(0, len(arr) - i):\n if arr[j] > arr[j + 1]:\n tmp = arr[j]\n arr[j] = arr[j + 1]\n arr[j + 1] = tmp\n return arr\n\n\n<mask token>\n",
"step-3": "def bubble_sort(arr):\n for i in range(1, len(arr)):\n for j in range(0, len(arr) - i):\n if arr[j] > arr[j + 1]:\n tmp = arr[j]\n arr[j] = arr[j + 1]\n arr[j + 1] = tmp\n return arr\n\n\nif __name__ == '__main__':\n r1 = bubble_sort([0, 5, 3, 2, 9, 20, 6, 7, 3])\n print(r1)\n",
"step-4": "#!/usr/bin/python\n# -*- coding: UTF-8 -*-\n# author: MSJ\n# date: 2021/3/11\n# desc:冒泡排序\n\n\ndef bubble_sort(arr):\n for i in range(1, len(arr)):\n for j in range(0, len(arr) - i):\n if arr[j] > arr[j + 1]:\n tmp = arr[j]\n arr[j] = arr[j + 1]\n arr[j + 1] = tmp\n\n return arr\n\n\nif __name__ == '__main__':\n r1 = bubble_sort([0, 5, 3, 2, 9, 20, 6, 7, 3])\n print(r1)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from partyparrot import convert_with_alphabet_emojis, convert
def test_convert_char_to_alphabet():
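    """Empty input, whitespace, and unsupported characters should all convert predictably."""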
assert convert_with_alphabet_emojis("") == ""
assert convert_with_alphabet_emojis(" ") == " "
assert convert_with_alphabet_emojis("\n") == "\n"
assert (
convert_with_alphabet_emojis(" one two")
== " :alphabet-white-o::alphabet-white-n::alphabet-white-e: "
":alphabet-white-t::alphabet-white-w::alphabet-white-o:"
)
assert convert_with_alphabet_emojis("1_'") == ":alphabet-white-question:" * 3
assert (
convert_with_alphabet_emojis("?!")
== ":alphabet-white-question::alphabet-white-exclamation:"
)
def test_convert():
assert (
convert("Hello world", ":icon:", ":nbsp")
== ":icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::icon::icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::icon::icon::icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon:"
)
def test_convert_wrong_char():
txt = convert("@!*", ":icon:", ":nbsp")
assert (
txt
== ":icon::icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::icon::icon:\n:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:"
)
|
normal
|
{
"blob_id": "c3bfcb971a6b08cdf98200bd2b2a8fe6ac2dd083",
"index": 6969,
"step-1": "<mask token>\n\n\ndef test_convert_wrong_char():\n txt = convert('@!*', ':icon:', ':nbsp')\n assert txt == \"\"\":icon::icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::icon::icon:\n:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\"\"\"\n",
"step-2": "<mask token>\n\n\ndef test_convert():\n assert convert('Hello world', ':icon:', ':nbsp') == \"\"\":icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::icon::icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::icon::icon::icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon:\"\"\"\n\n\ndef test_convert_wrong_char():\n txt = convert('@!*', ':icon:', ':nbsp')\n assert txt == \"\"\":icon::icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::icon::icon:\n:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\"\"\"\n",
"step-3": "<mask token>\n\n\ndef test_convert_char_to_alphabet():\n assert convert_with_alphabet_emojis('') == ''\n assert convert_with_alphabet_emojis(' ') == ' '\n assert convert_with_alphabet_emojis('\\n') == '\\n'\n assert convert_with_alphabet_emojis(' one two'\n ) == ' :alphabet-white-o::alphabet-white-n::alphabet-white-e: :alphabet-white-t::alphabet-white-w::alphabet-white-o:'\n assert convert_with_alphabet_emojis(\"1_'\"\n ) == ':alphabet-white-question:' * 3\n assert convert_with_alphabet_emojis('?!'\n ) == ':alphabet-white-question::alphabet-white-exclamation:'\n\n\ndef test_convert():\n assert convert('Hello world', ':icon:', ':nbsp') == \"\"\":icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::icon::icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::icon::icon::icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon:\"\"\"\n\n\ndef test_convert_wrong_char():\n txt = convert('@!*', ':icon:', ':nbsp')\n assert txt == \"\"\":icon::icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::icon::icon:\n:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\"\"\"\n",
"step-4": "from partyparrot import convert_with_alphabet_emojis, convert\n\n\ndef test_convert_char_to_alphabet():\n assert convert_with_alphabet_emojis('') == ''\n assert convert_with_alphabet_emojis(' ') == ' '\n assert convert_with_alphabet_emojis('\\n') == '\\n'\n assert convert_with_alphabet_emojis(' one two'\n ) == ' :alphabet-white-o::alphabet-white-n::alphabet-white-e: :alphabet-white-t::alphabet-white-w::alphabet-white-o:'\n assert convert_with_alphabet_emojis(\"1_'\"\n ) == ':alphabet-white-question:' * 3\n assert convert_with_alphabet_emojis('?!'\n ) == ':alphabet-white-question::alphabet-white-exclamation:'\n\n\ndef test_convert():\n assert convert('Hello world', ':icon:', ':nbsp') == \"\"\":icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::icon::icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::icon::icon::icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\n:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon:\"\"\"\n\n\ndef test_convert_wrong_char():\n txt = convert('@!*', ':icon:', ':nbsp')\n assert txt == \"\"\":icon::icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::icon::icon:\n:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\n\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\"\"\"\n",
"step-5": "from partyparrot import convert_with_alphabet_emojis, convert\n\n\ndef test_convert_char_to_alphabet():\n assert convert_with_alphabet_emojis(\"\") == \"\"\n assert convert_with_alphabet_emojis(\" \") == \" \"\n assert convert_with_alphabet_emojis(\"\\n\") == \"\\n\"\n assert (\n convert_with_alphabet_emojis(\" one two\")\n == \" :alphabet-white-o::alphabet-white-n::alphabet-white-e: \"\n \":alphabet-white-t::alphabet-white-w::alphabet-white-o:\"\n )\n assert convert_with_alphabet_emojis(\"1_'\") == \":alphabet-white-question:\" * 3\n assert (\n convert_with_alphabet_emojis(\"?!\")\n == \":alphabet-white-question::alphabet-white-exclamation:\"\n )\n\n\ndef test_convert():\n assert (\n convert(\"Hello world\", \":icon:\", \":nbsp\")\n == \":icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::icon::icon:\\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\\n:icon::icon::icon::icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\\n:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:icon:\\n:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon::icon::nbsp:nbsp:icon::icon::nbsp:nbsp:nbsp:nbsp:nbsp:nbsp:icon::nbsp:icon::nbsp:nbsp:nbsp:icon::icon::nbsp:nbsp:icon::nbsp:nbsp:icon::nbsp:icon::icon::icon::icon::nbsp:icon::icon::icon:\"\n )\n\n\ndef test_convert_wrong_char():\n txt = convert(\"@!*\", \":icon:\", \":nbsp\")\n assert (\n txt\n == \":icon::icon::icon::nbsp:nbsp:icon::icon::icon::nbsp:nbsp:icon::icon::icon:\\n:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\\n\\n:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon::nbsp:nbsp:nbsp:nbsp:icon:\"\n )\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import helper
__author__ = 'AdrianLeo'
helper.greeting("Hey, dummy")
|
normal
|
{
"blob_id": "03156992355a756b2ae38735a98251eb611d4245",
"index": 2611,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nhelper.greeting('Hey, dummy')\n",
"step-3": "<mask token>\n__author__ = 'AdrianLeo'\nhelper.greeting('Hey, dummy')\n",
"step-4": "import helper\n__author__ = 'AdrianLeo'\nhelper.greeting('Hey, dummy')\n",
"step-5": "import helper\n\n__author__ = 'AdrianLeo'\n\nhelper.greeting(\"Hey, dummy\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
a = ord(input().rstrip())
if a < 97:
print('A')
else:
print('a')
'''
ord(A)=65
ord(Z)=90
ord(a)=97
ord(z)=122
'''
|
normal
|
{
"blob_id": "e7c454b2bf6cf324e1e318e374e07a83812c978b",
"index": 2381,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif a < 97:\n print('A')\nelse:\n print('a')\n<mask token>\n",
"step-3": "a = ord(input().rstrip())\nif a < 97:\n print('A')\nelse:\n print('a')\n<mask token>\n",
"step-4": "a = ord(input().rstrip())\n\nif a < 97:\n print('A')\nelse:\n print('a')\n \n\n''' \n\nord(A)=65\nord(Z)=90\nord(a)=97\nord(z)=122\n\n'''\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import torch
import torch.nn as nn
from model.common import UpsampleBlock, conv_, SELayer
def wrapper(args):
act = None
if args.act == 'relu':
act = nn.ReLU(True)
elif args.act == 'leak_relu':
act = nn.LeakyReLU(0.2, True)
elif args.act is None:
act = None
else:
raise NotImplementedError
return AFN(in_c=args.n_colors, out_c=args.n_colors, scale=args.scale, n_feats=args.n_feats, act=act)
class AFB_0(nn.Module):
def __init__(self, channels, n_blocks=2, act=nn.ReLU(True)):
super(AFB_0, self).__init__()
self.op = []
for _ in range(n_blocks):
self.op.append(conv_(channels, channels))
self.op.append(act)
self.op = nn.Sequential(*self.op)
def forward(self, x):
x = x + self.op(x)
return x
class AFB_L1(nn.Module):
def __init__(self, channels, n_l0=3, act=nn.ReLU(True)):
super(AFB_L1, self).__init__()
self.n = n_l0
self.convs_ = nn.ModuleList()
for _ in range(n_l0):
self.convs_.append(
AFB_0(channels, 2, act)
)
self.LFF = nn.Sequential(
SELayer(channels * n_l0, 16),
nn.Conv2d(channels * n_l0, channels, 1, padding=0, stride=1),
)
def forward(self, x):
res = []
ox = x
for i in range(self.n):
x = self.convs_[i](x)
res.append(x)
res = self.LFF(torch.cat(res, 1))
x = res + ox
return x
class AFB_L2(nn.Module):
def __init__(self, channels, n_l1=4, act=nn.ReLU(True)):
super(AFB_L2, self).__init__()
self.n = n_l1
self.convs_ = nn.ModuleList()
for _ in range(n_l1):
self.convs_.append(
AFB_L1(channels, 3, act)
)
self.LFF = nn.Sequential(
SELayer(channels * n_l1, 16),
nn.Conv2d(channels * n_l1, channels, 1, padding=0, stride=1),
)
def forward(self, x):
res = []
ox = x
for i in range(self.n):
x = self.convs_[i](x)
res.append(x)
res = self.LFF(torch.cat(res, 1))
x = res + ox
return x
class AFB_L3(nn.Module):
def __init__(self, channels, n_l2=4, act=nn.ReLU(True)):
super(AFB_L3, self).__init__()
self.n = n_l2
self.convs_ = nn.ModuleList()
for _ in range(n_l2):
self.convs_.append(
AFB_L2(channels, 4, act)
)
self.LFF = nn.Sequential(
SELayer(channels * n_l2, 16),
nn.Conv2d(channels * n_l2, channels, 1, padding=0, stride=1),
)
def forward(self, x):
res = []
ox = x
for i in range(self.n):
x = self.convs_[i](x)
res.append(x)
res = self.LFF(torch.cat(res, 1))
x = res + ox
return x
class AFN(nn.Module):
def __init__(self, in_c=3, out_c=3, scale=4, n_feats=128, n_l3=3, act=nn.LeakyReLU(0.2, True)):
super(AFN, self).__init__()
self.head = conv_(in_c, n_feats)
self.n = n_l3
self.AFBs = nn.ModuleList()
for i in range(n_l3):
self.AFBs.append(
AFB_L3(channels=n_feats, n_l2=4, act=act)
)
self.GFF = nn.Sequential(*[
SELayer(n_feats * n_l3),
conv_(n_feats * n_l3, n_feats, 1, padding=0, stride=1),
])
self.tail = nn.Sequential(*[
UpsampleBlock(scale, n_feats, kernel_size=3, stride=1, bias=True, act=act),
conv_(n_feats, out_c)
])
def forward(self, x):
res = []
x = self.head(x)
for i in range(self.n):
x = self.AFBs[i](x)
res.append(x)
res = self.GFF(torch.cat(res, 1))
x = res + x
x = self.tail(x)
return x
if __name__ == "__main__":
import numpy as np
import torch
import torchsummary
model = AFN(in_c=3, out_c=3, scale=8, n_feats=128, n_l3=3, act=nn.LeakyReLU(0.2, True))
print(torchsummary.summary(model, (3, 24, 24), device='cpu'))
x = np.random.uniform(0, 1, [2, 3, 24, 24]).astype(np.float32)
x = torch.tensor(x)
# loss = nn.L1Loss()
# Adam = torch.optim.Adam(model.parameters(), lr=1e-3, betas=(0.99, 0.999))
with torch.autograd.profiler.profile(use_cuda=True) as prof:
y = model(x)
print(prof)
print(y.shape)
|
normal
|
{
"blob_id": "b2c0ef4a0af12b267a54a7ae3fed9edeab2fb879",
"index": 6570,
"step-1": "<mask token>\n\n\nclass AFB_L1(nn.Module):\n <mask token>\n <mask token>\n\n\nclass AFB_L2(nn.Module):\n\n def __init__(self, channels, n_l1=4, act=nn.ReLU(True)):\n super(AFB_L2, self).__init__()\n self.n = n_l1\n self.convs_ = nn.ModuleList()\n for _ in range(n_l1):\n self.convs_.append(AFB_L1(channels, 3, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l1, 16), nn.Conv2d(\n channels * n_l1, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L3(nn.Module):\n\n def __init__(self, channels, n_l2=4, act=nn.ReLU(True)):\n super(AFB_L3, self).__init__()\n self.n = n_l2\n self.convs_ = nn.ModuleList()\n for _ in range(n_l2):\n self.convs_.append(AFB_L2(channels, 4, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l2, 16), nn.Conv2d(\n channels * n_l2, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFN(nn.Module):\n\n def __init__(self, in_c=3, out_c=3, scale=4, n_feats=128, n_l3=3, act=\n nn.LeakyReLU(0.2, True)):\n super(AFN, self).__init__()\n self.head = conv_(in_c, n_feats)\n self.n = n_l3\n self.AFBs = nn.ModuleList()\n for i in range(n_l3):\n self.AFBs.append(AFB_L3(channels=n_feats, n_l2=4, act=act))\n self.GFF = nn.Sequential(*[SELayer(n_feats * n_l3), conv_(n_feats *\n n_l3, n_feats, 1, padding=0, stride=1)])\n self.tail = nn.Sequential(*[UpsampleBlock(scale, n_feats,\n kernel_size=3, stride=1, bias=True, act=act), conv_(n_feats,\n out_c)])\n\n def forward(self, x):\n res = []\n x = self.head(x)\n for i in range(self.n):\n x = self.AFBs[i](x)\n res.append(x)\n res = self.GFF(torch.cat(res, 1))\n x = res + x\n x = self.tail(x)\n return x\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass AFB_0(nn.Module):\n\n def __init__(self, channels, n_blocks=2, act=nn.ReLU(True)):\n super(AFB_0, self).__init__()\n self.op = []\n for _ in range(n_blocks):\n self.op.append(conv_(channels, channels))\n self.op.append(act)\n self.op = nn.Sequential(*self.op)\n <mask token>\n\n\nclass AFB_L1(nn.Module):\n\n def __init__(self, channels, n_l0=3, act=nn.ReLU(True)):\n super(AFB_L1, self).__init__()\n self.n = n_l0\n self.convs_ = nn.ModuleList()\n for _ in range(n_l0):\n self.convs_.append(AFB_0(channels, 2, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l0, 16), nn.Conv2d(\n channels * n_l0, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L2(nn.Module):\n\n def __init__(self, channels, n_l1=4, act=nn.ReLU(True)):\n super(AFB_L2, self).__init__()\n self.n = n_l1\n self.convs_ = nn.ModuleList()\n for _ in range(n_l1):\n self.convs_.append(AFB_L1(channels, 3, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l1, 16), nn.Conv2d(\n channels * n_l1, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L3(nn.Module):\n\n def __init__(self, channels, n_l2=4, act=nn.ReLU(True)):\n super(AFB_L3, self).__init__()\n self.n = n_l2\n self.convs_ = nn.ModuleList()\n for _ in range(n_l2):\n self.convs_.append(AFB_L2(channels, 4, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l2, 16), nn.Conv2d(\n channels * n_l2, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFN(nn.Module):\n\n def __init__(self, in_c=3, out_c=3, scale=4, n_feats=128, n_l3=3, act=\n nn.LeakyReLU(0.2, True)):\n super(AFN, self).__init__()\n self.head = conv_(in_c, n_feats)\n self.n = n_l3\n self.AFBs = nn.ModuleList()\n for i in range(n_l3):\n self.AFBs.append(AFB_L3(channels=n_feats, n_l2=4, act=act))\n self.GFF = nn.Sequential(*[SELayer(n_feats * n_l3), conv_(n_feats *\n n_l3, n_feats, 1, padding=0, stride=1)])\n self.tail = nn.Sequential(*[UpsampleBlock(scale, n_feats,\n kernel_size=3, stride=1, bias=True, act=act), conv_(n_feats,\n out_c)])\n\n def forward(self, x):\n res = []\n x = self.head(x)\n for i in range(self.n):\n x = self.AFBs[i](x)\n res.append(x)\n res = self.GFF(torch.cat(res, 1))\n x = res + x\n x = self.tail(x)\n return x\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef wrapper(args):\n act = None\n if args.act == 'relu':\n act = nn.ReLU(True)\n elif args.act == 'leak_relu':\n act = nn.LeakyReLU(0.2, True)\n elif args.act is None:\n act = None\n else:\n raise NotImplementedError\n return AFN(in_c=args.n_colors, out_c=args.n_colors, scale=args.scale,\n n_feats=args.n_feats, act=act)\n\n\nclass AFB_0(nn.Module):\n\n def __init__(self, channels, n_blocks=2, act=nn.ReLU(True)):\n super(AFB_0, self).__init__()\n self.op = []\n for _ in range(n_blocks):\n self.op.append(conv_(channels, channels))\n self.op.append(act)\n self.op = nn.Sequential(*self.op)\n\n def forward(self, x):\n x = x + self.op(x)\n return x\n\n\nclass AFB_L1(nn.Module):\n\n def __init__(self, channels, n_l0=3, act=nn.ReLU(True)):\n super(AFB_L1, self).__init__()\n self.n = n_l0\n self.convs_ = nn.ModuleList()\n for _ in range(n_l0):\n self.convs_.append(AFB_0(channels, 2, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l0, 16), nn.Conv2d(\n channels * n_l0, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L2(nn.Module):\n\n def __init__(self, channels, n_l1=4, act=nn.ReLU(True)):\n super(AFB_L2, self).__init__()\n self.n = n_l1\n self.convs_ = nn.ModuleList()\n for _ in range(n_l1):\n self.convs_.append(AFB_L1(channels, 3, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l1, 16), nn.Conv2d(\n channels * n_l1, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L3(nn.Module):\n\n def __init__(self, channels, n_l2=4, act=nn.ReLU(True)):\n super(AFB_L3, self).__init__()\n self.n = n_l2\n self.convs_ = nn.ModuleList()\n for _ in range(n_l2):\n self.convs_.append(AFB_L2(channels, 4, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l2, 16), nn.Conv2d(\n channels * n_l2, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFN(nn.Module):\n\n def __init__(self, in_c=3, out_c=3, scale=4, n_feats=128, n_l3=3, act=\n nn.LeakyReLU(0.2, True)):\n super(AFN, self).__init__()\n self.head = conv_(in_c, n_feats)\n self.n = n_l3\n self.AFBs = nn.ModuleList()\n for i in range(n_l3):\n self.AFBs.append(AFB_L3(channels=n_feats, n_l2=4, act=act))\n self.GFF = nn.Sequential(*[SELayer(n_feats * n_l3), conv_(n_feats *\n n_l3, n_feats, 1, padding=0, stride=1)])\n self.tail = nn.Sequential(*[UpsampleBlock(scale, n_feats,\n kernel_size=3, stride=1, bias=True, act=act), conv_(n_feats,\n out_c)])\n\n def forward(self, x):\n res = []\n x = self.head(x)\n for i in range(self.n):\n x = self.AFBs[i](x)\n res.append(x)\n res = self.GFF(torch.cat(res, 1))\n x = res + x\n x = self.tail(x)\n return x\n\n\nif __name__ == '__main__':\n import numpy as np\n import torch\n import torchsummary\n model = AFN(in_c=3, out_c=3, scale=8, n_feats=128, n_l3=3, act=nn.\n LeakyReLU(0.2, True))\n print(torchsummary.summary(model, (3, 24, 24), device='cpu'))\n x = np.random.uniform(0, 1, [2, 3, 24, 24]).astype(np.float32)\n x = torch.tensor(x)\n with torch.autograd.profiler.profile(use_cuda=True) as prof:\n y = model(x)\n print(prof)\n print(y.shape)\n",
"step-4": "import torch\nimport torch.nn as nn\nfrom model.common import UpsampleBlock, conv_, SELayer\n\n\ndef wrapper(args):\n act = None\n if args.act == 'relu':\n act = nn.ReLU(True)\n elif args.act == 'leak_relu':\n act = nn.LeakyReLU(0.2, True)\n elif args.act is None:\n act = None\n else:\n raise NotImplementedError\n return AFN(in_c=args.n_colors, out_c=args.n_colors, scale=args.scale,\n n_feats=args.n_feats, act=act)\n\n\nclass AFB_0(nn.Module):\n\n def __init__(self, channels, n_blocks=2, act=nn.ReLU(True)):\n super(AFB_0, self).__init__()\n self.op = []\n for _ in range(n_blocks):\n self.op.append(conv_(channels, channels))\n self.op.append(act)\n self.op = nn.Sequential(*self.op)\n\n def forward(self, x):\n x = x + self.op(x)\n return x\n\n\nclass AFB_L1(nn.Module):\n\n def __init__(self, channels, n_l0=3, act=nn.ReLU(True)):\n super(AFB_L1, self).__init__()\n self.n = n_l0\n self.convs_ = nn.ModuleList()\n for _ in range(n_l0):\n self.convs_.append(AFB_0(channels, 2, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l0, 16), nn.Conv2d(\n channels * n_l0, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L2(nn.Module):\n\n def __init__(self, channels, n_l1=4, act=nn.ReLU(True)):\n super(AFB_L2, self).__init__()\n self.n = n_l1\n self.convs_ = nn.ModuleList()\n for _ in range(n_l1):\n self.convs_.append(AFB_L1(channels, 3, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l1, 16), nn.Conv2d(\n channels * n_l1, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L3(nn.Module):\n\n def __init__(self, channels, n_l2=4, act=nn.ReLU(True)):\n super(AFB_L3, self).__init__()\n self.n = n_l2\n self.convs_ = nn.ModuleList()\n for _ in range(n_l2):\n self.convs_.append(AFB_L2(channels, 4, act))\n self.LFF = nn.Sequential(SELayer(channels * n_l2, 16), nn.Conv2d(\n channels * n_l2, channels, 1, padding=0, stride=1))\n\n def forward(self, x):\n res = []\n ox = x\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFN(nn.Module):\n\n def __init__(self, in_c=3, out_c=3, scale=4, n_feats=128, n_l3=3, act=\n nn.LeakyReLU(0.2, True)):\n super(AFN, self).__init__()\n self.head = conv_(in_c, n_feats)\n self.n = n_l3\n self.AFBs = nn.ModuleList()\n for i in range(n_l3):\n self.AFBs.append(AFB_L3(channels=n_feats, n_l2=4, act=act))\n self.GFF = nn.Sequential(*[SELayer(n_feats * n_l3), conv_(n_feats *\n n_l3, n_feats, 1, padding=0, stride=1)])\n self.tail = nn.Sequential(*[UpsampleBlock(scale, n_feats,\n kernel_size=3, stride=1, bias=True, act=act), conv_(n_feats,\n out_c)])\n\n def forward(self, x):\n res = []\n x = self.head(x)\n for i in range(self.n):\n x = self.AFBs[i](x)\n res.append(x)\n res = self.GFF(torch.cat(res, 1))\n x = res + x\n x = self.tail(x)\n return x\n\n\nif __name__ == '__main__':\n import numpy as np\n import torch\n import torchsummary\n model = AFN(in_c=3, out_c=3, scale=8, n_feats=128, n_l3=3, act=nn.\n LeakyReLU(0.2, True))\n print(torchsummary.summary(model, (3, 24, 24), device='cpu'))\n x = np.random.uniform(0, 1, [2, 3, 24, 24]).astype(np.float32)\n x = torch.tensor(x)\n with torch.autograd.profiler.profile(use_cuda=True) as prof:\n y = 
model(x)\n print(prof)\n print(y.shape)\n",
"step-5": "import torch\nimport torch.nn as nn\nfrom model.common import UpsampleBlock, conv_, SELayer\n\ndef wrapper(args):\n act = None\n if args.act == 'relu':\n act = nn.ReLU(True)\n elif args.act == 'leak_relu':\n act = nn.LeakyReLU(0.2, True)\n elif args.act is None:\n act = None\n else:\n raise NotImplementedError\n\n return AFN(in_c=args.n_colors, out_c=args.n_colors, scale=args.scale, n_feats=args.n_feats, act=act)\n\nclass AFB_0(nn.Module):\n def __init__(self, channels, n_blocks=2, act=nn.ReLU(True)):\n super(AFB_0, self).__init__()\n self.op = []\n for _ in range(n_blocks):\n self.op.append(conv_(channels, channels))\n self.op.append(act)\n\n self.op = nn.Sequential(*self.op)\n\n def forward(self, x):\n x = x + self.op(x)\n return x\n\n\nclass AFB_L1(nn.Module):\n def __init__(self, channels, n_l0=3, act=nn.ReLU(True)):\n super(AFB_L1, self).__init__()\n\n self.n = n_l0\n self.convs_ = nn.ModuleList()\n for _ in range(n_l0):\n self.convs_.append(\n AFB_0(channels, 2, act)\n )\n\n self.LFF = nn.Sequential(\n SELayer(channels * n_l0, 16),\n nn.Conv2d(channels * n_l0, channels, 1, padding=0, stride=1),\n )\n\n def forward(self, x):\n res = []\n ox = x\n\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L2(nn.Module):\n def __init__(self, channels, n_l1=4, act=nn.ReLU(True)):\n super(AFB_L2, self).__init__()\n\n self.n = n_l1\n self.convs_ = nn.ModuleList()\n for _ in range(n_l1):\n self.convs_.append(\n AFB_L1(channels, 3, act)\n )\n\n self.LFF = nn.Sequential(\n SELayer(channels * n_l1, 16),\n nn.Conv2d(channels * n_l1, channels, 1, padding=0, stride=1),\n )\n\n def forward(self, x):\n res = []\n ox = x\n\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFB_L3(nn.Module):\n def __init__(self, channels, n_l2=4, act=nn.ReLU(True)):\n super(AFB_L3, self).__init__()\n\n self.n = n_l2\n self.convs_ = nn.ModuleList()\n for _ in range(n_l2):\n self.convs_.append(\n AFB_L2(channels, 4, act)\n )\n\n self.LFF = nn.Sequential(\n SELayer(channels * n_l2, 16),\n nn.Conv2d(channels * n_l2, channels, 1, padding=0, stride=1),\n )\n\n def forward(self, x):\n res = []\n ox = x\n\n for i in range(self.n):\n x = self.convs_[i](x)\n res.append(x)\n res = self.LFF(torch.cat(res, 1))\n x = res + ox\n return x\n\n\nclass AFN(nn.Module):\n def __init__(self, in_c=3, out_c=3, scale=4, n_feats=128, n_l3=3, act=nn.LeakyReLU(0.2, True)):\n super(AFN, self).__init__()\n\n self.head = conv_(in_c, n_feats)\n\n self.n = n_l3\n self.AFBs = nn.ModuleList()\n for i in range(n_l3):\n self.AFBs.append(\n AFB_L3(channels=n_feats, n_l2=4, act=act)\n )\n\n self.GFF = nn.Sequential(*[\n SELayer(n_feats * n_l3),\n conv_(n_feats * n_l3, n_feats, 1, padding=0, stride=1),\n ])\n\n self.tail = nn.Sequential(*[\n UpsampleBlock(scale, n_feats, kernel_size=3, stride=1, bias=True, act=act),\n conv_(n_feats, out_c)\n ])\n\n def forward(self, x):\n res = []\n x = self.head(x)\n\n for i in range(self.n):\n x = self.AFBs[i](x)\n res.append(x)\n\n res = self.GFF(torch.cat(res, 1))\n x = res + x\n\n x = self.tail(x)\n return x\n\nif __name__ == \"__main__\":\n import numpy as np\n import torch\n import torchsummary\n\n model = AFN(in_c=3, out_c=3, scale=8, n_feats=128, n_l3=3, act=nn.LeakyReLU(0.2, True))\n print(torchsummary.summary(model, (3, 24, 24), device='cpu'))\n\n x = np.random.uniform(0, 1, [2, 3, 24, 24]).astype(np.float32)\n x = 
torch.tensor(x)\n\n # loss = nn.L1Loss()\n # Adam = torch.optim.Adam(model.parameters(), lr=1e-3, betas=(0.99, 0.999))\n with torch.autograd.profiler.profile(use_cuda=True) as prof:\n y = model(x)\n print(prof)\n print(y.shape)\n",
"step-ids": [
10,
14,
17,
18,
19
]
}
|
[
10,
14,
17,
18,
19
] |
# -*- coding: utf-8 -*-
import scrapy
class Heiyan2Spider(scrapy.Spider):
name = 'heiyan2'
allowed_domains = ['heiyan.com']
start_urls = ['http://heiyan.com/']
def parse(self, response):
pass
|
normal
|
{
"blob_id": "d13c6d71bb871496b0c6ad2451a2f561484e7c68",
"index": 9634,
"step-1": "<mask token>\n\n\nclass Heiyan2Spider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Heiyan2Spider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n\n def parse(self, response):\n pass\n",
"step-3": "<mask token>\n\n\nclass Heiyan2Spider(scrapy.Spider):\n name = 'heiyan2'\n allowed_domains = ['heiyan.com']\n start_urls = ['http://heiyan.com/']\n\n def parse(self, response):\n pass\n",
"step-4": "import scrapy\n\n\nclass Heiyan2Spider(scrapy.Spider):\n name = 'heiyan2'\n allowed_domains = ['heiyan.com']\n start_urls = ['http://heiyan.com/']\n\n def parse(self, response):\n pass\n",
"step-5": "# -*- coding: utf-8 -*-\nimport scrapy\n\n\nclass Heiyan2Spider(scrapy.Spider):\n name = 'heiyan2'\n allowed_domains = ['heiyan.com']\n start_urls = ['http://heiyan.com/']\n\n def parse(self, response):\n pass\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#! /usr/bin/env python
t = int(raw_input())
for i in xrange(1, t+1):
N = raw_input()
N1 = N
track = set()
if N == '0':
print "Case #%s: " % i + "INSOMNIA"
continue
count = 2
while len(track) !=10:
temp = set(x for x in N1)
track = temp | track
N1 = str(count*int(N))
count +=1
print "Case #%s: %d" % (i, int(N1) - int(N))
|
normal
|
{
"blob_id": "8c6b7032c85354740d59aa91108ad8b5279e1d45",
"index": 2570,
"step-1": "#! /usr/bin/env python\n\nt = int(raw_input())\nfor i in xrange(1, t+1):\n N = raw_input()\n N1 = N\n track = set()\n if N == '0':\n print \"Case #%s: \" % i + \"INSOMNIA\"\n continue\n count = 2\n while len(track) !=10:\n temp = set(x for x in N1)\n track = temp | track\n N1 = str(count*int(N))\n count +=1\n print \"Case #%s: %d\" % (i, int(N1) - int(N))\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from dataclasses import dataclass
from typing import Optional
@dataclass
class Music(object):
url: str
title: Optional[str] = None
|
normal
|
{
"blob_id": "2506c5b042f04d1490ba2199a71e38829d4a0adc",
"index": 5738,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@dataclass\nclass Music(object):\n url: str\n title: Optional[str] = None\n",
"step-3": "from dataclasses import dataclass\nfrom typing import Optional\n\n\n@dataclass\nclass Music(object):\n url: str\n title: Optional[str] = None\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import math
#variables for current GPS Lat / Lon Readings
currentLat = 41.391240
currentLon = -73.956217
destLat = 41.393035
destLon = -73.953398
#variables for current UTM coordinates
currentX = 587262
currentY = 4582716
destX = 587499
destY = 4582919
#declination angle based on geographic location
#see #https://www.ngdc.noaa.gov/geomag-web/
#needed for "grid-to-magnetic" angle
declinationAngle = 13
########### Functions ############################################################################
def haversine(currentLat,currentLon, destLat, destLon):
#Calculate the great circle distance between two points
#on the earth (specified in decimal degrees - Lat/Lon coords) using Haversine Formula
haversineDistance = math.acos( math.sin(currentLat*math.pi/180)*math.sin(destLat*math.pi/180) + math.cos(currentLat*math.pi/180)*math.cos(destLat*math.pi/180)*math.cos(destLon*math.pi/180-currentLon*math.pi/180) ) * 6371000
haversineAngle = ( math.atan2(math.cos(currentLat)*math.sin(destLat)-math.sin(currentLat)*math.cos(destLat)*math.cos(destLon-currentLon), math.sin(destLon-currentLon)*math.cos(destLat)) ) * (180/math.pi)
#transform angle perspective - Haversine calculates angle with the perspective that 90 degrees points North
#for magnetic field reference, we need North to correspond to 0 degrees, so subtract 90
magBearing = haversineAngle - 90
#account for declination angle (Westerly declination, so add offset)
magBearing = magBearing + declinationAngle
#account for angle wrap
if magBearing < 0:
magBearing = magBearing + 360
elif magBearing > 360:
magBearing = magBearing - 360
return haversineDistance, magBearing
def distAndBearing_utm(currentX, currentY, destX, destY):
#calculate distance & bearing using UTM coordinates (x,y)-type coordinates
dx = destX - currentX
dy = destY - currentY
#calculate distance between the two points
utm_dist = math.sqrt( (dx)**2 + (dy)**2 )
#calculate the angle between the points
utm_angle = math.atan(dy/float(dx)) * (180/math.pi)
#If we treat the current (X,Y) point as the origin, then destination (X,Y) lies in a quadrant (either I,II,III, or IV), because ->
#the dx and dy (above) results in a + or - difference, which indicates the destination quadrant.
#The quadrant will determine the type of angle adjustment needed magnetically (based on N,S,E, and W heading)
if dx > 0 and dy > 0: #then destination is in quadrant I (between N and E); atan angle is positive
utm_angleTF = 90 - utm_angle
elif dx < 0 and dy > 0: #then destination is in quadrant II (between N and W)
#atan angle calculation is negative; (get rid of neg. sign, then add to 270 deg-West)
utm_angleTF = 270 + (-1 * utm_angle)
elif dx < 0 and dy < 0: #then destination is in quadrant III (between (W and S); atan angle is positive
utm_angleTF = 270 - utm_angle
else: # dx > 0 and dy <0, then quad IV (between S and E)
#angle calculation is negative; (get rid of neg. sign, then add to 90 deg-East)
utm_angleTF = 90 + (-1 * utm_angle)
#account for declination angle (Westerly declination angle, so add offset)
magUtmBearing = utm_angleTF + declinationAngle #add offset due to Westerly declination
#account for angle wrap
if magUtmBearing < 0:
magUtmBearing = magUtmBearing + 360
elif magUtmBearing > 360:
magUtmBearing = magUtmBearing - 360
return utm_dist, magUtmBearing
####### MAIN ########################################################
dist, bearing = haversine(currentLat,currentLon, destLat, destLon)
print "Distance & Bearing based on Lat/Lon is: ", dist, bearing
utm_dist, utm_angle = distAndBearing_utm(currentX, currentY, destX, destY)
print "Distance & Bearing based on UTM is: ", utm_dist, utm_angle
|
normal
|
{
"blob_id": "180d28ac77b6ff4488b3fd9c17a9ee4571e33631",
"index": 2694,
"step-1": "import math\n\n#variables for current GPS Lat / Lon Readings\ncurrentLat = 41.391240\ncurrentLon = -73.956217\ndestLat = 41.393035\ndestLon = -73.953398\n\n#variables for current UTM coordinates\ncurrentX = 587262\ncurrentY = 4582716\ndestX = 587499\ndestY = 4582919\n\n#declination angle based on geographic location\n#see #https://www.ngdc.noaa.gov/geomag-web/\n#needed for \"grid-to-magnetic\" angle\ndeclinationAngle = 13\n\n\n########### Functions ############################################################################\ndef haversine(currentLat,currentLon, destLat, destLon):\n #Calculate the great circle distance between two points \n #on the earth (specified in decimal degrees - Lat/Lon coords) using Haversine Formula\n \n haversineDistance = math.acos( math.sin(currentLat*math.pi/180)*math.sin(destLat*math.pi/180) + math.cos(currentLat*math.pi/180)*math.cos(destLat*math.pi/180)*math.cos(destLon*math.pi/180-currentLon*math.pi/180) ) * 6371000\n \n haversineAngle = ( math.atan2(math.cos(currentLat)*math.sin(destLat)-math.sin(currentLat)*math.cos(destLat)*math.cos(destLon-currentLon), math.sin(destLon-currentLon)*math.cos(destLat)) ) * (180/math.pi) \n \n #transform angle perspective - Haversine calculates angle with the perspective that 90 degrees points North\n #for magnetic field reference, we need North to correspond to 0 degrees, so subtract 90\n magBearing = haversineAngle - 90\n #account for declination angle (Westerly declination, so add offset)\n magBearing = magBearing + declinationAngle \n #account for angle wrap\n if magBearing < 0:\n magBearing = magBearing + 360 \n elif magBearing > 360:\n magBearing = magBearing - 360\n return haversineDistance, magBearing \n \ndef distAndBearing_utm(currentX, currentY, destX, destY):\n #calculate distance & bearing using UTM coordinates (x,y)-type coordinates\n dx = destX - currentX\n dy = destY - currentY\n #calculate distance between the two points\n utm_dist = math.sqrt( (dx)**2 + (dy)**2 )\n #calculate the angle between the points\n utm_angle = math.atan(dy/float(dx)) * (180/math.pi)\n \n #If we treat the current (X,Y) point as the origin, then destination (X,Y) lies in a quadrant (either I,II,III, or IV), because ->\n #the dx and dy (above) results in a + or - difference, which indicates the destination quadrant.\n #The quadrant will determine the type of angle adjustment needed magnetically (based on N,S,E, and W heading)\n if dx > 0 and dy > 0: #then destination is in quadrant I (between N and E); atan angle is positive\n utm_angleTF = 90 - utm_angle\n elif dx < 0 and dy > 0: #then destination is in quadrant II (between N and W)\n #atan angle calculation is negative; (get rid of neg. sign, then add to 270 deg-West)\n utm_angleTF = 270 + (-1 * utm_angle)\n elif dx < 0 and dy < 0: #then destination is in quadrant III (between (W and S); atan angle is positive\n utm_angleTF = 270 - utm_angle\n else: # dx > 0 and dy <0, then quad IV (between S and E)\n #angle calculation is negative; (get rid of neg. 
sign, then add to 90 deg-East)\n utm_angleTF = 90 + (-1 * utm_angle)\n \n #account for declination angle (Westerly declination angle, so add offset)\n magUtmBearing = utm_angleTF + declinationAngle #add offset due to Westerly declination \n #account for angle wrap\n if magUtmBearing < 0:\n magUtmBearing = magUtmBearing + 360 \n elif magUtmBearing > 360:\n magUtmBearing = magUtmBearing - 360\n \n return utm_dist, magUtmBearing \n\n\n####### MAIN ########################################################\ndist, bearing = haversine(currentLat,currentLon, destLat, destLon)\nprint \"Distance & Bearning based on Lat/Lon is: \", dist, bearing\nutm_dist, utm_angle = distAndBearing_utm(currentX, currentY, destX, destY)\nprint \"Distance & Bearning based on UTM is: \", utm_dist, utm_angle\n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#python -m marbles test_clean_rangos.py
import unittest
from marbles.mixins import mixins
import pandas as pd
import requests
from pyspark.sql import SparkSession
import psycopg2 as pg
import pandas as pd
from pyspark.sql.types import StructType, StructField, StringType
from src.features.build_features import get_clean_data
class Test_Ranges_Case(unittest.TestCase, mixins.CategoricalMixins):
'''
Checks that the values of the rangoatrasohoras column
are the expected ones
'''
def test_that_all_ranges_are_present(self):
df = get_clean_data()
RANGOS=['cancelled', '0-1.5', '1.5-3.5' ,'3.5-']
self.assertCategoricalLevelsEqual(list(df.toPandas()["rangoatrasohoras"].unique()), RANGOS)
|
normal
|
{
"blob_id": "f7c6990b4ddbe5ef9d79ef2326e60cdf1f761db3",
"index": 4542,
"step-1": "<mask token>\n\n\nclass Test_Ranges_Case(unittest.TestCase, mixins.CategoricalMixins):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Test_Ranges_Case(unittest.TestCase, mixins.CategoricalMixins):\n <mask token>\n\n def test_that_all_ranges_are_present(self):\n df = get_clean_data()\n RANGOS = ['cancelled', '0-1.5', '1.5-3.5', '3.5-']\n self.assertCategoricalLevelsEqual(list(df.toPandas()[\n 'rangoatrasohoras'].unique()), RANGOS)\n",
"step-3": "<mask token>\n\n\nclass Test_Ranges_Case(unittest.TestCase, mixins.CategoricalMixins):\n \"\"\"\n Verifica que los valores de la columna rangoatrasohoras \n sean los indicados\n\n \"\"\"\n\n def test_that_all_ranges_are_present(self):\n df = get_clean_data()\n RANGOS = ['cancelled', '0-1.5', '1.5-3.5', '3.5-']\n self.assertCategoricalLevelsEqual(list(df.toPandas()[\n 'rangoatrasohoras'].unique()), RANGOS)\n",
"step-4": "import unittest\nfrom marbles.mixins import mixins\nimport pandas as pd\nimport requests\nfrom pyspark.sql import SparkSession\nimport psycopg2 as pg\nimport pandas as pd\nfrom pyspark.sql.types import StructType, StructField, StringType\nfrom src.features.build_features import get_clean_data\n\n\nclass Test_Ranges_Case(unittest.TestCase, mixins.CategoricalMixins):\n \"\"\"\n Verifica que los valores de la columna rangoatrasohoras \n sean los indicados\n\n \"\"\"\n\n def test_that_all_ranges_are_present(self):\n df = get_clean_data()\n RANGOS = ['cancelled', '0-1.5', '1.5-3.5', '3.5-']\n self.assertCategoricalLevelsEqual(list(df.toPandas()[\n 'rangoatrasohoras'].unique()), RANGOS)\n",
"step-5": "#python -m marbles test_clean_rangos.py\n\nimport unittest\nfrom marbles.mixins import mixins\nimport pandas as pd\nimport requests\nfrom pyspark.sql import SparkSession\nimport psycopg2 as pg\nimport pandas as pd\nfrom pyspark.sql.types import StructType, StructField, StringType\nfrom src.features.build_features import get_clean_data\n\nclass Test_Ranges_Case(unittest.TestCase, mixins.CategoricalMixins):\n\t'''\n Verifica que los valores de la columna rangoatrasohoras \n sean los indicados\n\n '''\n\n\tdef test_that_all_ranges_are_present(self):\n\n\n\t\tdf = get_clean_data()\n\t\tRANGOS=['cancelled', '0-1.5', '1.5-3.5' ,'3.5-']\n\t\tself.assertCategoricalLevelsEqual(list(df.toPandas()[\"rangoatrasohoras\"].unique()), RANGOS)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import os
import time
import requests
from dotenv import load_dotenv
from twilio.rest import Client
load_dotenv()
BASE_URL = 'https://api.vk.com/method/users.get'
def get_status(user_id):
params = {'user_ids': user_id, 'V': os.getenv('API_V'), 'access_token':
os.getenv('ACCESS_TOKEN'), 'fields': 'online'}
friends_status = requests.post(BASE_URL, params=params)
return friends_status.json()['response'][0]['online']
def send_sms(sms_text):
account_sid = os.getenv('TWILIO_ACCOUNT_SID')
auth_token = os.getenv('TWILIO_AUTH_TOKEN')
client = Client(account_sid, auth_token)
message = client.messages.create(body='Joing the dark side', from_=os.
getenv('NUMBER_FROM'), media_url=['https://demo.twilio.com/owl.png'
], to=os.getenv('NUMBER_TO'))
return message.sid
if __name__ == '__main__':
vk_id = input('Введите id ')
while True:
if get_status(vk_id) == 1:
send_sms(f'{vk_id} сейчас онлайн!')
break
time.sleep(5)
|
normal
|
{
"blob_id": "6b2a9e8c6e95f52e9ebf999b81f9170fc669cce4",
"index": 6329,
"step-1": "<mask token>\n\n\ndef send_sms(sms_text):\n account_sid = os.getenv('TWILIO_ACCOUNT_SID')\n auth_token = os.getenv('TWILIO_AUTH_TOKEN')\n client = Client(account_sid, auth_token)\n message = client.messages.create(body='Joing the dark side', from_=os.\n getenv('NUMBER_FROM'), media_url=['https://demo.twilio.com/owl.png'\n ], to=os.getenv('NUMBER_TO'))\n return message.sid\n\n\n<mask token>\n",
"step-2": "<mask token>\nload_dotenv()\n<mask token>\n\n\ndef get_status(user_id):\n params = {'user_ids': user_id, 'V': os.getenv('API_V'), 'access_token':\n os.getenv('ACCESS_TOKEN'), 'fields': 'online'}\n friends_status = requests.post(BASE_URL, params=params)\n return friends_status.json()['response'][0]['online']\n\n\ndef send_sms(sms_text):\n account_sid = os.getenv('TWILIO_ACCOUNT_SID')\n auth_token = os.getenv('TWILIO_AUTH_TOKEN')\n client = Client(account_sid, auth_token)\n message = client.messages.create(body='Joing the dark side', from_=os.\n getenv('NUMBER_FROM'), media_url=['https://demo.twilio.com/owl.png'\n ], to=os.getenv('NUMBER_TO'))\n return message.sid\n\n\nif __name__ == '__main__':\n vk_id = input('Введите id ')\n while True:\n if get_status(vk_id) == 1:\n send_sms(f'{vk_id} сейчас онлайн!')\n break\n time.sleep(5)\n",
"step-3": "<mask token>\nload_dotenv()\nBASE_URL = 'https://api.vk.com/method/users.get'\n\n\ndef get_status(user_id):\n params = {'user_ids': user_id, 'V': os.getenv('API_V'), 'access_token':\n os.getenv('ACCESS_TOKEN'), 'fields': 'online'}\n friends_status = requests.post(BASE_URL, params=params)\n return friends_status.json()['response'][0]['online']\n\n\ndef send_sms(sms_text):\n account_sid = os.getenv('TWILIO_ACCOUNT_SID')\n auth_token = os.getenv('TWILIO_AUTH_TOKEN')\n client = Client(account_sid, auth_token)\n message = client.messages.create(body='Joing the dark side', from_=os.\n getenv('NUMBER_FROM'), media_url=['https://demo.twilio.com/owl.png'\n ], to=os.getenv('NUMBER_TO'))\n return message.sid\n\n\nif __name__ == '__main__':\n vk_id = input('Введите id ')\n while True:\n if get_status(vk_id) == 1:\n send_sms(f'{vk_id} сейчас онлайн!')\n break\n time.sleep(5)\n",
"step-4": "import os\nimport time\nimport requests\nfrom dotenv import load_dotenv\nfrom twilio.rest import Client\nload_dotenv()\nBASE_URL = 'https://api.vk.com/method/users.get'\n\n\ndef get_status(user_id):\n params = {'user_ids': user_id, 'V': os.getenv('API_V'), 'access_token':\n os.getenv('ACCESS_TOKEN'), 'fields': 'online'}\n friends_status = requests.post(BASE_URL, params=params)\n return friends_status.json()['response'][0]['online']\n\n\ndef send_sms(sms_text):\n account_sid = os.getenv('TWILIO_ACCOUNT_SID')\n auth_token = os.getenv('TWILIO_AUTH_TOKEN')\n client = Client(account_sid, auth_token)\n message = client.messages.create(body='Joing the dark side', from_=os.\n getenv('NUMBER_FROM'), media_url=['https://demo.twilio.com/owl.png'\n ], to=os.getenv('NUMBER_TO'))\n return message.sid\n\n\nif __name__ == '__main__':\n vk_id = input('Введите id ')\n while True:\n if get_status(vk_id) == 1:\n send_sms(f'{vk_id} сейчас онлайн!')\n break\n time.sleep(5)\n",
"step-5": null,
"step-ids": [
1,
3,
4,
5
]
}
|
[
1,
3,
4,
5
] |
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements command to describe a given guest policy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.compute.os_config import osconfig_utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute.os_config import resource_args
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Describe(base.DescribeCommand):
"""Describe the given guest policy.
## EXAMPLES
To describe the guest policy 'policy1' in the project 'project1', run:
$ {command} policy1 --project=project1
To describe the guest policy 'policy1' in the organization '12345', run:
$ {command} policy1 --organization=12345
"""
@staticmethod
def Args(parser):
resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')
def Run(self, args):
guest_policy_ref = args.CONCEPTS.guest_policy.Parse()
release_track = self.ReleaseTrack()
client = osconfig_utils.GetClientInstance(release_track)
messages = osconfig_utils.GetClientMessages(release_track)
guest_policy_type = guest_policy_ref.type_
guest_policy_name = guest_policy_ref.result.RelativeName()
if guest_policy_type == type(guest_policy_type).organization_guest_policy:
request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.organizations_guestPolicies
elif guest_policy_type == type(guest_policy_type).folder_guest_policy:
request = messages.OsconfigFoldersGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.folders_guestPolicies
else:
request = messages.OsconfigProjectsGuestPoliciesGetRequest(
name=guest_policy_name)
service = client.projects_guestPolicies
return service.Get(request)
|
normal
|
{
"blob_id": "d6a677ed537f6493bb43bd893f3096dc058e27da",
"index": 507,
"step-1": "<mask token>\n\n\[email protected](base.ReleaseTrack.ALPHA)\nclass Describe(base.DescribeCommand):\n <mask token>\n <mask token>\n\n def Run(self, args):\n guest_policy_ref = args.CONCEPTS.guest_policy.Parse()\n release_track = self.ReleaseTrack()\n client = osconfig_utils.GetClientInstance(release_track)\n messages = osconfig_utils.GetClientMessages(release_track)\n guest_policy_type = guest_policy_ref.type_\n guest_policy_name = guest_policy_ref.result.RelativeName()\n if guest_policy_type == type(guest_policy_type\n ).organization_guest_policy:\n request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(\n name=guest_policy_name)\n service = client.organizations_guestPolicies\n elif guest_policy_type == type(guest_policy_type).folder_guest_policy:\n request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=\n guest_policy_name)\n service = client.folders_guestPolicies\n else:\n request = messages.OsconfigProjectsGuestPoliciesGetRequest(name\n =guest_policy_name)\n service = client.projects_guestPolicies\n return service.Get(request)\n",
"step-2": "<mask token>\n\n\[email protected](base.ReleaseTrack.ALPHA)\nclass Describe(base.DescribeCommand):\n <mask token>\n\n @staticmethod\n def Args(parser):\n resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')\n\n def Run(self, args):\n guest_policy_ref = args.CONCEPTS.guest_policy.Parse()\n release_track = self.ReleaseTrack()\n client = osconfig_utils.GetClientInstance(release_track)\n messages = osconfig_utils.GetClientMessages(release_track)\n guest_policy_type = guest_policy_ref.type_\n guest_policy_name = guest_policy_ref.result.RelativeName()\n if guest_policy_type == type(guest_policy_type\n ).organization_guest_policy:\n request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(\n name=guest_policy_name)\n service = client.organizations_guestPolicies\n elif guest_policy_type == type(guest_policy_type).folder_guest_policy:\n request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=\n guest_policy_name)\n service = client.folders_guestPolicies\n else:\n request = messages.OsconfigProjectsGuestPoliciesGetRequest(name\n =guest_policy_name)\n service = client.projects_guestPolicies\n return service.Get(request)\n",
"step-3": "<mask token>\n\n\[email protected](base.ReleaseTrack.ALPHA)\nclass Describe(base.DescribeCommand):\n \"\"\"Describe the given guest policy.\n\n ## EXAMPLES\n\n To describe the guest policy 'policy1' in the project 'project1', run:\n\n $ {command} policy1 --project=project1\n\n To describe the guest policy 'policy1' in the organization '12345', run:\n\n $ {command} policy1 --organization=12345\n\n \"\"\"\n\n @staticmethod\n def Args(parser):\n resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')\n\n def Run(self, args):\n guest_policy_ref = args.CONCEPTS.guest_policy.Parse()\n release_track = self.ReleaseTrack()\n client = osconfig_utils.GetClientInstance(release_track)\n messages = osconfig_utils.GetClientMessages(release_track)\n guest_policy_type = guest_policy_ref.type_\n guest_policy_name = guest_policy_ref.result.RelativeName()\n if guest_policy_type == type(guest_policy_type\n ).organization_guest_policy:\n request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(\n name=guest_policy_name)\n service = client.organizations_guestPolicies\n elif guest_policy_type == type(guest_policy_type).folder_guest_policy:\n request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=\n guest_policy_name)\n service = client.folders_guestPolicies\n else:\n request = messages.OsconfigProjectsGuestPoliciesGetRequest(name\n =guest_policy_name)\n service = client.projects_guestPolicies\n return service.Get(request)\n",
"step-4": "<mask token>\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import unicode_literals\nfrom googlecloudsdk.api_lib.compute.os_config import osconfig_utils\nfrom googlecloudsdk.calliope import base\nfrom googlecloudsdk.command_lib.compute.os_config import resource_args\n\n\[email protected](base.ReleaseTrack.ALPHA)\nclass Describe(base.DescribeCommand):\n \"\"\"Describe the given guest policy.\n\n ## EXAMPLES\n\n To describe the guest policy 'policy1' in the project 'project1', run:\n\n $ {command} policy1 --project=project1\n\n To describe the guest policy 'policy1' in the organization '12345', run:\n\n $ {command} policy1 --organization=12345\n\n \"\"\"\n\n @staticmethod\n def Args(parser):\n resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')\n\n def Run(self, args):\n guest_policy_ref = args.CONCEPTS.guest_policy.Parse()\n release_track = self.ReleaseTrack()\n client = osconfig_utils.GetClientInstance(release_track)\n messages = osconfig_utils.GetClientMessages(release_track)\n guest_policy_type = guest_policy_ref.type_\n guest_policy_name = guest_policy_ref.result.RelativeName()\n if guest_policy_type == type(guest_policy_type\n ).organization_guest_policy:\n request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(\n name=guest_policy_name)\n service = client.organizations_guestPolicies\n elif guest_policy_type == type(guest_policy_type).folder_guest_policy:\n request = messages.OsconfigFoldersGuestPoliciesGetRequest(name=\n guest_policy_name)\n service = client.folders_guestPolicies\n else:\n request = messages.OsconfigProjectsGuestPoliciesGetRequest(name\n =guest_policy_name)\n service = client.projects_guestPolicies\n return service.Get(request)\n",
"step-5": "# -*- coding: utf-8 -*- #\n# Copyright 2019 Google LLC. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Implements command to describe a given guest policy.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import unicode_literals\n\nfrom googlecloudsdk.api_lib.compute.os_config import osconfig_utils\nfrom googlecloudsdk.calliope import base\nfrom googlecloudsdk.command_lib.compute.os_config import resource_args\n\n\[email protected](base.ReleaseTrack.ALPHA)\nclass Describe(base.DescribeCommand):\n \"\"\"Describe the given guest policy.\n\n ## EXAMPLES\n\n To describe the guest policy 'policy1' in the project 'project1', run:\n\n $ {command} policy1 --project=project1\n\n To describe the guest policy 'policy1' in the organization '12345', run:\n\n $ {command} policy1 --organization=12345\n\n \"\"\"\n\n @staticmethod\n def Args(parser):\n resource_args.AddGuestPolicyResourceArg(parser, 'to describe.')\n\n def Run(self, args):\n guest_policy_ref = args.CONCEPTS.guest_policy.Parse()\n\n release_track = self.ReleaseTrack()\n client = osconfig_utils.GetClientInstance(release_track)\n messages = osconfig_utils.GetClientMessages(release_track)\n\n guest_policy_type = guest_policy_ref.type_\n guest_policy_name = guest_policy_ref.result.RelativeName()\n\n if guest_policy_type == type(guest_policy_type).organization_guest_policy:\n request = messages.OsconfigOrganizationsGuestPoliciesGetRequest(\n name=guest_policy_name)\n service = client.organizations_guestPolicies\n elif guest_policy_type == type(guest_policy_type).folder_guest_policy:\n request = messages.OsconfigFoldersGuestPoliciesGetRequest(\n name=guest_policy_name)\n service = client.folders_guestPolicies\n else:\n request = messages.OsconfigProjectsGuestPoliciesGetRequest(\n name=guest_policy_name)\n service = client.projects_guestPolicies\n\n return service.Get(request)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from redstork import PageObject
class AnnotController:
def get_annotations(self, project, page_index):
page = project.doc[page_index]
yield from page.flat_iter()
|
normal
|
{
"blob_id": "6ca2a9040897e49c6407b9b0760240fec93b4df0",
"index": 3067,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AnnotController:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AnnotController:\n\n def get_annotations(self, project, page_index):\n page = project.doc[page_index]\n yield from page.flat_iter()\n",
"step-4": "from redstork import PageObject\n\n\nclass AnnotController:\n\n def get_annotations(self, project, page_index):\n page = project.doc[page_index]\n yield from page.flat_iter()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from flask import Blueprint, request, render_template, session, redirect
log = Blueprint('login', __name__, )
@log.route('/login', methods=['GET', 'POST'])
def login():
print(request.path, )
if request.method == 'GET':
return render_template('exec/login.html')
else:
username = request.form.get('username')
password = request.form.get('password')
if username == 'henry' and password == '123':
session['username'] = 'henry'
return redirect('/detail')
return 'Failed'
|
normal
|
{
"blob_id": "763e2db4eb9ad5953273fb310c8e9714964a39e6",
"index": 9576,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/login', methods=['GET', 'POST'])\ndef login():\n print(request.path)\n if request.method == 'GET':\n return render_template('exec/login.html')\n else:\n username = request.form.get('username')\n password = request.form.get('password')\n if username == 'henry' and password == '123':\n session['username'] = 'henry'\n return redirect('/detail')\n return 'Failed'\n",
"step-3": "<mask token>\nlog = Blueprint('login', __name__)\n\n\[email protected]('/login', methods=['GET', 'POST'])\ndef login():\n print(request.path)\n if request.method == 'GET':\n return render_template('exec/login.html')\n else:\n username = request.form.get('username')\n password = request.form.get('password')\n if username == 'henry' and password == '123':\n session['username'] = 'henry'\n return redirect('/detail')\n return 'Failed'\n",
"step-4": "from flask import Blueprint, request, render_template, session, redirect\nlog = Blueprint('login', __name__)\n\n\[email protected]('/login', methods=['GET', 'POST'])\ndef login():\n print(request.path)\n if request.method == 'GET':\n return render_template('exec/login.html')\n else:\n username = request.form.get('username')\n password = request.form.get('password')\n if username == 'henry' and password == '123':\n session['username'] = 'henry'\n return redirect('/detail')\n return 'Failed'\n",
"step-5": "from flask import Blueprint, request, render_template, session, redirect\n\nlog = Blueprint('login', __name__, )\n\n\[email protected]('/login', methods=['GET', 'POST'])\ndef login():\n print(request.path, )\n if request.method == 'GET':\n return render_template('exec/login.html')\n else:\n username = request.form.get('username')\n password = request.form.get('password')\n if username == 'henry' and password == '123':\n session['username'] = 'henry'\n return redirect('/detail')\n return 'Failed'\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""
Created on Sat May 2 21:31:37 2020
@author: Emmanuel Torres Molina
"""
"""
Exercise 10 of Assignment 2 (TP2) for Circuit Theory II:
A 45 kHz tone with 200 mV amplitude is distorted by a 12 kHz tone
with 2 V amplitude. Design a high-pass filter that attenuates the interfering
signal, such that the residue is no larger than 2% of the 200 mV.
The high-frequency gain must be 0 dB and the maximum attenuation in the
passband must be less than 1 dB. Use the approximation that requires the
fewest stages.
Here the filter is synthesized with a passive RLC structure + a passive RL stage.
"""
import numpy as np
from scipy.signal import TransferFunction as transf_f
import scipy.signal as sig
from splane import bodePlot, pzmap
from matplotlib import pyplot as plt
plt.close ('all')
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Time vector:
t0 = 0.0 # Initial time
tf = 0.005
dt = 0.00005 # Increment
t = np.arange (t0, tf, dt)
# ---------------------------------------------------------------------------
# Tone of interest:
f_t = 45 * 10**3 # Frequency of the tone of interest [Hz]
w_t = 2 * np.pi * f_t # [rad/s]
A_t = 0.2 # Amplitude of the tone [V]
s_t = A_t * np.sin ( w_t * t )
# ---------------------------------------------------------------------------
# Interfering noise:
f_r = 12 * 10**3 # Frequency of the interfering noise [Hz]
w_r = 2 * np.pi * f_r # [rad/s]
A_r= 2 # Amplitude of the noise [V]
r_t = A_r * np.sin ( w_r * t )
sgnal = s_t + r_t
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Circuit element values:
# Stage 1: passive RLC
R1 = 290
C1 = 3.5e-9
L1 = 3.5e-3
k1 = 1
# Stage 2: passive RL
R2 = 700
C2 = 3.5e-9
L2 = 1.03e-3
k2 = 1
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Filter design: we will implement a high-pass filter:
# Specification (template) requirements
alfa_max = 0.9 # Required to be below 1 dB
alfa_min = 54 # so the residue is no larger than 2% of the 200 mV
wp_hp = w_t
ws_hp = w_r
# Normalize the angular frequencies using wp_hp as the norm
wp_hp_norm = wp_hp / wp_hp
ws_hp_norm = ws_hp / wp_hp
w0 = np.sqrt ( 1 / (L1*C1) )
# ---------------------------------------------------------------------------
# Low-pass prototype filter: frequency transformation: w_HP = -1 / w_LP
wp_lp_norm = abs(-1 / wp_hp_norm)
ws_lp_norm = abs(-1 / ws_hp_norm)
# Use the Chebyshev approximation to design the filter:
eps = np.sqrt ( (10 **(alfa_max/10) ) - 1 )
# Filter order
N = np.arccosh ( np.sqrt ( (10**(alfa_min/10) - 1) / eps**2 ) ) / np.arccosh (ws_lp_norm)
N = np.ceil ( N ) # Round up
den1_lp = [1, 0.29, 1]
den2_lp = [1, 0.7, 0.29]
p1_lp = np.roots ( den1_lp )
p2_lp = np.roots ( den2_lp )
my_z_lp = np.array ([])
my_p_lp = np.concatenate ( (p1_lp, p2_lp), axis = None )
my_k_lp = 1 * 0.29
NUM, DEN = sig.zpk2tf ( my_z_lp, my_p_lp, my_k_lp )
NUM_lp, DEN_lp = sig.lp2lp ( NUM, DEN, w0 )
my_tf_lp = transf_f (NUM_lp,DEN_lp)
# ---------------------------------------------------------------------------
# Target filter - high-pass filter:
# Compute W0:
NUM_hp, DEN_hp = sig.lp2hp ( NUM, DEN, w0 )
my_tf_hp = transf_f ( NUM_hp, DEN_hp )
my_z_hp, my_p_hp, my_k_hp = sig.tf2zpk (NUM_hp, DEN_hp )
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Filtering the signal:
t, s_filtrada, x = sig.lsim2 ((my_tf_hp), sgnal, t )
# ---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
# Plot the signals, frequency response, etc.
fig1, axs = plt.subplots(4,1)
axs[0].plot ( t, s_t )
axs[0].grid ('True')
axs[0].set_title ('Señal Original')
axs[0].set_ylim(-0.2,0.2)
axs[0].set_ylabel('[V]')
axs[1].plot ( t, r_t )
axs[1].grid ('True')
axs[1].set_title ('Ruido Interferente')
axs[1].set_ylabel('[V]')
axs[1].set_xlim(0)
axs[2].plot (t, s_t + r_t )
axs[2].grid ('True')
axs[2].set_title ('Señal a Filtrar')
axs[2].set_ylabel('[V]')
axs[2].set_xlim(0)
axs[3].plot (t, s_filtrada )
axs[3].grid ('True')
axs[3].set_title ( 'Señal Filtrada' )
axs[3].set_xlabel ('t[seg]')
axs[3].set_ylabel('[V]')
axs[3].set_ylim(-0.2,0.2)
axs[3].set_xlim(0)
# Frequency response:
bodePlot (my_tf_lp, 'Filtro Prototipo - Low Pass')
pzmap (my_tf_lp)
bodePlot (my_tf_hp, 'Filtro Destino - High Pass')
pzmap (my_tf_hp)
|
normal
|
{
"blob_id": "dd59f3b1d8b17defe4e7f30fec594d01475319d2",
"index": 6211,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.close('all')\n<mask token>\naxs[0].plot(t, s_t)\naxs[0].grid('True')\naxs[0].set_title('Señal Original')\naxs[0].set_ylim(-0.2, 0.2)\naxs[0].set_ylabel('[V]')\naxs[1].plot(t, r_t)\naxs[1].grid('True')\naxs[1].set_title('Ruido Interferente')\naxs[1].set_ylabel('[V]')\naxs[1].set_xlim(0)\naxs[2].plot(t, s_t + r_t)\naxs[2].grid('True')\naxs[2].set_title('Señal a Filtrar')\naxs[2].set_ylabel('[V]')\naxs[2].set_xlim(0)\naxs[3].plot(t, s_filtrada)\naxs[3].grid('True')\naxs[3].set_title('Señal Filtrada')\naxs[3].set_xlabel('t[seg]')\naxs[3].set_ylabel('[V]')\naxs[3].set_ylim(-0.2, 0.2)\naxs[3].set_xlim(0)\nbodePlot(my_tf_lp, 'Filtro Prototipo - Low Pass')\npzmap(my_tf_lp)\nbodePlot(my_tf_hp, 'Filtro Destino - High Pass')\npzmap(my_tf_hp)\n",
"step-3": "<mask token>\nplt.close('all')\nt0 = 0.0\ntf = 0.005\ndt = 5e-05\nt = np.arange(t0, tf, dt)\nf_t = 45 * 10 ** 3\nw_t = 2 * np.pi * f_t\nA_t = 0.2\ns_t = A_t * np.sin(w_t * t)\nf_r = 12 * 10 ** 3\nw_r = 2 * np.pi * f_r\nA_r = 2\nr_t = A_r * np.sin(w_r * t)\nsgnal = s_t + r_t\nR1 = 290\nC1 = 3.5e-09\nL1 = 0.0035\nk1 = 1\nR2 = 700\nC2 = 3.5e-09\nL2 = 0.00103\nk2 = 1\nalfa_max = 0.9\nalfa_min = 54\nwp_hp = w_t\nws_hp = w_r\nwp_hp_norm = wp_hp / wp_hp\nws_hp_norm = ws_hp / wp_hp\nw0 = np.sqrt(1 / (L1 * C1))\nwp_lp_norm = abs(-1 / wp_hp_norm)\nws_lp_norm = abs(-1 / ws_hp_norm)\neps = np.sqrt(10 ** (alfa_max / 10) - 1)\nN = np.arccosh(np.sqrt((10 ** (alfa_min / 10) - 1) / eps ** 2)) / np.arccosh(\n ws_lp_norm)\nN = np.ceil(N)\nden1_lp = [1, 0.29, 1]\nden2_lp = [1, 0.7, 0.29]\np1_lp = np.roots(den1_lp)\np2_lp = np.roots(den2_lp)\nmy_z_lp = np.array([])\nmy_p_lp = np.concatenate((p1_lp, p2_lp), axis=None)\nmy_k_lp = 1 * 0.29\nNUM, DEN = sig.zpk2tf(my_z_lp, my_p_lp, my_k_lp)\nNUM_lp, DEN_lp = sig.lp2lp(NUM, DEN, w0)\nmy_tf_lp = transf_f(NUM_lp, DEN_lp)\nNUM_hp, DEN_hp = sig.lp2hp(NUM, DEN, w0)\nmy_tf_hp = transf_f(NUM_hp, DEN_hp)\nmy_z_hp, my_p_hp, my_k_hp = sig.tf2zpk(NUM_hp, DEN_hp)\nt, s_filtrada, x = sig.lsim2(my_tf_hp, sgnal, t)\nfig1, axs = plt.subplots(4, 1)\naxs[0].plot(t, s_t)\naxs[0].grid('True')\naxs[0].set_title('Señal Original')\naxs[0].set_ylim(-0.2, 0.2)\naxs[0].set_ylabel('[V]')\naxs[1].plot(t, r_t)\naxs[1].grid('True')\naxs[1].set_title('Ruido Interferente')\naxs[1].set_ylabel('[V]')\naxs[1].set_xlim(0)\naxs[2].plot(t, s_t + r_t)\naxs[2].grid('True')\naxs[2].set_title('Señal a Filtrar')\naxs[2].set_ylabel('[V]')\naxs[2].set_xlim(0)\naxs[3].plot(t, s_filtrada)\naxs[3].grid('True')\naxs[3].set_title('Señal Filtrada')\naxs[3].set_xlabel('t[seg]')\naxs[3].set_ylabel('[V]')\naxs[3].set_ylim(-0.2, 0.2)\naxs[3].set_xlim(0)\nbodePlot(my_tf_lp, 'Filtro Prototipo - Low Pass')\npzmap(my_tf_lp)\nbodePlot(my_tf_hp, 'Filtro Destino - High Pass')\npzmap(my_tf_hp)\n",
"step-4": "<mask token>\nimport numpy as np\nfrom scipy.signal import TransferFunction as transf_f\nimport scipy.signal as sig\nfrom splane import bodePlot, pzmap\nfrom matplotlib import pyplot as plt\nplt.close('all')\nt0 = 0.0\ntf = 0.005\ndt = 5e-05\nt = np.arange(t0, tf, dt)\nf_t = 45 * 10 ** 3\nw_t = 2 * np.pi * f_t\nA_t = 0.2\ns_t = A_t * np.sin(w_t * t)\nf_r = 12 * 10 ** 3\nw_r = 2 * np.pi * f_r\nA_r = 2\nr_t = A_r * np.sin(w_r * t)\nsgnal = s_t + r_t\nR1 = 290\nC1 = 3.5e-09\nL1 = 0.0035\nk1 = 1\nR2 = 700\nC2 = 3.5e-09\nL2 = 0.00103\nk2 = 1\nalfa_max = 0.9\nalfa_min = 54\nwp_hp = w_t\nws_hp = w_r\nwp_hp_norm = wp_hp / wp_hp\nws_hp_norm = ws_hp / wp_hp\nw0 = np.sqrt(1 / (L1 * C1))\nwp_lp_norm = abs(-1 / wp_hp_norm)\nws_lp_norm = abs(-1 / ws_hp_norm)\neps = np.sqrt(10 ** (alfa_max / 10) - 1)\nN = np.arccosh(np.sqrt((10 ** (alfa_min / 10) - 1) / eps ** 2)) / np.arccosh(\n ws_lp_norm)\nN = np.ceil(N)\nden1_lp = [1, 0.29, 1]\nden2_lp = [1, 0.7, 0.29]\np1_lp = np.roots(den1_lp)\np2_lp = np.roots(den2_lp)\nmy_z_lp = np.array([])\nmy_p_lp = np.concatenate((p1_lp, p2_lp), axis=None)\nmy_k_lp = 1 * 0.29\nNUM, DEN = sig.zpk2tf(my_z_lp, my_p_lp, my_k_lp)\nNUM_lp, DEN_lp = sig.lp2lp(NUM, DEN, w0)\nmy_tf_lp = transf_f(NUM_lp, DEN_lp)\nNUM_hp, DEN_hp = sig.lp2hp(NUM, DEN, w0)\nmy_tf_hp = transf_f(NUM_hp, DEN_hp)\nmy_z_hp, my_p_hp, my_k_hp = sig.tf2zpk(NUM_hp, DEN_hp)\nt, s_filtrada, x = sig.lsim2(my_tf_hp, sgnal, t)\nfig1, axs = plt.subplots(4, 1)\naxs[0].plot(t, s_t)\naxs[0].grid('True')\naxs[0].set_title('Señal Original')\naxs[0].set_ylim(-0.2, 0.2)\naxs[0].set_ylabel('[V]')\naxs[1].plot(t, r_t)\naxs[1].grid('True')\naxs[1].set_title('Ruido Interferente')\naxs[1].set_ylabel('[V]')\naxs[1].set_xlim(0)\naxs[2].plot(t, s_t + r_t)\naxs[2].grid('True')\naxs[2].set_title('Señal a Filtrar')\naxs[2].set_ylabel('[V]')\naxs[2].set_xlim(0)\naxs[3].plot(t, s_filtrada)\naxs[3].grid('True')\naxs[3].set_title('Señal Filtrada')\naxs[3].set_xlabel('t[seg]')\naxs[3].set_ylabel('[V]')\naxs[3].set_ylim(-0.2, 0.2)\naxs[3].set_xlim(0)\nbodePlot(my_tf_lp, 'Filtro Prototipo - Low Pass')\npzmap(my_tf_lp)\nbodePlot(my_tf_hp, 'Filtro Destino - High Pass')\npzmap(my_tf_hp)\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sat May 2 21:31:37 2020\r\n\r\n@author: Emmanuel Torres Molina\r\n\"\"\"\r\n\r\n\"\"\"\r\nEjercicio 10 del TP2 de Teoría de los Circuitos II:\r\nUn tono de 45 KHz y 200 mV de amplitud es distorsionada por un tono de 12 KHz \r\ny 2V de amplitud. Diseñar un filtro pasa altos que atenúe la señal\r\ninterferente, de tal forma que el remanente no sea mayor que el 2% de los 200 mV.\r\nLa ganancia en alta frecuencia deberá ser de 0 db y la máxima atenuación\r\nen la banda de paso menor a 1 dB. Emplear la aproximación que necesite menor \r\nnúmero de etapas.\r\nEn este caso el Filtro está Sintetizado por un Estructura RLC Pasiva + RL Pasivo.\r\n\"\"\"\r\n\r\nimport numpy as np\r\nfrom scipy.signal import TransferFunction as transf_f\r\nimport scipy.signal as sig\r\nfrom splane import bodePlot, pzmap\r\nfrom matplotlib import pyplot as plt\r\n\r\nplt.close ('all')\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Vector Tiempo:\r\nt0 = 0.0 # Tiempo Inicial\r\ntf = 0.005\r\ndt = 0.00005 # Incremento\r\nt = np.arange (t0, tf, dt)\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Tono de Interés:\r\n\r\nf_t = 45 * 10**3 # Frecuecia del Tono de mi Interés [Hz]\r\nw_t = 2 * np.pi * f_t # [rad/seg]\r\nA_t = 0.2 # Amplitud de mi Tono [V]\r\n\r\ns_t = A_t * np.sin ( w_t * t )\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Ruido Interferente:\r\n\r\nf_r = 12 * 10**3 # Frecuencia del Ruido Interferente [Hz]\r\nw_r = 2 * np.pi * f_r # [rad/seg]\r\nA_r= 2 # Amplitud del Ruido [V]\r\n\r\nr_t = A_r * np.sin ( w_r * t )\r\n\r\nsgnal = s_t + r_t\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Valores de los Elementos del Circuito:\r\n \r\n# Etapa 1: RLC Pasivo\r\nR1 = 290\r\nC1 = 3.5e-9\r\nL1 = 3.5e-3\r\nk1 = 1\r\n\r\n# Etapa 2: RL Pasivo\r\nR2 = 700\r\nC2 = 3.5e-9\r\nL2 = 1.03e-3\r\nk2 = 1\r\n\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Diseño del Filtro: Vamos a Realizar un Filtro High-Pass:\r\n \r\n# Requisitos de Plantilla\r\nalfa_max = 0.9 # Piden que sea menor a 1dB\r\nalfa_min = 54 # el remanente no sea mayor que el 2% de los 200 mV\r\nwp_hp = w_t\r\nws_hp = w_r\r\n\r\n# Normalizo las Pulsaciones Angulares usando como norma: wp_hp\r\nwp_hp_norm = wp_hp / wp_hp\r\nws_hp_norm = ws_hp / wp_hp\r\n\r\nw0 = np.sqrt ( 1 / (L1*C1) )\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Filtro Prototipo Low-Pass: Transformación en Frecuencia: w_HP = -1 / w_LP\r\nwp_lp_norm = abs(-1 / wp_hp_norm)\r\nws_lp_norm = abs(-1 / ws_hp_norm)\r\n\r\n\r\n# Voy a Utilizar Aproximación de Chebyshev para Diseñal el Filtro:\r\n\r\neps = np.sqrt ( (10 **(alfa_max/10) ) - 1 )\r\n\r\n# Orden del Filtro\r\nN = np.arccosh ( np.sqrt ( (10**(alfa_min/10) - 1) / eps**2 ) ) / np.arccosh (ws_lp_norm)\r\nN = np.ceil ( N ) # Redondeo para arriba\r\n\r\nden1_lp = [1, 0.29, 1]\r\nden2_lp = [1, 0.7, 0.29]\r\n\r\np1_lp = np.roots ( den1_lp )\r\np2_lp = np.roots ( den2_lp )\r\n\r\nmy_z_lp = np.array ([])\r\nmy_p_lp = np.concatenate ( (p1_lp, p2_lp), axis = None )\r\nmy_k_lp = 1 * 
0.29\r\n\r\nNUM, DEN = sig.zpk2tf ( my_z_lp, my_p_lp, my_k_lp )\r\nNUM_lp, DEN_lp = sig.lp2lp ( NUM, DEN, w0 )\r\n\r\nmy_tf_lp = transf_f (NUM_lp,DEN_lp)\r\n\r\n# ---------------------------------------------------------------------------\r\n\r\n# Filtro Destino - Filtro High-Pass:\r\n \r\n# Calculo W0:\r\n\r\nNUM_hp, DEN_hp = sig.lp2hp ( NUM, DEN, w0 )\r\n\r\nmy_tf_hp = transf_f ( NUM_hp, DEN_hp )\r\n\r\nmy_z_hp, my_p_hp, my_k_hp = sig.tf2zpk (NUM_hp, DEN_hp )\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Filtrado de la Señal:\r\n \r\nt, s_filtrada, x = sig.lsim2 ((my_tf_hp), sgnal, t )\r\n\r\n# ---------------------------------------------------------------------------\r\n# ---------------------------------------------------------------------------\r\n\r\n# Ploteo de las Señales, Respuesta en Frecuencia, etc.\r\n\r\nfig1, axs = plt.subplots(4,1)\r\n\r\naxs[0].plot ( t, s_t )\r\naxs[0].grid ('True')\r\naxs[0].set_title ('Señal Original')\r\naxs[0].set_ylim(-0.2,0.2)\r\naxs[0].set_ylabel('[V]')\r\n\r\naxs[1].plot ( t, r_t )\r\naxs[1].grid ('True')\r\naxs[1].set_title ('Ruido Interferente')\r\naxs[1].set_ylabel('[V]')\r\naxs[1].set_xlim(0)\r\n\r\naxs[2].plot (t, s_t + r_t )\r\naxs[2].grid ('True')\r\naxs[2].set_title ('Señal a Filtrar')\r\naxs[2].set_ylabel('[V]')\r\naxs[2].set_xlim(0)\r\n\r\naxs[3].plot (t, s_filtrada )\r\naxs[3].grid ('True')\r\naxs[3].set_title ( 'Señal Filtrada' )\r\naxs[3].set_xlabel ('t[seg]')\r\naxs[3].set_ylabel('[V]')\r\naxs[3].set_ylim(-0.2,0.2)\r\naxs[3].set_xlim(0)\r\n\r\n# Respuesta en Frecuencia:\r\nbodePlot (my_tf_lp, 'Filtro Prototipo - Low Pass')\r\npzmap (my_tf_lp)\r\n\r\nbodePlot (my_tf_hp, 'Filtro Destino - High Pass')\r\npzmap (my_tf_hp)\r\n\r\n\r\n\r\n\r\n\r\n \r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
# ----------------------------------------------------------
# RJGlass Main Program version 0.2 8/1/07
# ----------------------------------------------------------
# Copyright 2007 Michael LaBrie
#
# This file is part of RJGlass.
#
# RJGlass is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# RJGlass is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ---------------------------------------------------------------
import sys, os, time
#Load the modules needed for RJGlass.
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
#pygame needed for sound in server_only (so load either way)
import pygame
from pygame.locals import *
from pygame import image
from guage import * #All add on guage functions colors etc.
#This is code to import config file (config.py)
try:
import config
except ImportError:
# We're in a py2exe, so we'll append an element to the (one element)
# sys.path which points to Library.zip, to the directory that contains
# Library.zip, allowing us to import config.py
# Adds one level up from the Library.zip directory to the path, so import will go forward
sys.path.append(os.path.split(sys.path[0])[0])
import config
class screen_c(object):
#This controls what is in each screen.
def __init__(self, x, guage_list=[]):
self.guage_list = [] #list of guages to cycle through.
self.guage_index = 0
self.x = x
self.y = 0
self.width = 512
self.heigth = 768
self.add_guage_list(guage_list)
def add_guage_list(self,glist):
for g in glist:
self.append_guage(guage_dict[g])
def append_guage(self,guage):
self.guage_list.append(guage)
def cycle(self):
self.guage_index +=1
if self.guage_index >= len(self.guage_list):
self.guage_index =0
def cycle_reverse(self):
self.guage_index -=1
if self.guage_index <0:
self.guage_index = len(self.guage_list) -1
def active_guage(self):
return self.guage_list[self.guage_index]
#this is a static function not specificaly for the screen.
#the eventhandlers have references to the screens so it is easier to
#get the guage references by name through this object.
def gauge_by_name(self,name):
return guage_dict[name]
def draw(self, aircraft):
self.guage_active = self.guage_list[self.guage_index]
self.guage_active.draw(aircraft, self.x, self.y)
def InitPyGame():
glutInit(())
pygame.init()
if config.full_screen:
s = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL|FULLSCREEN)
else:
s = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL)
return s
def InitView(smooth, width, heigth):
global x_s, y_s, scissor
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
glLoadIdentity()
glOrtho(0,width,0.0,heigth,-1.0,1.0)
x_s = width/1024.0
y_s = heigth/768.0
glScalef(x_s, y_s, 1.0)
scissor.x_s = x_s
scissor.y_s = y_s
if smooth:
#Enable Smoothing Antianalising
glEnable(GL_LINE_SMOOTH)
glEnable(GL_BLEND)
#glBlendFunc(GL_SRC_ALPHA, GL_ZERO)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE)
#glDisable(GL_DEPTH_TEST)
#Clear Screen
#glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
def DisplaySplash(filename, delay, window_x, window_y):
#Display needs to be initialized first.
i = image.load(filename)
splash_image = bitmap_image(i)
#Determine the x and y coords to put in center of screen.
splash_x = (window_x / 2) - (splash_image.w/2)
splash_y = (window_y /2) - (splash_image.h/2)
glRasterPos3f(splash_x,splash_y,0)
glDrawPixels(splash_image.w, splash_image.h, GL_RGBA, GL_UNSIGNED_BYTE, splash_image.tostring)
pygame.display.flip()
time.sleep(delay)
def DrawWindow(left_screen, right_screen):
def divider(): #Dividing vertical white line between instruments
glColor(white)
glLineWidth(2.0)
glBegin(GL_LINES)
glVertex2f(512.0, 0.0)
glVertex2f(512.0, 768.0)
glEnd()
def draw_nodata(x,y): #Draw no data text on screen.
glColor(red)
glLineWidth(5.0)
glPushMatrix()
glTranslatef(x,y,0)
glScalef(0.4,0.4,1.0)
glText("NO SIM DATA", 100)
glPopMatrix()
global count
divider()
#PFD.draw(aircraft_data,250,445)
left_screen.draw(aircraft_data)
#ND.draw(aircraft_data,512+256, 400)
#FMS.draw(aircraft_data,512+256, 0)
right_screen.draw(aircraft_data)
glDisable(GL_SCISSOR_TEST) #Disable any scissoring.
draw_FPS(20,740, aircraft_data.frame_time)
#If Nodata is coming from Flight Sim, show on screen
if aircraft_data.nodata:
draw_nodata(50,500)
count = count +1 #Used for FPS calc
def MainLoop(mode, server_only):
#global window
global starttime
global count
global mode_func, left_screen, right_screen, eventhandler
# Start Event Processing Engine
starttime = time.time() # Used for FPS (Frame Per Second) Calculation
if (server_only):
#Set up correct function for selected mode
mode_func = aircraft_data.get_mode_func(mode)
else:
left_screen = screen_c(256,config.left_screen)
right_screen = screen_c(512+256,config.right_screen)
# left_screen.add_guage_list(config.left_screen)
# right_screen.add_guage_list(config.right_screen)
#Set up correct function for selected mode
mode_func = aircraft_data.get_mode_func(mode, left_screen, right_screen)
#Setup Keyboard
#keys.setup_lists(aircraft_data)
#Inititalize View
#left_screen = screen_c(256, [PFD, ND, FMS])
eventhandler = event_handler.event_handler_c(aircraft_data,FMS, right_screen, left_screen)
#Load textures, and guages that use them
FMS.load_texture()
EICAS1.load_texture()
EICAS2.load_texture()
RADIO.load_texture()
if server_only:
server_loop()
else:
graphic_loop()
def graphic_loop():
#This is the loop for the non server mode. Gauges drawn.
while not (aircraft_data.quit_flag):
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT) #Clear Screen
#Update globaltime
aircraft_data.globaltime = time.time()
globaltime.update(time.time())
DrawWindow(left_screen, right_screen)
pygame.display.flip() #Update screen
mode_func() #Run aircraft mode function, to do all teh calaculations etc.
# Check for keypresses
eventhandler.check_events(pygame.event.get(), globaltime.value)
def server_loop():
#This is the loop for the server only mode. No Guages Drawn
while not (aircraft_data.quit_flag):
#Update globaltime
aircraft_data.globaltime = time.time()
globaltime.update(time.time())
mode_func() #Run aircraft mode function, to do all teh calaculations etc.
time.sleep(0.01) #Throw in some time delay, since no guages are being drawn.
# Check for keypresses
#eventhandler.check_events(pygame.event.get(), globaltime.value)
def Init_Graphics(x,y):
InitPyGame()
InitView(True, x,y)
def Initialize(server_only):
#if server_only True then server will just be run, No Graphics
#Initialize count for FPS calc
global count
count = 0
if (not server_only):
Init_Graphics(config.window_x, config.window_y)
#Draw Splash Screen
if config.splash:
DisplaySplash(config.splash_filename, config.splash_delay, config.window_x, config.window_y)
def ShutDown(mode, server_only):
#Close LogFile
datafile.close()
#Close pygame mixer
pygame.mixer.quit()
#Print average Frames per second on shutdown
print "FPS ", count / (time.time() - starttime)
#Try to kill the thread if it exists. Closes it down on exit
aircraft_data.AP.quit() #only here to close debugging files if present.
if ((mode != config.TEST) & (mode != config.CLIENT)): #If simconnected connected, kill the thread.
aircraft_data.kill_SimConnect()
def CheckArg(arg, mode, server_only, addr):
if 'server' in arg:
server_only = True
elif 'guage' in arg:
server_only = False
if 'client' in arg:
mode = config.CLIENT
elif 'test' in arg:
mode = config.TEST
for a in arg:
if 'addr' in a:
addr = a.split('=')[1]
return mode, server_only, addr
#===========================================================================
#Main program starts here
#===========================================================================
#Check arguments first, and get mode and server_only flags
mode, server_only, addr = CheckArg(sys.argv, config.mode, config.server_only, config.addr)
#config.addr = addr
#print addr
Initialize(server_only)
#Import guage files.
import aircraft #Does all of the aircraft_data
import event_handler #Handles all keyboard commands
import variable
if (not server_only):
import PFD_mod
import ND_mod
import EICAS1_mod
import EICAS2_mod
import FMS_guage
import radio_mod
#Create Guages
aircraft_data = aircraft.data()
variables = variable.variable_c(aircraft_data)
if (not server_only):
PFD = PFD_mod.PFD_Guage()
ND = ND_mod.ND_Guage()
FMS = FMS_guage.FMS_guage_c()
EICAS1 = EICAS1_mod.EICAS1_guage()
EICAS2 = EICAS2_mod.EICAS2_guage()
ND.initialize(aircraft_data)
RADIO = radio_mod.radio_guage()
guage_dict= { "RADIO":RADIO,"PFD":PFD,"ND":ND,"FMS":FMS,
"EICAS1":EICAS1,"EICAS2":EICAS2 }
print "Main Loop"
#Run main, and get window size and operation mode from config file. config.py
MainLoop(mode, server_only)
#===================
# Shutting Down
#===================
ShutDown(mode, server_only)
|
normal
|
{
"blob_id": "aafadcbf946db8ed85e3df48f5411967ec35c318",
"index": 7333,
"step-1": "#!/usr/bin/env python\n# ----------------------------------------------------------\n# RJGlass Main Program version 0.2 8/1/07\n# ----------------------------------------------------------\n# Copyright 2007 Michael LaBrie\n#\n# This file is part of RJGlass.\n#\n# RJGlass is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 3 of the License, or\n# (at your option) any later version.\n\n# RJGlass is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n# ---------------------------------------------------------------\nimport sys, os, time\n\n#Load the modules needed for RJGlass.\n\t\n\t\nfrom OpenGL.GL import *\nfrom OpenGL.GLUT import *\nfrom OpenGL.GLU import *\n\n#pygame needed for sound in server_only (so load either way)\nimport pygame\nfrom pygame.locals import *\nfrom pygame import image\n\t\nfrom guage import * #All add on guage functions colors etc. \n\n#This is code to import config file (config.py)\ntry:\n\timport config\nexcept ImportError:\n\t# We're in a py2exe, so we'll append an element to the (one element) \n\t# sys.path which points to Library.zip, to the directory that contains \n\t# Library.zip, allowing us to import config.py\n\t# Adds one level up from the Library.zip directory to the path, so import will go forward\n\tsys.path.append(os.path.split(sys.path[0])[0])\n\timport config\n\n\nclass screen_c(object):\n\t#This controls what is in each screen.\n\tdef __init__(self, x, guage_list=[]):\n\t\tself.guage_list = [] #list of guages to cycle through.\n\t\tself.guage_index = 0\n\t\tself.x = x\n\t\tself.y = 0\n\t\tself.width = 512\n\t\tself.heigth = 768\n\t\tself.add_guage_list(guage_list)\n\t\t\n\tdef add_guage_list(self,glist):\n\t\tfor g in glist:\n\t\t\tself.append_guage(guage_dict[g])\n\t\t\n\tdef append_guage(self,guage):\n\t\tself.guage_list.append(guage)\n\t\t\n\tdef cycle(self):\n\t\tself.guage_index +=1\n\t\tif self.guage_index >= len(self.guage_list):\n\t\t\tself.guage_index =0\n\t\t\t\n\tdef cycle_reverse(self):\n\t\tself.guage_index -=1\n\t\tif self.guage_index <0:\n\t\t\tself.guage_index = len(self.guage_list) -1\n\t\t\t\n\tdef active_guage(self):\n\t\treturn self.guage_list[self.guage_index]\t\t\t\n\t\n\t#this is a static function not specificaly for the screen.\n\t#the eventhandlers have references to the screens so it is easier to\n\t#get the guage references by name through this object.\n\tdef gauge_by_name(self,name):\n\t\treturn guage_dict[name]\n\t\t\t\n\tdef draw(self, aircraft):\n\t\tself.guage_active = self.guage_list[self.guage_index]\n\t\tself.guage_active.draw(aircraft, self.x, self.y)\n\n\ndef InitPyGame():\n\tglutInit(())\n\tpygame.init()\n\tif config.full_screen:\n\t\ts = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL|FULLSCREEN)\n\telse:\n\t\ts = pygame.display.set_mode((1024,768), DOUBLEBUF|OPENGL)\n\treturn s\n\t\t\ndef InitView(smooth, width, heigth):\n\tglobal x_s, y_s, scissor\n\t\n\tglClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT) \n\tglLoadIdentity()\n\tglOrtho(0,width,0.0,heigth,-1.0,1.0)\n\t\n\tx_s = width/1024.0\n\ty_s = heigth/768.0\n\n\tglScalef(x_s, y_s, 1.0)\n\tscissor.x_s = 
x_s\n\tscissor.y_s = y_s\n\tif smooth:\n\t\t#Enable Smoothing Antianalising\n\t\tglEnable(GL_LINE_SMOOTH)\n\t\tglEnable(GL_BLEND)\n\t\t#glBlendFunc(GL_SRC_ALPHA, GL_ZERO)\n\t\tglBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)\n\t\tglHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE)\n\t\t#glDisable(GL_DEPTH_TEST)\n\t#Clear Screen\n\t#glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)\n\t\n\t\ndef DisplaySplash(filename, delay, window_x, window_y):\n\t#Display needs to be initialized first.\n\ti = image.load(filename)\n\tsplash_image = bitmap_image(i)\n\t#Determine the x and y coords to put in center of screen.\n\tsplash_x = (window_x / 2) - (splash_image.w/2)\n\tsplash_y = (window_y /2) - (splash_image.h/2)\n\tglRasterPos3f(splash_x,splash_y,0)\n\tglDrawPixels(splash_image.w, splash_image.h, GL_RGBA, GL_UNSIGNED_BYTE, splash_image.tostring)\n\tpygame.display.flip()\n\ttime.sleep(delay)\n\t\n\n\t\n\ndef DrawWindow(left_screen, right_screen):\n\t\n\tdef divider(): #Dividing vertical white line between instruments\n\t\tglColor(white)\n\t\tglLineWidth(2.0)\n\t\tglBegin(GL_LINES)\n\t\tglVertex2f(512.0, 0.0)\n\t\tglVertex2f(512.0, 768.0)\n\t\tglEnd()\n\t\t\n\tdef draw_nodata(x,y): #Draw no data text on screen.\n\t\tglColor(red)\n\t\tglLineWidth(5.0)\n\t\tglPushMatrix()\n\t\tglTranslatef(x,y,0)\n\t\tglScalef(0.4,0.4,1.0)\n\t\tglText(\"NO SIM DATA\", 100)\n\t\tglPopMatrix()\n\t\t\n\tglobal count\n\tdivider()\n\t#PFD.draw(aircraft_data,250,445)\n\tleft_screen.draw(aircraft_data)\n\t#ND.draw(aircraft_data,512+256, 400)\n\t#FMS.draw(aircraft_data,512+256, 0)\n\tright_screen.draw(aircraft_data)\n\tglDisable(GL_SCISSOR_TEST) #Disable any scissoring.\n\tdraw_FPS(20,740, aircraft_data.frame_time)\n\t#If Nodata is coming from Flight Sim, show on screen\n\tif aircraft_data.nodata:\n\t\tdraw_nodata(50,500)\n\t\n\t\n\tcount = count +1 #Used for FPS calc\n\t\ndef MainLoop(mode, server_only):\n\t#global window\n\tglobal starttime\n\tglobal count\n\tglobal mode_func, left_screen, right_screen, eventhandler\n\t# Start Event Processing Engine\t\n\tstarttime = time.time() # Used for FPS (Frame Per Second) Calculation\n\t\n\tif (server_only):\n\t\t#Set up correct function for selected mode\n\t\tmode_func = aircraft_data.get_mode_func(mode)\n\telse:\t\n\t\tleft_screen = screen_c(256,config.left_screen)\n\t\tright_screen = screen_c(512+256,config.right_screen)\n\t#\tleft_screen.add_guage_list(config.left_screen)\n\t#\tright_screen.add_guage_list(config.right_screen)\n\t\t#Set up correct function for selected mode\n\t\tmode_func = aircraft_data.get_mode_func(mode, left_screen, right_screen)\n\t\n\t#Setup Keyboard\n\t#keys.setup_lists(aircraft_data)\n\t#Inititalize View\n\t#left_screen = screen_c(256, [PFD, ND, FMS])\n\t\n\t\teventhandler = event_handler.event_handler_c(aircraft_data,FMS, right_screen, left_screen)\n\t\n\t\t#Load textures, and guages that use them\n\t\tFMS.load_texture()\n\t\tEICAS1.load_texture()\n\t\tEICAS2.load_texture()\n\t\tRADIO.load_texture()\n\t\n\tif server_only:\n\t\tserver_loop()\n\telse:\n\t\tgraphic_loop()\n\ndef graphic_loop():\n\t#This is the loop for the non server mode. 
Gauges drawn.\n\twhile not (aircraft_data.quit_flag):\n\t\tglClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT) #Clear Screen\t\n\t\t#Update globaltime\n\t\taircraft_data.globaltime = time.time()\n\t\tglobaltime.update(time.time())\n\t\tDrawWindow(left_screen, right_screen)\n\t\tpygame.display.flip() #Update screen\n\t\tmode_func() #Run aircraft mode function, to do all teh calaculations etc.\n\t\t\n\t\t# Check for keypresses\n\t\teventhandler.check_events(pygame.event.get(), globaltime.value)\t\t\ndef server_loop():\n\t#This is the loop for the server only mode. No Guages Drawn\n\twhile not (aircraft_data.quit_flag):\n\t\t#Update globaltime\n\t\taircraft_data.globaltime = time.time()\n\t\tglobaltime.update(time.time())\n\t\tmode_func() #Run aircraft mode function, to do all teh calaculations etc.\n\t\ttime.sleep(0.01) #Throw in some time delay, since no guages are being drawn.\n\t\t\n\t\t# Check for keypresses\n\t\t#eventhandler.check_events(pygame.event.get(), globaltime.value)\t\t\n\t\n\t\n\n\ndef Init_Graphics(x,y):\n\tInitPyGame()\n\tInitView(True, x,y)\n\t\ndef Initialize(server_only):\n\t#if server_only True then server will just be run, No Graphics\n\t#Initialize count for FPS calc\n\tglobal count\n\tcount = 0\n\tif (not server_only):\n\t\tInit_Graphics(config.window_x, config.window_y)\n\t\t#Draw Splash Screen\n\t\tif config.splash:\n\t\t\tDisplaySplash(config.splash_filename, config.splash_delay, config.window_x, config.window_y)\n\n\n\t\n\t\ndef ShutDown(mode, server_only):\n\t#Close LogFile\n\tdatafile.close()\n\t#Close pygame mixer\n\tpygame.mixer.quit()\n\t#Print average Frames per second on shutdown\n\tprint \"FPS \", count / (time.time() - starttime)\n\t#Try to kill the thread if it exists. Closes it down on exit\t\t\t\t\n\taircraft_data.AP.quit() #only here to close debugging files if present.\n\tif ((mode != config.TEST) & (mode != config.CLIENT)): #If simconnected connected, kill the thread.\n\t\taircraft_data.kill_SimConnect()\n\ndef CheckArg(arg, mode, server_only, addr):\n\tif 'server' in arg:\n\t\tserver_only = True\n\telif 'guage' in arg:\t\n\t\tserver_only = False\n\t\t\n\tif 'client' in arg:\n\t\tmode = config.CLIENT\n\telif 'test' in arg:\n\t\tmode = config.TEST\n\t\n\tfor a in arg:\n\t\tif 'addr' in a:\n\t\t\taddr = a.split('=')[1]\n\t\t\t\t\n\treturn mode, server_only, addr\n\t\n\n#===========================================================================\n#Main program starts here\n#===========================================================================\n#Check arguments first, and get mode and server_only flags\nmode, server_only, addr = CheckArg(sys.argv, config.mode, config.server_only, config.addr)\n#config.addr = addr\n#print addr\nInitialize(server_only)\n#Import guage files.\nimport aircraft #Does all of the aircraft_data\nimport event_handler #Handles all keyboard commands\nimport variable\n\t\nif (not server_only):\n\timport PFD_mod\n\timport ND_mod\n\timport EICAS1_mod\n\timport EICAS2_mod\n\timport FMS_guage\n\timport radio_mod\n\n#Create Guages\n\naircraft_data = aircraft.data()\nvariables = variable.variable_c(aircraft_data)\n\nif (not server_only):\n\tPFD = PFD_mod.PFD_Guage()\n\tND = ND_mod.ND_Guage()\n\tFMS = FMS_guage.FMS_guage_c()\n\tEICAS1 = EICAS1_mod.EICAS1_guage()\n\tEICAS2 = EICAS2_mod.EICAS2_guage()\n\tND.initialize(aircraft_data)\n\tRADIO = radio_mod.radio_guage()\n\t\n\tguage_dict= { \"RADIO\":RADIO,\"PFD\":PFD,\"ND\":ND,\"FMS\":FMS,\n\t\t\"EICAS1\":EICAS1,\"EICAS2\":EICAS2 }\n\t\t\n\nprint \"Main Loop\"\n#Run main, and get window 
size and operation mode from config file. config.py\nMainLoop(mode, server_only)\n#===================\n# Shuting Down\n#===================\nShutDown(mode, server_only)\t\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python3
data = None
with open('./01-data.txt') as f:
data = f.read().splitlines()
ss = {}
s = 0
ss[s] = True
def check(data):
global ss
global s
for line in data:
s += int(line)
if ss.get(s, False):
return s
ss[s] = True
return None
v = check(data)
print('after first pass:', s)
while v is None:
v = check(data)
print('first duplicate:', v)
|
normal
|
{
"blob_id": "7e1dd242c60ee12dfc4130e379fa35ae626a4d63",
"index": 5217,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n if ss.get(s, False):\n return s\n ss[s] = True\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\nwith open('./01-data.txt') as f:\n data = f.read().splitlines()\n<mask token>\n\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n if ss.get(s, False):\n return s\n ss[s] = True\n return None\n\n\n<mask token>\nprint('after first pass:', s)\nwhile v is None:\n v = check(data)\nprint('first duplicate:', v)\n",
"step-4": "data = None\nwith open('./01-data.txt') as f:\n data = f.read().splitlines()\nss = {}\ns = 0\nss[s] = True\n\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n if ss.get(s, False):\n return s\n ss[s] = True\n return None\n\n\nv = check(data)\nprint('after first pass:', s)\nwhile v is None:\n v = check(data)\nprint('first duplicate:', v)\n",
"step-5": "#!/usr/bin/env python3\n\ndata = None\n\nwith open('./01-data.txt') as f:\n data = f.read().splitlines()\n\nss = {}\ns = 0\nss[s] = True\n\ndef check(data):\n global ss\n global s\n for line in data:\n s += int(line)\n\n if ss.get(s, False):\n return s\n\n ss[s] = True\n return None\n\n\nv = check(data)\nprint('after first pass:', s)\nwhile v is None:\n v = check(data)\nprint('first duplicate:', v)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from ParseTree import ParseTree
from Node import Node
from NodeInfo import NodeInfo
from TreeAdjustor import TreeAdjustor
from model.SchemaGraph import SchemaGraph
class TreeAdjustorTest:
schema = None
def __init__(self):
return
def getAdjustedTreesTest(self):
T = ParseTree()
nodes = [Node(index=-1, word="DEFAULT", posTag="DEFAULT") for i in range(0, 8)]
nodes[0] = Node(index=0, word="ROOT", posTag="--")
nodes[0].info = NodeInfo(type="ROOT", value="ROOT")
nodes[1] = Node(index=1, word="return", posTag="--")
nodes[1].info = NodeInfo(type="SN", value="SELECT")
nodes[2] = Node(index=2, word="conference", posTag="--")
nodes[2].info = NodeInfo(type="NN", value="Author")
nodes[3] = Node(index=3, word="area", posTag="--")
nodes[3].info = NodeInfo(type="NN", value="Title")
nodes[4] = Node(index=4, word="papers", posTag="--")
nodes[4].info = NodeInfo(type="NN", value="Author")
nodes[5] = Node(index=5, word="citations", posTag="--")
nodes[5].info = NodeInfo(type="NN", value="Journal")
nodes[6] = Node(index=6, word="most", posTag="--")
nodes[6].info = NodeInfo(type="FN", value=">")
nodes[7] = Node(index=7, word="total", posTag="--")
nodes[7].info = NodeInfo(type="FN", value="Year")
T.root = nodes[0]
nodes[0].children.append(nodes[1])
nodes[1].parent = nodes[0]
nodes[1].children.append(nodes[2])
nodes[2].parent = nodes[1]
nodes[2].children.append(nodes[3])
nodes[3].parent = nodes[2]
nodes[2].children.append(nodes[4])
nodes[4].parent = nodes[2]
nodes[4].children.append(nodes[5])
nodes[5].parent = nodes[4]
nodes[5].children.append(nodes[6])
nodes[6].parent = nodes[5]
nodes[5].children.append(nodes[7])
nodes[7].parent = nodes[5]
print ("===========test for Running getAdjustedTrees() in TreeAdjustor===========")
print ("The original tree:")
print (T.toString())
print ("Number of possible trees for choice:")
obj = TreeAdjustor()
result = TreeAdjustor.getAdjustedTrees(T)
# result = TreeAdjustor.adjust(T)
print (len(result))
# result = sorted(result,cmp=TreeAdjustorTest.cmpp)
# l =sorted(m, cmp =TreeAdjustor.timeStampCompare)
for i in range(0, len(result)):
for j in range(i+1, len(result)):
if(result[i].getScore() <= result[j].getScore()):
temp = result[i]
result[i] =result[j]
result[j] = temp
print ("The three trees with highest scores look like:")
for i in range(0,5):
print (result[i])
for tree in result:
print (" treeList Result %s:%d" % (tree.getSentence(), tree.getScore()))
tree.insertImplicitNodes()
query = tree.translateToSQL(self.schema)
print ("qUERY: " + query.toString())
def adjustTest(self):
T = ParseTree()
nodes = [Node(index=-1, word="DEFAULT", posTag="DEFAULT") for i in range(0, 9)]
nodes[0] = Node(index=0, word="ROOT",posTag= "--")
nodes[0].info = NodeInfo(type="ROOT", value="ROOT")
nodes[1] = Node(index=1, word="return", posTag="--")
nodes[1].info = NodeInfo(type="SN", value="SELECT")
nodes[2] = Node(index=2, word="conference", posTag="--")
nodes[2].info = NodeInfo(type="NN", value="Author")
nodes[3] = Node(index=3, word="area", posTag="--")
nodes[3].info =NodeInfo(type="NN", value="Title")
nodes[4] =Node(index=4, word="each", posTag="--")
nodes[4].info = NodeInfo(type="QN", value=">")
nodes[5] = Node(index=5, word="papers", posTag="--")
nodes[5].info = NodeInfo(type="NN", value="Author")
nodes[6] = Node(index=6, word="citations", posTag="--")
nodes[6].info = NodeInfo(type="NN", value="Journal")
nodes[7] = Node(index=7, word="most", posTag="--")
nodes[7].info = NodeInfo(type="FN", value=">")
nodes[8] = Node(index=8, word="total", posTag="--")
nodes[8].info = NodeInfo(type="FN", value="Year")
T.root = nodes[0]
nodes[0].children.append(nodes[1])
nodes[1].parent = nodes[0]
nodes[1].children.append(nodes[2])
nodes[2].parent = nodes[1]
nodes[2].children.append(nodes[3])
nodes[3].parent = nodes[2]
nodes[2].children.append(nodes[5])
nodes[5].parent = nodes[2]
nodes[3].children.append(nodes[4])
nodes[4].parent = nodes[3]
nodes[5].children.append(nodes[6])
nodes[6].parent = nodes[5]
nodes[6].children.append(nodes[7])
nodes[7].parent = nodes[6]
nodes[6].children.append(nodes[8])
nodes[8].parent = nodes[6]
print ("===========test for Running adjust() in TreeAdjustor===========")
treeList = TreeAdjustor.adjust(T)
print ("Output size: %d"%len(treeList))
print ("Output trees:")
ctr=0
for tr in treeList:
print ("Tree %d %s"%(ctr, tr.getSentence()))
ctr+=1
@staticmethod
def cmpp(a,b):
return a.getScore() > b.getScore()
obj = TreeAdjustorTest()
obj.getAdjustedTreesTest()
# obj.adjustTest()
|
normal
|
{
"blob_id": "1db397df2d030b2f622e701c46c15d653cb79e55",
"index": 5079,
"step-1": "<mask token>\n\n\nclass TreeAdjustorTest:\n <mask token>\n\n def __init__(self):\n return\n\n def getAdjustedTreesTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word='DEFAULT', posTag='DEFAULT') for i in\n range(0, 8)]\n nodes[0] = Node(index=0, word='ROOT', posTag='--')\n nodes[0].info = NodeInfo(type='ROOT', value='ROOT')\n nodes[1] = Node(index=1, word='return', posTag='--')\n nodes[1].info = NodeInfo(type='SN', value='SELECT')\n nodes[2] = Node(index=2, word='conference', posTag='--')\n nodes[2].info = NodeInfo(type='NN', value='Author')\n nodes[3] = Node(index=3, word='area', posTag='--')\n nodes[3].info = NodeInfo(type='NN', value='Title')\n nodes[4] = Node(index=4, word='papers', posTag='--')\n nodes[4].info = NodeInfo(type='NN', value='Author')\n nodes[5] = Node(index=5, word='citations', posTag='--')\n nodes[5].info = NodeInfo(type='NN', value='Journal')\n nodes[6] = Node(index=6, word='most', posTag='--')\n nodes[6].info = NodeInfo(type='FN', value='>')\n nodes[7] = Node(index=7, word='total', posTag='--')\n nodes[7].info = NodeInfo(type='FN', value='Year')\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[4])\n nodes[4].parent = nodes[2]\n nodes[4].children.append(nodes[5])\n nodes[5].parent = nodes[4]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[5].children.append(nodes[7])\n nodes[7].parent = nodes[5]\n print(\n '===========test for Running getAdjustedTrees() in TreeAdjustor==========='\n )\n print('The original tree:')\n print(T.toString())\n print('Number of possible trees for choice:')\n obj = TreeAdjustor()\n result = TreeAdjustor.getAdjustedTrees(T)\n print(len(result))\n for i in range(0, len(result)):\n for j in range(i + 1, len(result)):\n if result[i].getScore() <= result[j].getScore():\n temp = result[i]\n result[i] = result[j]\n result[j] = temp\n print('The three trees with highest scores look like:')\n for i in range(0, 5):\n print(result[i])\n for tree in result:\n print(' treeList Result %s:%d' % (tree.getSentence(), tree.\n getScore()))\n tree.insertImplicitNodes()\n query = tree.translateToSQL(self.schema)\n print('qUERY: ' + query.toString())\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TreeAdjustorTest:\n <mask token>\n\n def __init__(self):\n return\n\n def getAdjustedTreesTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word='DEFAULT', posTag='DEFAULT') for i in\n range(0, 8)]\n nodes[0] = Node(index=0, word='ROOT', posTag='--')\n nodes[0].info = NodeInfo(type='ROOT', value='ROOT')\n nodes[1] = Node(index=1, word='return', posTag='--')\n nodes[1].info = NodeInfo(type='SN', value='SELECT')\n nodes[2] = Node(index=2, word='conference', posTag='--')\n nodes[2].info = NodeInfo(type='NN', value='Author')\n nodes[3] = Node(index=3, word='area', posTag='--')\n nodes[3].info = NodeInfo(type='NN', value='Title')\n nodes[4] = Node(index=4, word='papers', posTag='--')\n nodes[4].info = NodeInfo(type='NN', value='Author')\n nodes[5] = Node(index=5, word='citations', posTag='--')\n nodes[5].info = NodeInfo(type='NN', value='Journal')\n nodes[6] = Node(index=6, word='most', posTag='--')\n nodes[6].info = NodeInfo(type='FN', value='>')\n nodes[7] = Node(index=7, word='total', posTag='--')\n nodes[7].info = NodeInfo(type='FN', value='Year')\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[4])\n nodes[4].parent = nodes[2]\n nodes[4].children.append(nodes[5])\n nodes[5].parent = nodes[4]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[5].children.append(nodes[7])\n nodes[7].parent = nodes[5]\n print(\n '===========test for Running getAdjustedTrees() in TreeAdjustor==========='\n )\n print('The original tree:')\n print(T.toString())\n print('Number of possible trees for choice:')\n obj = TreeAdjustor()\n result = TreeAdjustor.getAdjustedTrees(T)\n print(len(result))\n for i in range(0, len(result)):\n for j in range(i + 1, len(result)):\n if result[i].getScore() <= result[j].getScore():\n temp = result[i]\n result[i] = result[j]\n result[j] = temp\n print('The three trees with highest scores look like:')\n for i in range(0, 5):\n print(result[i])\n for tree in result:\n print(' treeList Result %s:%d' % (tree.getSentence(), tree.\n getScore()))\n tree.insertImplicitNodes()\n query = tree.translateToSQL(self.schema)\n print('qUERY: ' + query.toString())\n\n def adjustTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word='DEFAULT', posTag='DEFAULT') for i in\n range(0, 9)]\n nodes[0] = Node(index=0, word='ROOT', posTag='--')\n nodes[0].info = NodeInfo(type='ROOT', value='ROOT')\n nodes[1] = Node(index=1, word='return', posTag='--')\n nodes[1].info = NodeInfo(type='SN', value='SELECT')\n nodes[2] = Node(index=2, word='conference', posTag='--')\n nodes[2].info = NodeInfo(type='NN', value='Author')\n nodes[3] = Node(index=3, word='area', posTag='--')\n nodes[3].info = NodeInfo(type='NN', value='Title')\n nodes[4] = Node(index=4, word='each', posTag='--')\n nodes[4].info = NodeInfo(type='QN', value='>')\n nodes[5] = Node(index=5, word='papers', posTag='--')\n nodes[5].info = NodeInfo(type='NN', value='Author')\n nodes[6] = Node(index=6, word='citations', posTag='--')\n nodes[6].info = NodeInfo(type='NN', value='Journal')\n nodes[7] = Node(index=7, word='most', posTag='--')\n nodes[7].info = NodeInfo(type='FN', value='>')\n nodes[8] = Node(index=8, word='total', posTag='--')\n nodes[8].info = NodeInfo(type='FN', value='Year')\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n 
nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[5])\n nodes[5].parent = nodes[2]\n nodes[3].children.append(nodes[4])\n nodes[4].parent = nodes[3]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[6].children.append(nodes[7])\n nodes[7].parent = nodes[6]\n nodes[6].children.append(nodes[8])\n nodes[8].parent = nodes[6]\n print('===========test for Running adjust() in TreeAdjustor==========='\n )\n treeList = TreeAdjustor.adjust(T)\n print('Output size: %d' % len(treeList))\n print('Output trees:')\n ctr = 0\n for tr in treeList:\n print('Tree %d %s' % (ctr, tr.getSentence()))\n ctr += 1\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TreeAdjustorTest:\n schema = None\n\n def __init__(self):\n return\n\n def getAdjustedTreesTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word='DEFAULT', posTag='DEFAULT') for i in\n range(0, 8)]\n nodes[0] = Node(index=0, word='ROOT', posTag='--')\n nodes[0].info = NodeInfo(type='ROOT', value='ROOT')\n nodes[1] = Node(index=1, word='return', posTag='--')\n nodes[1].info = NodeInfo(type='SN', value='SELECT')\n nodes[2] = Node(index=2, word='conference', posTag='--')\n nodes[2].info = NodeInfo(type='NN', value='Author')\n nodes[3] = Node(index=3, word='area', posTag='--')\n nodes[3].info = NodeInfo(type='NN', value='Title')\n nodes[4] = Node(index=4, word='papers', posTag='--')\n nodes[4].info = NodeInfo(type='NN', value='Author')\n nodes[5] = Node(index=5, word='citations', posTag='--')\n nodes[5].info = NodeInfo(type='NN', value='Journal')\n nodes[6] = Node(index=6, word='most', posTag='--')\n nodes[6].info = NodeInfo(type='FN', value='>')\n nodes[7] = Node(index=7, word='total', posTag='--')\n nodes[7].info = NodeInfo(type='FN', value='Year')\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[4])\n nodes[4].parent = nodes[2]\n nodes[4].children.append(nodes[5])\n nodes[5].parent = nodes[4]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[5].children.append(nodes[7])\n nodes[7].parent = nodes[5]\n print(\n '===========test for Running getAdjustedTrees() in TreeAdjustor==========='\n )\n print('The original tree:')\n print(T.toString())\n print('Number of possible trees for choice:')\n obj = TreeAdjustor()\n result = TreeAdjustor.getAdjustedTrees(T)\n print(len(result))\n for i in range(0, len(result)):\n for j in range(i + 1, len(result)):\n if result[i].getScore() <= result[j].getScore():\n temp = result[i]\n result[i] = result[j]\n result[j] = temp\n print('The three trees with highest scores look like:')\n for i in range(0, 5):\n print(result[i])\n for tree in result:\n print(' treeList Result %s:%d' % (tree.getSentence(), tree.\n getScore()))\n tree.insertImplicitNodes()\n query = tree.translateToSQL(self.schema)\n print('qUERY: ' + query.toString())\n\n def adjustTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word='DEFAULT', posTag='DEFAULT') for i in\n range(0, 9)]\n nodes[0] = Node(index=0, word='ROOT', posTag='--')\n nodes[0].info = NodeInfo(type='ROOT', value='ROOT')\n nodes[1] = Node(index=1, word='return', posTag='--')\n nodes[1].info = NodeInfo(type='SN', value='SELECT')\n nodes[2] = Node(index=2, word='conference', posTag='--')\n nodes[2].info = NodeInfo(type='NN', value='Author')\n nodes[3] = Node(index=3, word='area', posTag='--')\n nodes[3].info = NodeInfo(type='NN', value='Title')\n nodes[4] = Node(index=4, word='each', posTag='--')\n nodes[4].info = NodeInfo(type='QN', value='>')\n nodes[5] = Node(index=5, word='papers', posTag='--')\n nodes[5].info = NodeInfo(type='NN', value='Author')\n nodes[6] = Node(index=6, word='citations', posTag='--')\n nodes[6].info = NodeInfo(type='NN', value='Journal')\n nodes[7] = Node(index=7, word='most', posTag='--')\n nodes[7].info = NodeInfo(type='FN', value='>')\n nodes[8] = Node(index=8, word='total', posTag='--')\n nodes[8].info = NodeInfo(type='FN', value='Year')\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n 
nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[5])\n nodes[5].parent = nodes[2]\n nodes[3].children.append(nodes[4])\n nodes[4].parent = nodes[3]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[6].children.append(nodes[7])\n nodes[7].parent = nodes[6]\n nodes[6].children.append(nodes[8])\n nodes[8].parent = nodes[6]\n print('===========test for Running adjust() in TreeAdjustor==========='\n )\n treeList = TreeAdjustor.adjust(T)\n print('Output size: %d' % len(treeList))\n print('Output trees:')\n ctr = 0\n for tr in treeList:\n print('Tree %d %s' % (ctr, tr.getSentence()))\n ctr += 1\n\n @staticmethod\n def cmpp(a, b):\n return a.getScore() > b.getScore()\n\n\nobj = TreeAdjustorTest()\nobj.getAdjustedTreesTest()\n",
"step-4": "from ParseTree import ParseTree\nfrom Node import Node\nfrom NodeInfo import NodeInfo\nfrom TreeAdjustor import TreeAdjustor\nfrom model.SchemaGraph import SchemaGraph\n\n\nclass TreeAdjustorTest:\n schema = None\n\n def __init__(self):\n return\n\n def getAdjustedTreesTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word='DEFAULT', posTag='DEFAULT') for i in\n range(0, 8)]\n nodes[0] = Node(index=0, word='ROOT', posTag='--')\n nodes[0].info = NodeInfo(type='ROOT', value='ROOT')\n nodes[1] = Node(index=1, word='return', posTag='--')\n nodes[1].info = NodeInfo(type='SN', value='SELECT')\n nodes[2] = Node(index=2, word='conference', posTag='--')\n nodes[2].info = NodeInfo(type='NN', value='Author')\n nodes[3] = Node(index=3, word='area', posTag='--')\n nodes[3].info = NodeInfo(type='NN', value='Title')\n nodes[4] = Node(index=4, word='papers', posTag='--')\n nodes[4].info = NodeInfo(type='NN', value='Author')\n nodes[5] = Node(index=5, word='citations', posTag='--')\n nodes[5].info = NodeInfo(type='NN', value='Journal')\n nodes[6] = Node(index=6, word='most', posTag='--')\n nodes[6].info = NodeInfo(type='FN', value='>')\n nodes[7] = Node(index=7, word='total', posTag='--')\n nodes[7].info = NodeInfo(type='FN', value='Year')\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[4])\n nodes[4].parent = nodes[2]\n nodes[4].children.append(nodes[5])\n nodes[5].parent = nodes[4]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[5].children.append(nodes[7])\n nodes[7].parent = nodes[5]\n print(\n '===========test for Running getAdjustedTrees() in TreeAdjustor==========='\n )\n print('The original tree:')\n print(T.toString())\n print('Number of possible trees for choice:')\n obj = TreeAdjustor()\n result = TreeAdjustor.getAdjustedTrees(T)\n print(len(result))\n for i in range(0, len(result)):\n for j in range(i + 1, len(result)):\n if result[i].getScore() <= result[j].getScore():\n temp = result[i]\n result[i] = result[j]\n result[j] = temp\n print('The three trees with highest scores look like:')\n for i in range(0, 5):\n print(result[i])\n for tree in result:\n print(' treeList Result %s:%d' % (tree.getSentence(), tree.\n getScore()))\n tree.insertImplicitNodes()\n query = tree.translateToSQL(self.schema)\n print('qUERY: ' + query.toString())\n\n def adjustTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word='DEFAULT', posTag='DEFAULT') for i in\n range(0, 9)]\n nodes[0] = Node(index=0, word='ROOT', posTag='--')\n nodes[0].info = NodeInfo(type='ROOT', value='ROOT')\n nodes[1] = Node(index=1, word='return', posTag='--')\n nodes[1].info = NodeInfo(type='SN', value='SELECT')\n nodes[2] = Node(index=2, word='conference', posTag='--')\n nodes[2].info = NodeInfo(type='NN', value='Author')\n nodes[3] = Node(index=3, word='area', posTag='--')\n nodes[3].info = NodeInfo(type='NN', value='Title')\n nodes[4] = Node(index=4, word='each', posTag='--')\n nodes[4].info = NodeInfo(type='QN', value='>')\n nodes[5] = Node(index=5, word='papers', posTag='--')\n nodes[5].info = NodeInfo(type='NN', value='Author')\n nodes[6] = Node(index=6, word='citations', posTag='--')\n nodes[6].info = NodeInfo(type='NN', value='Journal')\n nodes[7] = Node(index=7, word='most', posTag='--')\n nodes[7].info = NodeInfo(type='FN', value='>')\n nodes[8] = Node(index=8, word='total', 
posTag='--')\n nodes[8].info = NodeInfo(type='FN', value='Year')\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[5])\n nodes[5].parent = nodes[2]\n nodes[3].children.append(nodes[4])\n nodes[4].parent = nodes[3]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[6].children.append(nodes[7])\n nodes[7].parent = nodes[6]\n nodes[6].children.append(nodes[8])\n nodes[8].parent = nodes[6]\n print('===========test for Running adjust() in TreeAdjustor==========='\n )\n treeList = TreeAdjustor.adjust(T)\n print('Output size: %d' % len(treeList))\n print('Output trees:')\n ctr = 0\n for tr in treeList:\n print('Tree %d %s' % (ctr, tr.getSentence()))\n ctr += 1\n\n @staticmethod\n def cmpp(a, b):\n return a.getScore() > b.getScore()\n\n\nobj = TreeAdjustorTest()\nobj.getAdjustedTreesTest()\n",
"step-5": "\nfrom ParseTree import ParseTree\nfrom Node import Node\nfrom NodeInfo import NodeInfo\nfrom TreeAdjustor import TreeAdjustor\nfrom model.SchemaGraph import SchemaGraph\n\n\nclass TreeAdjustorTest:\n\n schema = None\n def __init__(self):\n return\n\n def getAdjustedTreesTest(self):\n\n\n\n T = ParseTree()\n nodes = [Node(index=-1, word=\"DEFAULT\", posTag=\"DEFAULT\") for i in range(0, 8)]\n\n nodes[0] = Node(index=0, word=\"ROOT\", posTag=\"--\")\n nodes[0].info = NodeInfo(type=\"ROOT\", value=\"ROOT\")\n nodes[1] = Node(index=1, word=\"return\", posTag=\"--\")\n nodes[1].info = NodeInfo(type=\"SN\", value=\"SELECT\")\n nodes[2] = Node(index=2, word=\"conference\", posTag=\"--\")\n nodes[2].info = NodeInfo(type=\"NN\", value=\"Author\")\n nodes[3] = Node(index=3, word=\"area\", posTag=\"--\")\n nodes[3].info = NodeInfo(type=\"NN\", value=\"Title\")\n nodes[4] = Node(index=4, word=\"papers\", posTag=\"--\")\n nodes[4].info = NodeInfo(type=\"NN\", value=\"Author\")\n nodes[5] = Node(index=5, word=\"citations\", posTag=\"--\")\n nodes[5].info = NodeInfo(type=\"NN\", value=\"Journal\")\n nodes[6] = Node(index=6, word=\"most\", posTag=\"--\")\n nodes[6].info = NodeInfo(type=\"FN\", value=\">\")\n nodes[7] = Node(index=7, word=\"total\", posTag=\"--\")\n nodes[7].info = NodeInfo(type=\"FN\", value=\"Year\")\n\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[4])\n nodes[4].parent = nodes[2]\n nodes[4].children.append(nodes[5])\n nodes[5].parent = nodes[4]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[5].children.append(nodes[7])\n nodes[7].parent = nodes[5]\n\n print (\"===========test for Running getAdjustedTrees() in TreeAdjustor===========\")\n print (\"The original tree:\")\n print (T.toString())\n print (\"Number of possible trees for choice:\")\n\n obj = TreeAdjustor()\n result = TreeAdjustor.getAdjustedTrees(T)\n # result = TreeAdjustor.adjust(T)\n\n print (len(result))\n # result = sorted(result,cmp=TreeAdjustorTest.cmpp)\n # l =sorted(m, cmp =TreeAdjustor.timeStampCompare)\n for i in range(0, len(result)):\n for j in range(i+1, len(result)):\n if(result[i].getScore() <= result[j].getScore()):\n temp = result[i]\n result[i] =result[j]\n result[j] = temp\n print (\"The three trees with highest scores look like:\")\n for i in range(0,5):\n print (result[i])\n\n for tree in result:\n print (\" treeList Result %s:%d\" % (tree.getSentence(), tree.getScore()))\n tree.insertImplicitNodes()\n query = tree.translateToSQL(self.schema)\n print (\"qUERY: \" + query.toString())\n \n\n def adjustTest(self):\n T = ParseTree()\n nodes = [Node(index=-1, word=\"DEFAULT\", posTag=\"DEFAULT\") for i in range(0, 9)]\n nodes[0] = Node(index=0, word=\"ROOT\",posTag= \"--\")\n nodes[0].info = NodeInfo(type=\"ROOT\", value=\"ROOT\")\n nodes[1] = Node(index=1, word=\"return\", posTag=\"--\")\n nodes[1].info = NodeInfo(type=\"SN\", value=\"SELECT\")\n nodes[2] = Node(index=2, word=\"conference\", posTag=\"--\")\n nodes[2].info = NodeInfo(type=\"NN\", value=\"Author\")\n nodes[3] = Node(index=3, word=\"area\", posTag=\"--\")\n nodes[3].info =NodeInfo(type=\"NN\", value=\"Title\")\n nodes[4] =Node(index=4, word=\"each\", posTag=\"--\")\n nodes[4].info = NodeInfo(type=\"QN\", value=\">\")\n nodes[5] = Node(index=5, word=\"papers\", posTag=\"--\")\n nodes[5].info = 
NodeInfo(type=\"NN\", value=\"Author\")\n nodes[6] = Node(index=6, word=\"citations\", posTag=\"--\")\n nodes[6].info = NodeInfo(type=\"NN\", value=\"Journal\")\n nodes[7] = Node(index=7, word=\"most\", posTag=\"--\")\n nodes[7].info = NodeInfo(type=\"FN\", value=\">\")\n nodes[8] = Node(index=8, word=\"total\", posTag=\"--\")\n nodes[8].info = NodeInfo(type=\"FN\", value=\"Year\")\n\n T.root = nodes[0]\n nodes[0].children.append(nodes[1])\n nodes[1].parent = nodes[0]\n nodes[1].children.append(nodes[2])\n nodes[2].parent = nodes[1]\n nodes[2].children.append(nodes[3])\n nodes[3].parent = nodes[2]\n nodes[2].children.append(nodes[5])\n nodes[5].parent = nodes[2]\n nodes[3].children.append(nodes[4])\n nodes[4].parent = nodes[3]\n nodes[5].children.append(nodes[6])\n nodes[6].parent = nodes[5]\n nodes[6].children.append(nodes[7])\n nodes[7].parent = nodes[6]\n nodes[6].children.append(nodes[8])\n nodes[8].parent = nodes[6]\n\n print (\"===========test for Running adjust() in TreeAdjustor===========\")\n\n treeList = TreeAdjustor.adjust(T)\n print (\"Output size: %d\"%len(treeList))\n\n print (\"Output trees:\")\n ctr=0\n for tr in treeList:\n print (\"Tree %d %s\"%(ctr, tr.getSentence()))\n ctr+=1\n @staticmethod\n def cmpp(a,b):\n\n return a.getScore() > b.getScore()\n\nobj = TreeAdjustorTest()\nobj.getAdjustedTreesTest()\n# obj.adjustTest()\n\n\n\n",
"step-ids": [
3,
4,
8,
9,
10
]
}
|
[
3,
4,
8,
9,
10
] |
# Generated by Django 3.2.1 on 2021-05-17 18:02
import django.contrib.auth.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='Client',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(blank=True, max_length=255)),
('last_name', models.CharField(blank=True, max_length=255)),
('address', models.TextField(max_length=1000, verbose_name='Address')),
('phone', models.CharField(max_length=255, unique=True)),
('email', models.EmailField(blank=True, max_length=254, null=True)),
],
options={
'verbose_name': 'Client',
'verbose_name_plural': 'Clients',
},
),
migrations.CreateModel(
name='Composition',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cost', models.PositiveIntegerField(default=0, verbose_name='Cost')),
],
options={
'verbose_name': 'Composition',
'verbose_name_plural': 'Compositions',
},
),
migrations.CreateModel(
name='Extra',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True, verbose_name='Name')),
('type', models.CharField(choices=[('dessert', 'Dessert'), ('drink', 'Drink'), ('other', 'Other')], default='other', max_length=20, verbose_name='Type')),
('price', models.PositiveIntegerField(verbose_name='Price')),
('discount_price', models.PositiveIntegerField(blank=True, null=True, verbose_name='Discount Price')),
],
options={
'verbose_name': 'Extra',
'verbose_name_plural': 'Extras',
'ordering': ('name', 'type'),
},
),
migrations.CreateModel(
name='Menu',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True, verbose_name='Name')),
('audio_file_name', models.CharField(blank=True, max_length=255, verbose_name='Music')),
('available', models.BooleanField(default=True, verbose_name='Available')),
('image', models.ImageField(blank=True, null=True, upload_to='images', verbose_name='Image')),
('font_name', models.CharField(blank=True, max_length=255, verbose_name='Font')),
('price', models.PositiveIntegerField(default=0, verbose_name='Price')),
],
options={
'verbose_name': 'Menu',
'verbose_name_plural': 'Menus',
},
),
migrations.CreateModel(
name='Order',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('number', models.CharField(max_length=255, unique=True)),
('status', models.CharField(choices=[('pending', 'Pending'), ('confirmed', 'Confirmed'), ('preparing', 'Preparing'), ('ready', 'Ready'), ('on_delivery', 'On Delivery'), ('delivered', 'Delivered'), ('canceled', 'Canceled'), ('no_answer', 'No Answer'), ('ditched', 'Ditched')], default='pending', max_length=20)),
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orders', to='mainapp.client', verbose_name='Client')),
],
),
migrations.CreateModel(
name='ZeUser',
fields=[
('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='auth.user')),
('phone', models.CharField(max_length=255, unique=True)),
('is_admin', models.BooleanField(default=False)),
('is_deliveryman', models.BooleanField(default=False)),
],
options={
'verbose_name': 'User',
'verbose_name_plural': 'Users',
},
bases=('auth.user',),
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Section',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, verbose_name='Name')),
('position', models.PositiveIntegerField(default=1, verbose_name='Position')),
('menu', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.menu', verbose_name='Menu')),
],
options={
'verbose_name': 'Section',
'verbose_name_plural': 'Sections',
'ordering': ('position',),
},
),
migrations.CreateModel(
name='PhoneNumber',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('phone', models.CharField(max_length=255, unique=True, verbose_name='Phone Number')),
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='phones', to='mainapp.client', verbose_name='Client')),
],
options={
'verbose_name': 'Phone Number',
'verbose_name_plural': 'Phone Numbers',
'ordering': ('id',),
},
),
migrations.CreateModel(
name='OrderLine',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.PositiveIntegerField(default=1, verbose_name='Quantity')),
('composition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.composition', verbose_name='Composition')),
('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='mainapp.order', verbose_name='Order')),
],
options={
'ordering': ('id',),
},
),
migrations.CreateModel(
name='Food',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, verbose_name='Name')),
('price', models.PositiveIntegerField(default=0, verbose_name='Price')),
('discount_price', models.PositiveIntegerField(blank=True, null=True, verbose_name='Discount Price')),
('description', models.TextField(blank=True, max_length=1000)),
('image', models.ImageField(blank=True, null=True, upload_to='images', verbose_name='Image')),
('section', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.section', verbose_name='Section')),
],
options={
'verbose_name': 'Food',
'verbose_name_plural': 'Foods',
'ordering': ('name', 'section__position'),
},
),
migrations.CreateModel(
name='CompositionFood',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('food_price', models.PositiveIntegerField(default=0, verbose_name='Food Price')),
('composition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.composition', verbose_name='Composition')),
('food', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.food', verbose_name='Food')),
],
options={
'ordering': ('food__section__position',),
},
),
migrations.AddField(
model_name='composition',
name='extras',
field=models.ManyToManyField(related_name='compositions', to='mainapp.Extra', verbose_name='Extras'),
),
migrations.AddField(
model_name='composition',
name='menu',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.menu', verbose_name='Name'),
),
migrations.AddField(
model_name='composition',
name='selected_foods',
field=models.ManyToManyField(through='mainapp.CompositionFood', to='mainapp.Food', verbose_name='Selected Foods'),
),
migrations.CreateModel(
name='Address',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('address', models.TextField(max_length=1000, verbose_name='Address')),
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='mainapp.client', verbose_name='Client')),
],
options={
'verbose_name': 'Address',
'verbose_name_plural': 'Addresses',
'ordering': ('id',),
},
),
]
|
normal
|
{
"blob_id": "7ce471b3a6966c1a60ae2e2f3ec42369fe3d0f9c",
"index": 6377,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('auth', '0012_alter_user_first_name_max_length')]\n operations = [migrations.CreateModel(name='Client', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('first_name', models.CharField(blank=\n True, max_length=255)), ('last_name', models.CharField(blank=True,\n max_length=255)), ('address', models.TextField(max_length=1000,\n verbose_name='Address')), ('phone', models.CharField(max_length=255,\n unique=True)), ('email', models.EmailField(blank=True, max_length=\n 254, null=True))], options={'verbose_name': 'Client',\n 'verbose_name_plural': 'Clients'}), migrations.CreateModel(name=\n 'Composition', fields=[('id', models.BigAutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), ('cost',\n models.PositiveIntegerField(default=0, verbose_name='Cost'))],\n options={'verbose_name': 'Composition', 'verbose_name_plural':\n 'Compositions'}), migrations.CreateModel(name='Extra', fields=[(\n 'id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=255, unique=True, verbose_name='Name')), ('type', models\n .CharField(choices=[('dessert', 'Dessert'), ('drink', 'Drink'), (\n 'other', 'Other')], default='other', max_length=20, verbose_name=\n 'Type')), ('price', models.PositiveIntegerField(verbose_name=\n 'Price')), ('discount_price', models.PositiveIntegerField(blank=\n True, null=True, verbose_name='Discount Price'))], options={\n 'verbose_name': 'Extra', 'verbose_name_plural': 'Extras',\n 'ordering': ('name', 'type')}), migrations.CreateModel(name='Menu',\n fields=[('id', models.BigAutoField(auto_created=True, primary_key=\n True, serialize=False, verbose_name='ID')), ('name', models.\n CharField(max_length=255, unique=True, verbose_name='Name')), (\n 'audio_file_name', models.CharField(blank=True, max_length=255,\n verbose_name='Music')), ('available', models.BooleanField(default=\n True, verbose_name='Available')), ('image', models.ImageField(blank\n =True, null=True, upload_to='images', verbose_name='Image')), (\n 'font_name', models.CharField(blank=True, max_length=255,\n verbose_name='Font')), ('price', models.PositiveIntegerField(\n default=0, verbose_name='Price'))], options={'verbose_name': 'Menu',\n 'verbose_name_plural': 'Menus'}), migrations.CreateModel(name=\n 'Order', fields=[('id', models.BigAutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), (\n 'created_at', models.DateTimeField(auto_now_add=True)), ('number',\n models.CharField(max_length=255, unique=True)), ('status', models.\n CharField(choices=[('pending', 'Pending'), ('confirmed',\n 'Confirmed'), ('preparing', 'Preparing'), ('ready', 'Ready'), (\n 'on_delivery', 'On Delivery'), ('delivered', 'Delivered'), (\n 'canceled', 'Canceled'), ('no_answer', 'No Answer'), ('ditched',\n 'Ditched')], default='pending', max_length=20)), ('client', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n related_name='orders', to='mainapp.client', verbose_name='Client'))\n ]), migrations.CreateModel(name='ZeUser', fields=[('user_ptr',\n models.OneToOneField(auto_created=True, on_delete=django.db.models.\n deletion.CASCADE, parent_link=True, primary_key=True, serialize=\n False, to='auth.user')), ('phone', models.CharField(max_length=255,\n unique=True)), ('is_admin', models.BooleanField(default=False)), (\n 
'is_deliveryman', models.BooleanField(default=False))], options={\n 'verbose_name': 'User', 'verbose_name_plural': 'Users'}, bases=(\n 'auth.user',), managers=[('objects', django.contrib.auth.models.\n UserManager())]), migrations.CreateModel(name='Section', fields=[(\n 'id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=255, verbose_name='Name')), ('position', models.\n PositiveIntegerField(default=1, verbose_name='Position')), ('menu',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'mainapp.menu', verbose_name='Menu'))], options={'verbose_name':\n 'Section', 'verbose_name_plural': 'Sections', 'ordering': (\n 'position',)}), migrations.CreateModel(name='PhoneNumber', fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('phone', models.CharField(\n max_length=255, unique=True, verbose_name='Phone Number')), (\n 'client', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, related_name='phones', to='mainapp.client', verbose_name=\n 'Client'))], options={'verbose_name': 'Phone Number',\n 'verbose_name_plural': 'Phone Numbers', 'ordering': ('id',)}),\n migrations.CreateModel(name='OrderLine', fields=[('id', models.\n BigAutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('quantity', models.PositiveIntegerField(\n default=1, verbose_name='Quantity')), ('composition', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'mainapp.composition', verbose_name='Composition')), ('order',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n related_name='lines', to='mainapp.order', verbose_name='Order'))],\n options={'ordering': ('id',)}), migrations.CreateModel(name='Food',\n fields=[('id', models.BigAutoField(auto_created=True, primary_key=\n True, serialize=False, verbose_name='ID')), ('name', models.\n CharField(max_length=255, verbose_name='Name')), ('price', models.\n PositiveIntegerField(default=0, verbose_name='Price')), (\n 'discount_price', models.PositiveIntegerField(blank=True, null=True,\n verbose_name='Discount Price')), ('description', models.TextField(\n blank=True, max_length=1000)), ('image', models.ImageField(blank=\n True, null=True, upload_to='images', verbose_name='Image')), (\n 'section', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, to='mainapp.section', verbose_name='Section'))], options={\n 'verbose_name': 'Food', 'verbose_name_plural': 'Foods', 'ordering':\n ('name', 'section__position')}), migrations.CreateModel(name=\n 'CompositionFood', fields=[('id', models.BigAutoField(auto_created=\n True, primary_key=True, serialize=False, verbose_name='ID')), (\n 'food_price', models.PositiveIntegerField(default=0, verbose_name=\n 'Food Price')), ('composition', models.ForeignKey(on_delete=django.\n db.models.deletion.CASCADE, to='mainapp.composition', verbose_name=\n 'Composition')), ('food', models.ForeignKey(on_delete=django.db.\n models.deletion.CASCADE, to='mainapp.food', verbose_name='Food'))],\n options={'ordering': ('food__section__position',)}), migrations.\n AddField(model_name='composition', name='extras', field=models.\n ManyToManyField(related_name='compositions', to='mainapp.Extra',\n verbose_name='Extras')), migrations.AddField(model_name=\n 'composition', name='menu', field=models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='mainapp.menu', verbose_name=\n 'Name')), 
migrations.AddField(model_name='composition', name=\n 'selected_foods', field=models.ManyToManyField(through=\n 'mainapp.CompositionFood', to='mainapp.Food', verbose_name=\n 'Selected Foods')), migrations.CreateModel(name='Address', fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('address', models.TextField(\n max_length=1000, verbose_name='Address')), ('client', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n related_name='addresses', to='mainapp.client', verbose_name=\n 'Client'))], options={'verbose_name': 'Address',\n 'verbose_name_plural': 'Addresses', 'ordering': ('id',)})]\n",
"step-4": "import django.contrib.auth.models\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('auth', '0012_alter_user_first_name_max_length')]\n operations = [migrations.CreateModel(name='Client', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('first_name', models.CharField(blank=\n True, max_length=255)), ('last_name', models.CharField(blank=True,\n max_length=255)), ('address', models.TextField(max_length=1000,\n verbose_name='Address')), ('phone', models.CharField(max_length=255,\n unique=True)), ('email', models.EmailField(blank=True, max_length=\n 254, null=True))], options={'verbose_name': 'Client',\n 'verbose_name_plural': 'Clients'}), migrations.CreateModel(name=\n 'Composition', fields=[('id', models.BigAutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), ('cost',\n models.PositiveIntegerField(default=0, verbose_name='Cost'))],\n options={'verbose_name': 'Composition', 'verbose_name_plural':\n 'Compositions'}), migrations.CreateModel(name='Extra', fields=[(\n 'id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=255, unique=True, verbose_name='Name')), ('type', models\n .CharField(choices=[('dessert', 'Dessert'), ('drink', 'Drink'), (\n 'other', 'Other')], default='other', max_length=20, verbose_name=\n 'Type')), ('price', models.PositiveIntegerField(verbose_name=\n 'Price')), ('discount_price', models.PositiveIntegerField(blank=\n True, null=True, verbose_name='Discount Price'))], options={\n 'verbose_name': 'Extra', 'verbose_name_plural': 'Extras',\n 'ordering': ('name', 'type')}), migrations.CreateModel(name='Menu',\n fields=[('id', models.BigAutoField(auto_created=True, primary_key=\n True, serialize=False, verbose_name='ID')), ('name', models.\n CharField(max_length=255, unique=True, verbose_name='Name')), (\n 'audio_file_name', models.CharField(blank=True, max_length=255,\n verbose_name='Music')), ('available', models.BooleanField(default=\n True, verbose_name='Available')), ('image', models.ImageField(blank\n =True, null=True, upload_to='images', verbose_name='Image')), (\n 'font_name', models.CharField(blank=True, max_length=255,\n verbose_name='Font')), ('price', models.PositiveIntegerField(\n default=0, verbose_name='Price'))], options={'verbose_name': 'Menu',\n 'verbose_name_plural': 'Menus'}), migrations.CreateModel(name=\n 'Order', fields=[('id', models.BigAutoField(auto_created=True,\n primary_key=True, serialize=False, verbose_name='ID')), (\n 'created_at', models.DateTimeField(auto_now_add=True)), ('number',\n models.CharField(max_length=255, unique=True)), ('status', models.\n CharField(choices=[('pending', 'Pending'), ('confirmed',\n 'Confirmed'), ('preparing', 'Preparing'), ('ready', 'Ready'), (\n 'on_delivery', 'On Delivery'), ('delivered', 'Delivered'), (\n 'canceled', 'Canceled'), ('no_answer', 'No Answer'), ('ditched',\n 'Ditched')], default='pending', max_length=20)), ('client', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n related_name='orders', to='mainapp.client', verbose_name='Client'))\n ]), migrations.CreateModel(name='ZeUser', fields=[('user_ptr',\n models.OneToOneField(auto_created=True, on_delete=django.db.models.\n deletion.CASCADE, parent_link=True, primary_key=True, serialize=\n False, to='auth.user')), ('phone', 
models.CharField(max_length=255,\n unique=True)), ('is_admin', models.BooleanField(default=False)), (\n 'is_deliveryman', models.BooleanField(default=False))], options={\n 'verbose_name': 'User', 'verbose_name_plural': 'Users'}, bases=(\n 'auth.user',), managers=[('objects', django.contrib.auth.models.\n UserManager())]), migrations.CreateModel(name='Section', fields=[(\n 'id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('name', models.CharField(\n max_length=255, verbose_name='Name')), ('position', models.\n PositiveIntegerField(default=1, verbose_name='Position')), ('menu',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'mainapp.menu', verbose_name='Menu'))], options={'verbose_name':\n 'Section', 'verbose_name_plural': 'Sections', 'ordering': (\n 'position',)}), migrations.CreateModel(name='PhoneNumber', fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('phone', models.CharField(\n max_length=255, unique=True, verbose_name='Phone Number')), (\n 'client', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, related_name='phones', to='mainapp.client', verbose_name=\n 'Client'))], options={'verbose_name': 'Phone Number',\n 'verbose_name_plural': 'Phone Numbers', 'ordering': ('id',)}),\n migrations.CreateModel(name='OrderLine', fields=[('id', models.\n BigAutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('quantity', models.PositiveIntegerField(\n default=1, verbose_name='Quantity')), ('composition', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'mainapp.composition', verbose_name='Composition')), ('order',\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n related_name='lines', to='mainapp.order', verbose_name='Order'))],\n options={'ordering': ('id',)}), migrations.CreateModel(name='Food',\n fields=[('id', models.BigAutoField(auto_created=True, primary_key=\n True, serialize=False, verbose_name='ID')), ('name', models.\n CharField(max_length=255, verbose_name='Name')), ('price', models.\n PositiveIntegerField(default=0, verbose_name='Price')), (\n 'discount_price', models.PositiveIntegerField(blank=True, null=True,\n verbose_name='Discount Price')), ('description', models.TextField(\n blank=True, max_length=1000)), ('image', models.ImageField(blank=\n True, null=True, upload_to='images', verbose_name='Image')), (\n 'section', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, to='mainapp.section', verbose_name='Section'))], options={\n 'verbose_name': 'Food', 'verbose_name_plural': 'Foods', 'ordering':\n ('name', 'section__position')}), migrations.CreateModel(name=\n 'CompositionFood', fields=[('id', models.BigAutoField(auto_created=\n True, primary_key=True, serialize=False, verbose_name='ID')), (\n 'food_price', models.PositiveIntegerField(default=0, verbose_name=\n 'Food Price')), ('composition', models.ForeignKey(on_delete=django.\n db.models.deletion.CASCADE, to='mainapp.composition', verbose_name=\n 'Composition')), ('food', models.ForeignKey(on_delete=django.db.\n models.deletion.CASCADE, to='mainapp.food', verbose_name='Food'))],\n options={'ordering': ('food__section__position',)}), migrations.\n AddField(model_name='composition', name='extras', field=models.\n ManyToManyField(related_name='compositions', to='mainapp.Extra',\n verbose_name='Extras')), migrations.AddField(model_name=\n 'composition', name='menu', 
field=models.ForeignKey(on_delete=\n django.db.models.deletion.CASCADE, to='mainapp.menu', verbose_name=\n 'Name')), migrations.AddField(model_name='composition', name=\n 'selected_foods', field=models.ManyToManyField(through=\n 'mainapp.CompositionFood', to='mainapp.Food', verbose_name=\n 'Selected Foods')), migrations.CreateModel(name='Address', fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('address', models.TextField(\n max_length=1000, verbose_name='Address')), ('client', models.\n ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n related_name='addresses', to='mainapp.client', verbose_name=\n 'Client'))], options={'verbose_name': 'Address',\n 'verbose_name_plural': 'Addresses', 'ordering': ('id',)})]\n",
"step-5": "# Generated by Django 3.2.1 on 2021-05-17 18:02\n\nimport django.contrib.auth.models\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('auth', '0012_alter_user_first_name_max_length'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Client',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('first_name', models.CharField(blank=True, max_length=255)),\n ('last_name', models.CharField(blank=True, max_length=255)),\n ('address', models.TextField(max_length=1000, verbose_name='Address')),\n ('phone', models.CharField(max_length=255, unique=True)),\n ('email', models.EmailField(blank=True, max_length=254, null=True)),\n ],\n options={\n 'verbose_name': 'Client',\n 'verbose_name_plural': 'Clients',\n },\n ),\n migrations.CreateModel(\n name='Composition',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('cost', models.PositiveIntegerField(default=0, verbose_name='Cost')),\n ],\n options={\n 'verbose_name': 'Composition',\n 'verbose_name_plural': 'Compositions',\n },\n ),\n migrations.CreateModel(\n name='Extra',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255, unique=True, verbose_name='Name')),\n ('type', models.CharField(choices=[('dessert', 'Dessert'), ('drink', 'Drink'), ('other', 'Other')], default='other', max_length=20, verbose_name='Type')),\n ('price', models.PositiveIntegerField(verbose_name='Price')),\n ('discount_price', models.PositiveIntegerField(blank=True, null=True, verbose_name='Discount Price')),\n ],\n options={\n 'verbose_name': 'Extra',\n 'verbose_name_plural': 'Extras',\n 'ordering': ('name', 'type'),\n },\n ),\n migrations.CreateModel(\n name='Menu',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255, unique=True, verbose_name='Name')),\n ('audio_file_name', models.CharField(blank=True, max_length=255, verbose_name='Music')),\n ('available', models.BooleanField(default=True, verbose_name='Available')),\n ('image', models.ImageField(blank=True, null=True, upload_to='images', verbose_name='Image')),\n ('font_name', models.CharField(blank=True, max_length=255, verbose_name='Font')),\n ('price', models.PositiveIntegerField(default=0, verbose_name='Price')),\n ],\n options={\n 'verbose_name': 'Menu',\n 'verbose_name_plural': 'Menus',\n },\n ),\n migrations.CreateModel(\n name='Order',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('created_at', models.DateTimeField(auto_now_add=True)),\n ('number', models.CharField(max_length=255, unique=True)),\n ('status', models.CharField(choices=[('pending', 'Pending'), ('confirmed', 'Confirmed'), ('preparing', 'Preparing'), ('ready', 'Ready'), ('on_delivery', 'On Delivery'), ('delivered', 'Delivered'), ('canceled', 'Canceled'), ('no_answer', 'No Answer'), ('ditched', 'Ditched')], default='pending', max_length=20)),\n ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orders', to='mainapp.client', verbose_name='Client')),\n ],\n ),\n migrations.CreateModel(\n name='ZeUser',\n fields=[\n ('user_ptr', models.OneToOneField(auto_created=True, 
on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='auth.user')),\n ('phone', models.CharField(max_length=255, unique=True)),\n ('is_admin', models.BooleanField(default=False)),\n ('is_deliveryman', models.BooleanField(default=False)),\n ],\n options={\n 'verbose_name': 'User',\n 'verbose_name_plural': 'Users',\n },\n bases=('auth.user',),\n managers=[\n ('objects', django.contrib.auth.models.UserManager()),\n ],\n ),\n migrations.CreateModel(\n name='Section',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255, verbose_name='Name')),\n ('position', models.PositiveIntegerField(default=1, verbose_name='Position')),\n ('menu', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.menu', verbose_name='Menu')),\n ],\n options={\n 'verbose_name': 'Section',\n 'verbose_name_plural': 'Sections',\n 'ordering': ('position',),\n },\n ),\n migrations.CreateModel(\n name='PhoneNumber',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('phone', models.CharField(max_length=255, unique=True, verbose_name='Phone Number')),\n ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='phones', to='mainapp.client', verbose_name='Client')),\n ],\n options={\n 'verbose_name': 'Phone Number',\n 'verbose_name_plural': 'Phone Numbers',\n 'ordering': ('id',),\n },\n ),\n migrations.CreateModel(\n name='OrderLine',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('quantity', models.PositiveIntegerField(default=1, verbose_name='Quantity')),\n ('composition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.composition', verbose_name='Composition')),\n ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='mainapp.order', verbose_name='Order')),\n ],\n options={\n 'ordering': ('id',),\n },\n ),\n migrations.CreateModel(\n name='Food',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255, verbose_name='Name')),\n ('price', models.PositiveIntegerField(default=0, verbose_name='Price')),\n ('discount_price', models.PositiveIntegerField(blank=True, null=True, verbose_name='Discount Price')),\n ('description', models.TextField(blank=True, max_length=1000)),\n ('image', models.ImageField(blank=True, null=True, upload_to='images', verbose_name='Image')),\n ('section', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.section', verbose_name='Section')),\n ],\n options={\n 'verbose_name': 'Food',\n 'verbose_name_plural': 'Foods',\n 'ordering': ('name', 'section__position'),\n },\n ),\n migrations.CreateModel(\n name='CompositionFood',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('food_price', models.PositiveIntegerField(default=0, verbose_name='Food Price')),\n ('composition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.composition', verbose_name='Composition')),\n ('food', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.food', verbose_name='Food')),\n ],\n options={\n 'ordering': ('food__section__position',),\n },\n ),\n migrations.AddField(\n 
model_name='composition',\n name='extras',\n field=models.ManyToManyField(related_name='compositions', to='mainapp.Extra', verbose_name='Extras'),\n ),\n migrations.AddField(\n model_name='composition',\n name='menu',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.menu', verbose_name='Name'),\n ),\n migrations.AddField(\n model_name='composition',\n name='selected_foods',\n field=models.ManyToManyField(through='mainapp.CompositionFood', to='mainapp.Food', verbose_name='Selected Foods'),\n ),\n migrations.CreateModel(\n name='Address',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('address', models.TextField(max_length=1000, verbose_name='Address')),\n ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='mainapp.client', verbose_name='Client')),\n ],\n options={\n 'verbose_name': 'Address',\n 'verbose_name_plural': 'Addresses',\n 'ordering': ('id',),\n },\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
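For readability, here is a minimal sketch of the model class that the CreateModel('Client', ...) operation in the migration record above corresponds to. The field definitions are read directly from the migration; the surrounding app module and import layout are assumptions, and the other tables would be reconstructed the same way.

from django.db import models


class Client(models.Model):
    # fields mirror the CreateModel('Client', ...) operation in the migration above
    first_name = models.CharField(blank=True, max_length=255)
    last_name = models.CharField(blank=True, max_length=255)
    address = models.TextField(max_length=1000, verbose_name='Address')
    phone = models.CharField(max_length=255, unique=True)
    email = models.EmailField(blank=True, max_length=254, null=True)

    class Meta:
        verbose_name = 'Client'
        verbose_name_plural = 'Clients'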
'''
def Sort(a):
i=1
while i<len(a):
j=i
while j>0 and a[j-1] > a[j]:
temp = a[j-1]
a[j-1] = a[j]
a[j] = temp
j-=1
i+=1
return a
'''
def Sort(a):
    # Insertion sort that prints the array after every shift so the
    # intermediate states stay visible. The earlier draft mixed up the
    # names `a` and `arr`; this version works on `a` throughout.
    i = 1
    while i < len(a):
        key = a[i]
        j = i - 1
        print('Key : ', key, ' inserting from position: ', i, '\t in ', a)
        # shift every element greater than the key one slot to the right
        while j >= 0 and a[j] > key:
            a[j + 1] = a[j]
            j -= 1
            print(' '.join(list(map(str, a))))
        a[j + 1] = key
        print(' '.join(list(map(str, a))))
        i += 1
    return a
|
normal
|
{
"blob_id": "3f8b8b8cfbe712f09734d0fb7302073187d65a73",
"index": 982,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef Sort(a):\n i = 1\n n = len(a)\n while i < len(a):\n j = i\n print(i - 1, '\\t', i)\n while a[j - 1] > a[j] and j >= 0:\n j -= 1\n print('Key : ', a[i], ' inserting at: ', j, '\\t in ', a)\n if n > 2:\n j1 = n - 2\n temp = arr[n - 1]\n while arr[j1] > temp and j1 >= 0:\n arr[j1 + 1] = arr[j1]\n j1 -= 1\n print(' '.join(list(map(str, arr))))\n arr[j1 + 1] = temp\n print(' '.join(list(map(str, arr))))\n elif n == 1:\n return arr\n else:\n temp = arr[1]\n arr[1] = arr[0]\n print(' '.join(list(map(str, arr))))\n arr[0] = temp\n print(' '.join(list(map(str, arr))))\n i += 1\n return a\n",
"step-3": "'''\ndef Sort(a):\n i=1\n while i<len(a):\n j=i\n while j>0 and a[j-1] > a[j]:\n temp = a[j-1]\n a[j-1] = a[j]\n a[j] = temp\n j-=1\n i+=1\n return a\n'''\ndef Sort(a):\n i=1\n n=len(a)\n while i<len(a):\n j=i\n print(i-1,'\\t',i)\n while a[j-1]>a[j] and j>=0:\n j-=1\n print('Key : ',a[i],' inserting at: ',j, '\\t in ',a)\n if n>2:\n j1=n-2\n temp = arr[n-1]\n while arr[j1] > temp and j1>=0:\n arr[j1+1] = arr[j1]\n j1-=1\n print(' '.join(list(map(str, arr))))\n arr[j1+1] = temp\n print(' '.join(list(map(str, arr))))\n elif n==1: \n return arr\n else: # len(arr) =2\n temp = arr[1]\n arr[1]=arr[0]\n print(' '.join(list(map(str, arr))))\n arr[0] = temp \n print(' '.join(list(map(str, arr))))\n i+=1\n return a\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
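A minimal usage sketch for the corrected Sort() above, assuming the function is in scope; the sample list is an arbitrary choice and the printed intermediate states come from the per-step printing inside the function.

if __name__ == "__main__":
    data = [5, 2, 4, 6, 1, 3]
    result = Sort(data)   # prints each intermediate state of the list
    print(result)         # [1, 2, 3, 4, 5, 6]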
# -*- coding: utf-8 -*-
# @Time : 2019/3/5 9:55 AM
# @Author : yidxue
from src.handler.base.base_handler import BaseHandler
from src.utils.tools import read_model
from tornado.options import options
import os
module_path = os.path.abspath(os.path.join(os.curdir))
model_path = os.path.join(module_path, 'model')
class ReloadModelHandler(BaseHandler):
def __init__(self, application, request, **kwargs):
super(ReloadModelHandler, self).__init__(application, request, **kwargs)
def do_action(self):
model_name = self.get_argument('modelname', None)
if model_name is None:
for model_name in os.listdir(model_path):
if model_name.find(".model") == -1:
continue
model = read_model(os.path.join(model_path, model_name))
options.models[model_name] = model
self.set_result(result={"message": "server has reload all models"})
else:
model = read_model(os.path.join(model_path, model_name))
options.models[model_name] = model
self.set_result(result={"message": "server has reload {model}".format(model=model_name)})
|
normal
|
{
"blob_id": "a8ae59bb525c52ef852655f0ef1e32d96c8914d6",
"index": 1356,
"step-1": "<mask token>\n\n\nclass ReloadModelHandler(BaseHandler):\n\n def __init__(self, application, request, **kwargs):\n super(ReloadModelHandler, self).__init__(application, request, **kwargs\n )\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ReloadModelHandler(BaseHandler):\n\n def __init__(self, application, request, **kwargs):\n super(ReloadModelHandler, self).__init__(application, request, **kwargs\n )\n\n def do_action(self):\n model_name = self.get_argument('modelname', None)\n if model_name is None:\n for model_name in os.listdir(model_path):\n if model_name.find('.model') == -1:\n continue\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={'message': 'server has reload all models'})\n else:\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={'message': 'server has reload {model}'.\n format(model=model_name)})\n",
"step-3": "<mask token>\nmodule_path = os.path.abspath(os.path.join(os.curdir))\nmodel_path = os.path.join(module_path, 'model')\n\n\nclass ReloadModelHandler(BaseHandler):\n\n def __init__(self, application, request, **kwargs):\n super(ReloadModelHandler, self).__init__(application, request, **kwargs\n )\n\n def do_action(self):\n model_name = self.get_argument('modelname', None)\n if model_name is None:\n for model_name in os.listdir(model_path):\n if model_name.find('.model') == -1:\n continue\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={'message': 'server has reload all models'})\n else:\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={'message': 'server has reload {model}'.\n format(model=model_name)})\n",
"step-4": "from src.handler.base.base_handler import BaseHandler\nfrom src.utils.tools import read_model\nfrom tornado.options import options\nimport os\nmodule_path = os.path.abspath(os.path.join(os.curdir))\nmodel_path = os.path.join(module_path, 'model')\n\n\nclass ReloadModelHandler(BaseHandler):\n\n def __init__(self, application, request, **kwargs):\n super(ReloadModelHandler, self).__init__(application, request, **kwargs\n )\n\n def do_action(self):\n model_name = self.get_argument('modelname', None)\n if model_name is None:\n for model_name in os.listdir(model_path):\n if model_name.find('.model') == -1:\n continue\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={'message': 'server has reload all models'})\n else:\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={'message': 'server has reload {model}'.\n format(model=model_name)})\n",
"step-5": "# -*- coding: utf-8 -*-\n# @Time : 2019/3/5 上午9:55\n# @Author : yidxue\nfrom src.handler.base.base_handler import BaseHandler\nfrom src.utils.tools import read_model\nfrom tornado.options import options\nimport os\n\nmodule_path = os.path.abspath(os.path.join(os.curdir))\nmodel_path = os.path.join(module_path, 'model')\n\n\nclass ReloadModelHandler(BaseHandler):\n\n def __init__(self, application, request, **kwargs):\n super(ReloadModelHandler, self).__init__(application, request, **kwargs)\n\n def do_action(self):\n model_name = self.get_argument('modelname', None)\n if model_name is None:\n for model_name in os.listdir(model_path):\n if model_name.find(\".model\") == -1:\n continue\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={\"message\": \"server has reload all models\"})\n else:\n model = read_model(os.path.join(model_path, model_name))\n options.models[model_name] = model\n self.set_result(result={\"message\": \"server has reload {model}\".format(model=model_name)})\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
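The record above only defines the handler. Below is a minimal sketch of how it could be mounted in a Tornado application; the import path, route name, and port are assumptions, as is the idea that BaseHandler dispatches requests to do_action(). options.models is defined up front because the handler reads and writes that dictionary.

import tornado.ioloop
import tornado.web
from tornado.options import options

from src.handler.reload_model_handler import ReloadModelHandler  # module path is an assumption


def make_app():
    # "/reload_model" and port 8888 are illustrative choices, not taken from the record
    return tornado.web.Application([
        (r"/reload_model", ReloadModelHandler),
    ])


if __name__ == "__main__":
    options.define("models", default={})  # the handler stores loaded models here
    app = make_app()
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()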
import csv
import json,os
mylist=[]
clist=["North Indian","Italian","Continental","Chinese","Mexican","South Indian"]
for filename in os.listdir("/home/asket/Desktop/DBMS/menu"):
print(filename)
|
normal
|
{
"blob_id": "965db2523f60d83bd338bcc62ab8e5705550aa89",
"index": 6606,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor filename in os.listdir('/home/asket/Desktop/DBMS/menu'):\n print(filename)\n",
"step-3": "<mask token>\nmylist = []\nclist = ['North Indian', 'Italian', 'Continental', 'Chinese', 'Mexican',\n 'South Indian']\nfor filename in os.listdir('/home/asket/Desktop/DBMS/menu'):\n print(filename)\n",
"step-4": "import csv\nimport json, os\nmylist = []\nclist = ['North Indian', 'Italian', 'Continental', 'Chinese', 'Mexican',\n 'South Indian']\nfor filename in os.listdir('/home/asket/Desktop/DBMS/menu'):\n print(filename)\n",
"step-5": "import csv\nimport json,os\nmylist=[]\nclist=[\"North Indian\",\"Italian\",\"Continental\",\"Chinese\",\"Mexican\",\"South Indian\"]\nfor filename in os.listdir(\"/home/asket/Desktop/DBMS/menu\"):\n\tprint(filename)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
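The loop above only prints the file names. A hedged continuation of the same idea is sketched below: it reads each menu file with csv.DictReader and collects the rows into mylist, which is what the otherwise unused csv/json imports suggest. The column layout of the menu files is unknown, so only the reading pattern is shown.

menu_dir = "/home/asket/Desktop/DBMS/menu"
for filename in os.listdir(menu_dir):
    with open(os.path.join(menu_dir, filename)) as f:
        for row in csv.DictReader(f):   # one dict per menu line
            mylist.append(row)
print(json.dumps(mylist[:5], indent=2))  # peek at the first few rows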
#Define a function max_of_three() that takes three numbers as
#arguments and returns the largest of them.
def max_of_three(a, b, c):
    # keep the larger of a and b, then compare it with c
    if a > b:
        largest = a
    else:
        largest = b
    if largest > c:
        return largest
    else:
        return c

print max_of_three(234, 124, 43)

def max_of_three2(a, b, c):
    if a > b and a > c:
        return a
    elif b > c:
        return b
    else:
        return c

print max_of_three2(0, 15, 2)
|
normal
|
{
"blob_id": "00b4a57537358797bfe37eee76bbf73ef42de081",
"index": 9775,
"step-1": "\n\n\n#Define a function max_of_three() that takes three numbers as\n#arguments and returns the largest of them.\n\n\n\n\ndef max_of_three(a,b,c):\n\n max=0\n if a > b:\n max = a\n else:\n max = b\n\n if max > c :\n return max\n else:\n return c\n\n\n\nprint max(234,124,43)\n\n\ndef max_of_three2(a, b, c):\n if a > b and a > c:\n print a\n elif b > c:\n print b\n else:\n print c\n\n\nprint max_of_three2(0, 15, 2)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/python
'''Defines classes for representing metadata found in Biographies'''
class Date:
'''Object to represent dates. Dates can consist of regular day-month-year, but also descriptions (before, after, ca.). Object has attributes for regular parts and one for description, default is empty string.'''
def __init__( self, year='YY', month='YY', day='YY', description='', dateInterval = ''):
self.year = year
self.month = month
self.day = day
self.description = description
self.interval = dateInterval
def returnDate(self):
        myDate = self.year + '-' + self.month + '-' + self.day
if self.description:
myDate += ' (' + self.description + ')'
return myDate
class DateInterval:
'''Object to represent date intervales. consists of a begin date and an end date, each of which can be underspecified'''
def __init__(self, beginDate = '', endDate = ''):
self.beginDate = beginDate
self.endDate = endDate
class Name:
'''Object to describe person names. It has fields for initials, first name, last name, infixes and titles.'''
def __init__(self, lastname, firstname = '', initials = '', infix = ''):
self.lastname = lastname
self.firstname = firstname
self.initials = initials
self.infix = infix
self.title = ''
def addTitle(self, title):
self.title = title
def defineName(self, name):
self.lastname = name
def addFirstname(self, firstname):
self.firstname = firstname
def addInitials(self, initials):
self.initials = initials
def addInfix(self, infix):
self.infix = infix
def returnName(self):
'''prefer full first name if known, else initials. If neither are known, this will be the empty string.'''
if self.firstname:
name = self.title + ' ' + self.firstname + ' ' + self.infix + self.lastname
else:
name = self.title + ' ' + self.initials + ' ' + self.infix + self.lastname
return name
class Event:
'''Object that can describe an event (time, place, description)'''
def __init__(self, label, location = '', date = Date):
self.label = label
self.location = location
self.date = date
def setDate(self, date):
self.date = date
def setLocation(self, location):
self.location = location
class State:
'''Object that can describe a state (begin time, end time, place, description)'''
def __init__(self, label, description = '', location = '', beginDate = Date, endDate = Date):
self.label = label
self.location = location
self.beginDate = beginDate
self.endDate = endDate
self.description = description
def setBeginDate(self, date):
self.beginDate = date
def setEndDate(self, date):
self.endDate = date
def setLocation(self, location):
self.location = location
def setDescription(self, description):
self.description = description
class MetadataSingle:
'''Object that represents the metadata from a single biography'''
def __init__(self, idNr, name):
self.id = idNr
self.name = name
self.birth = Event('birth')
self.death = Event('death')
self.father = Name('')
self.mother = Name('')
self.education = []
self.occupation = []
self.gender = ''
self.religion = []
self.residence = []
self.otherEvents = []
self.otherStates = []
self.text = ''
def defineBirthDay(self, date, location=''):
self.birth.date = date
if location:
self.birth.location = location
def defineDeathDay(self, date, location=''):
self.death.date = date
if location:
self.death.location = location
def defineFather(self, name):
self.father = name
def defineMother(self, name):
self.mother = name
def addEducation(self, educEvent):
self.education.append(educEvent)
def addOccupation(self, occEvent):
self.occupation.append(occEvent)
def defineGender(self, gender):
self.gender = gender
def addReligion(self, religion):
self.religion.append(religion)
def addResidence(self, religion):
self.residence.append(religion)
def defineText(self, text):
self.text = text
class MetadataComplete:
'''Object that represents all available metadata for an individual. All except id number are represented as lists'''
def __init__(self, idNr):
self.id = idNr
self.name = []
self.birth = []
self.death = []
self.father = []
self.mother = []
self.education = []
self.occupation = []
self.gender = []
self.religion = []
self.otherEvents = []
self.otherStates = []
self.text = []
def addName(self, name):
self.name.append(name)
def addBirthDay(self, birthEvent):
self.birth.append(birthEvent)
def addDeathDay(self, deathEvent):
self.death.append(deathEvent)
def addFather(self, fatherName):
        self.father.append(fatherName)
def defineMother(self, motherName):
self.mother.append(motherName)
def addEducation(self, eduList):
self.education.append(eduList)
def addOccupation(self, occList):
self.occupation.append(occList)
def defineGender(self, gender):
self.gender.append(gender)
def addReligion(self, religionList):
self.religion.append(religionList)
    def addOtherEvents(self, otherElist):
        self.otherEvents.append(otherElist)

    def addOtherStates(self, otherSlist):
        self.otherStates.append(otherSlist)
def defineText(self, text):
self.text.append(text)
|
normal
|
{
"blob_id": "9609f23463aa4c7859a8db741c7f3badd78b8553",
"index": 6527,
"step-1": "#!/usr/bin/python\n\n'''Defines classes for representing metadata found in Biographies'''\n\n\n\nclass Date:\n '''Object to represent dates. Dates can consist of regular day-month-year, but also descriptions (before, after, ca.). Object has attributes for regular parts and one for description, default is empty string.'''\n\n def __init__( self, year='YY', month='YY', day='YY', description='', dateInterval = ''):\n self.year = year\n self.month = month\n self.day = day\n self.description = description\n self.interval = dateInterval\n\n\n def returnDate(self):\n myDate = self.year + '-' + self.month + '' + self.day\n if self.description:\n myDate += ' (' + self.description + ')'\n return myDate\n\n\n\nclass DateInterval:\n '''Object to represent date intervales. consists of a begin date and an end date, each of which can be underspecified'''\n def __init__(self, beginDate = '', endDate = ''):\n self.beginDate = beginDate\n self.endDate = endDate\n\n\nclass Name:\n '''Object to describe person names. It has fields for initials, first name, last name, infixes and titles.'''\n \n def __init__(self, lastname, firstname = '', initials = '', infix = ''):\n self.lastname = lastname\n self.firstname = firstname\n self.initials = initials\n self.infix = infix\n self.title = ''\n\n def addTitle(self, title):\n self.title = title\n\n def defineName(self, name):\n self.lastname = name\n\n def addFirstname(self, firstname):\n self.firstname = firstname\n\n def addInitials(self, initials):\n self.initials = initials\n \n def addInfix(self, infix):\n self.infix = infix\n\n def returnName(self):\n '''prefer full first name if known, else initials. If neither are known, this will be the empty string.'''\n if self.firstname:\n name = self.title + ' ' + self.firstname + ' ' + self.infix + self.lastname\n else:\n name = self.title + ' ' + self.initials + ' ' + self.infix + self.lastname\n return name\n\n\n\nclass Event:\n '''Object that can describe an event (time, place, description)'''\n \n def __init__(self, label, location = '', date = Date):\n self.label = label\n self.location = location\n self.date = date\n\n def setDate(self, date):\n self.date = date\n\n def setLocation(self, location):\n self.location = location\n\n\n\nclass State:\n '''Object that can describe a state (begin time, end time, place, description)'''\n \n def __init__(self, label, description = '', location = '', beginDate = Date, endDate = Date):\n self.label = label\n self.location = location\n self.beginDate = beginDate\n self.endDate = endDate\n self.description = description\n \n def setBeginDate(self, date):\n self.beginDate = date\n \n def setEndDate(self, date):\n self.endDate = date\n \n def setLocation(self, location):\n self.location = location\n\n def setDescription(self, description):\n self.description = description\n\nclass MetadataSingle:\n '''Object that represents the metadata from a single biography'''\n\n def __init__(self, idNr, name):\n self.id = idNr\n self.name = name\n self.birth = Event('birth')\n self.death = Event('death')\n self.father = Name('')\n self.mother = Name('')\n self.education = []\n self.occupation = []\n self.gender = ''\n self.religion = []\n self.residence = []\n self.otherEvents = []\n self.otherStates = []\n self.text = ''\n\n def defineBirthDay(self, date, location=''):\n self.birth.date = date\n if location:\n self.birth.location = location\n\n def defineDeathDay(self, date, location=''):\n self.death.date = date\n if location:\n self.death.location = location\n\n def 
defineFather(self, name):\n self.father = name\n\n def defineMother(self, name):\n self.mother = name\n\n def addEducation(self, educEvent):\n self.education.append(educEvent)\n\n def addOccupation(self, occEvent):\n self.occupation.append(occEvent)\n\n def defineGender(self, gender):\n self.gender = gender\n\n def addReligion(self, religion):\n self.religion.append(religion)\n \n def addResidence(self, religion):\n self.residence.append(religion)\n \n def defineText(self, text):\n self.text = text\n\n\nclass MetadataComplete:\n '''Object that represents all available metadata for an individual. All except id number are represented as lists'''\n \n def __init__(self, idNr):\n self.id = idNr\n self.name = []\n self.birth = []\n self.death = []\n self.father = []\n self.mother = []\n self.education = []\n self.occupation = []\n self.gender = []\n self.religion = []\n self.otherEvents = []\n self.otherStates = []\n self.text = []\n\n def addName(self, name):\n self.name.append(name)\n \n def addBirthDay(self, birthEvent):\n self.birth.append(birthEvent)\n \n def addDeathDay(self, deathEvent):\n self.death.append(deathEvent)\n \n def addFather(self, fatherName):\n self.father.append(name)\n \n def defineMother(self, motherName):\n self.mother.append(motherName)\n \n def addEducation(self, eduList):\n self.education.append(eduList)\n \n def addOccupation(self, occList):\n self.occupation.append(occList)\n \n def defineGender(self, gender):\n self.gender.append(gender)\n \n def addReligion(self, religionList):\n self.religion.append(religionList)\n \n def.addOtherEvents(self, otherElist):\n self.otherEvents.append(otherElist)\n \n def.addOtherStates(self, otherSlist):\n self.otherStates.append(otherSlist)\n\n def defineText(self, text):\n self.text.append(text)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from tkinter import *
from tkinter import filedialog
import sqlite3
class Gui:
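    """Tkinter front-end for browsing and editing records in the student_details.db SQLite database."""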
def __init__(self):
global en3
self.scr = Tk()
self.scr.geometry("2000x3000")
self.scr.title("VIEWING DATABASE")
self.connection = sqlite3.connect("student_details.db")
self.cursor = self.connection.cursor()
self.id = StringVar()
self.name1 = StringVar()
self.fathername = StringVar()
self.mothername = StringVar()
self.cont = StringVar()
self.email = StringVar()
self.f1 = Frame(self.scr, bg='brown1')
self.f1.pack(side=TOP)
self.left_frame = Frame(self.scr, bg='red')
self.left_frame.pack(side=LEFT, fill=Y)
self.right_frame = Frame(self.scr, width=3000, bg='yellow')
self.right_frame.pack(side=LEFT, fill=Y)
l = Label(self.right_frame, text="***************SHOW TABLE RECORDS IN A DATABASE******************",
font=('times', 25, 'bold'), bg="black", fg="white")
l.pack(side=TOP, fill=X)
scrollbar = Scrollbar(self.right_frame)
scrollbar.pack(side=RIGHT, fill=Y)
self.list = Listbox(self.right_frame, width=61, height=12, font=('times', 25, 'bold'),
yscrollcommand=scrollbar.set)
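        # Note: "student_list" below is not a standard Tk event name, so this binding never fires as written;
        # a selection event would normally be bound with "<<ListboxSelect>>".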
self.list.bind("student_list", self.show_records)
self.list.pack(side=TOP, fill=Y)
scrollbar.config(command=self.list.yview)
self.querry_frame = Frame(self.right_frame, width=81, height=5, bg="white")
self.querry_frame.pack(side=BOTTOM, fill=X)
self.en3 = Entry(self.querry_frame, font=('times', 25, 'bold'))
self.en3.pack(side=BOTTOM, fill=X)
b = Button(self.querry_frame, text="Enter",command=self.sample, font=('times', 25, 'bold'), bg="white", fg="black")
b.pack(side=RIGHT)
b1 = Button(self.querry_frame, text="Save", command=self.show_data, font=('times', 25, 'bold'), bg="white",
fg="black")
b1.pack(side=RIGHT)
b = Button(self.f1, text="OPEN", command=self.file, font=('times', 25, 'bold'), bg="white", fg="black")
b.pack(side=LEFT)
b = Button(self.f1, text="CREATE", command=self.create_table, font=('times', 25, 'bold'), bg="white",
fg="black")
b.pack(side=LEFT)
b1 = Button(self.f1, text="INSERT", command=self.add_record, font=('times', 25, 'bold'), bg="white",
fg="black")
b1.pack(side=LEFT)
b2 = Button(self.f1, text="DELETE", command=self.del_rec, font=('times', 25, 'bold'), bg="white",
fg="black")
b2.pack(side=LEFT)
b3 = Button(self.f1, text="UPDATE", command=self.update, font=('times', 25, 'bold'), bg="white",
fg="black")
b3.pack(side=RIGHT)
b4 = Button(self.f1, text="VIEW", command=lambda: self.view_table(), font=('times', 25, 'bold'), bg="white",
fg="black")
b4.pack(side=RIGHT)
b4 = Button(self.f1, text="BROWSE", command=self.show_data, font=('times', 25, 'bold'), bg="white",
fg="black")
b4.pack(side=RIGHT)
l = Label(self.left_frame, text="View Table in Database", font=('times', 25, 'bold'), bg='blue', fg='white')
l.pack(side=TOP, fill=X)
        # Make sure the user table exists before entering the Tk event loop (mainloop blocks until the window closes).
        try:
            self.cursor.execute("create table user(Id varchar(10),Name varchar(30),FathersName varchar(20),MothersName varchar(20),Contact varchar(10),Email varchar(30))")
            self.connection.commit()
        except:
            pass
        self.scr.mainloop()
def insert_data(self):
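        # Reads the six entry widgets created by add_record (module-level globals) and inserts one row.
        # Building SQL with str.format is vulnerable to injection; "?" placeholders would be the safer pattern.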
self.id = e.get()
self.name1 = e1.get()
self.fathername=e2.get()
self.mothername = e3.get()
self.cont = e4.get()
self.email = e5.get()
self.cursor.execute("insert into user values('{}','{}','{}','{}','{}','{}')".format(self.id,self.name1, self.fathername,self.mothername,self.cont , self.email))
self.connection.commit()
def show_data(self):
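        # Re-opens the database and appends every row of the user table to the listbox
        # (rows accumulate on repeated calls because the listbox is not cleared first).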
self.connection = sqlite3.connect("student_details.db")
self.cursor = self.connection.cursor()
self.cursor.execute("Select * from user")
rows = self.cursor.fetchall()
for row in rows:
l1 = self.list.insert(END, row)
self.connection.commit()
def update_data(self):
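        # Builds "UPDATE user SET <column> = <value> WHERE id = <id>" from the three entries of the update dialog.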
self.cursor.execute("Update user set {} = '{}' where id ='{}'".format(e2.get(),e3.get(),e.get()))
self.connection.commit()
self.list.delete(0, END)
self.show_data()
def update(self):
global e
global e2
global e3
self.top1 = Toplevel(self.scr)
self.top1.geometry("400x400")
l1 = Label(self.top1, text="USER_ID", font=('times', 25, 'bold'), bg="green2", fg="white")
l1.pack()
self.Id=StringVar()
e = Entry(self.top1, relief="sunken", textvariable=self.Id, font=('times', 25, 'bold'))
e.pack()
self.col_name=StringVar()
l2 = Label(self.top1, text="col_name", font=('times', 25, 'bold'), bg="green2", fg="white")
l2.pack()
e2 = Entry(self.top1, relief="sunken", textvariable=self.col_name, font=('times', 25, 'bold'))
e2.pack()
self.value=StringVar()
l3 = Label(self.top1, text="VALUE", font=('times', 25, 'bold'), bg="green2", fg="white")
l3.pack()
e3 = Entry(self.top1, relief="sunken", textvariable=self.value, font=('times', 25, 'bold'))
e3.pack()
b = Button(self.top1, text="UPDATE", command=self.update_data, font=('times', 25, 'bold'), bg="white",
fg="black")
b.pack()
self.top1.mainloop()
def delete_data(self):
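        # Deletes the row whose id matches the entry, then clears and repopulates the listbox.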
self.cursor.execute("Delete from user where id ='{}'".format(e.get()))
self.list.delete(0,END)
self.connection.commit()
self.show_data()
def del_rec(self):
global e
self.top2 = Toplevel(self.scr)
self.top2.geometry("400x400")
l1 = Label(self.top2, text="USER_ID", font=('times', 25, 'bold'), bg="green2", fg="white")
l1.pack()
self.Id = StringVar()
e = Entry(self.top2, relief="sunken", textvariable=self.Id, font=('times', 25, 'bold'))
e.pack()
b = Button(self.top2, text="delete records", command=self.delete_data, font=('times', 25, 'bold'), bg="white",
fg="black")
b.pack()
self.top2.mainloop()
def sample(self):
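        # Executes whatever SQL statement was typed into the query entry and lists the fetched rows.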
s=('{}'.format(self.en3.get()))
a=self.cursor.execute("{}".format(self.en3.get()))
r=self.cursor.fetchall()
for row in r:
self.list.insert(0,row)
self.connection.commit()
def file(self):
self.f1.filename = filedialog.askopenfilename( title="Select file")
p=self.f1.filename
self.list.insert(0,self.f1.filename)
def add_record(self):
global e
global e1
global e2
global e3
global e4
global e5
self.e = StringVar()
self.e1 = StringVar()
self.e2 = StringVar()
self.e3 = StringVar()
self.e4 = StringVar()
self.e5 = StringVar()
self.top=Toplevel(self.scr)
self.top.geometry("400x800")
l=Label(self.top,text="USER_ID",font=('times',25,'bold'),bg="green2",fg="white")
l.pack()
e=Entry(self.top,relief="sunken",textvariable=self.e,font=('times',25,'bold'))
e.pack()
l1 = Label(self.top, text="USERNAME", font=('times', 25, 'bold'), bg="green2", fg="white")
l1.pack()
e1 = Entry(self.top, relief="sunken",textvariable=self.e1, font=('times', 25, 'bold'))
e1.pack()
l2 = Label(self.top, text="FATHERS NAME", font=('times', 25, 'bold'), bg="green2", fg="white")
l2.pack()
e2 = Entry(self.top, relief="sunken",textvariable=self.e2, font=('times', 25, 'bold'))
e2.pack()
l3 = Label(self.top, text="MOTHERS NAME", font=('times', 25, 'bold'), bg="green2", fg="white")
l3.pack()
e3 = Entry(self.top, relief="sunken",textvariable=self.e3, font=('times', 25, 'bold'))
e3.pack()
l4 = Label(self.top, text="CONTACT NO", font=('times', 25, 'bold'), bg="green2", fg="white")
l4.pack()
e4 = Entry(self.top, relief="sunken",textvariable=self.e4, font=('times', 25, 'bold'))
e4.pack()
l5 = Label(self.top, text="E-MAIL ID", font=('times', 25, 'bold'), bg="green2", fg="white")
l5.pack()
e5 = Entry(self.top, relief="sunken",textvariable=self.e5, font=('times', 25, 'bold'))
e5.pack()
varchk=IntVar()
b = Button(self.top, text="SUBMIT", command=self.insert_data,font=('times', 25, 'bold'), bg="white",fg="black")
b.pack()
self.top.mainloop()
def view_table(self):
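        # Lists the known table names on the left; the Click button calls selection(), which reloads the user table rows.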
global list_box
self.list_box = Listbox(self.left_frame, font=('times', 20, 'bold'))
try:
self.list_box.insert(1,"user")
self.list_box.insert(2,self.tbl_name)
except:
pass
b=Button(self.left_frame,text="Click",font=('times', 20, 'bold'),command=self.selection,bg="white",fg="black")
b.place(x=100,y=400)
self.list_box.place(x=10,y=50)
def selection(self):
lb = self.list_box.curselection()
print(lb)
for i in list(lb):
self.show_data()
def show_records(self):
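        # Intended to copy the selected listbox row back into the form; self.id is a StringVar/str here,
        # so the delete/insert calls below would fail if this handler were ever triggered.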
global m
m=self.list.curselection()
m=self.list.get(m)
self.id.delete(0,END)
self.id.insert(END,self.add_record())
global table_name
def create_table(self):
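        # Opens a dialog asking for a table name and one field (name + type); OK assembles and runs the CREATE TABLE statement.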
self.top = Toplevel(self.scr)
self.top.geometry("400x800")
self.table_name=StringVar()
l=Label(self.top,text="Table",font=('times', 20, 'bold'),bg="white",fg="black")
l.pack()
e=Entry(self.top,textvariable=self.table_name,font=('times', 20, 'bold'))
e.pack()
b=Button(self.top,text="Add field",command=self.fun_show , font=('times', 20, 'bold'),bg="white",fg="black")
b.pack()
b=Button(self.top,text="OK",font=('times', 20, 'bold'),command=self.show_entered_data,bg="white",fg="black")
b.pack(side=RIGHT)
def show_entered_data(self):
global en1
global en2
global list1
global tbl_name
self.tbl_name=self.table_name.get()
self.en1=self.entry1.get()
self.en2=self.entry2.get()
sent="Create table "+str(self.tbl_name)+"('"+str(self.en1)+ " "+ str(self.en2)+"')"
list1 = Text(self.top, width=41, height=8, font=('times', 25, 'bold'))
list1.place(x=0,y=0)
list1.insert(0.0,sent)
print(self.tbl_name,self.en1,self.en2)
self.cursor.execute(sent)
self.list.insert(0,sent)
self.connection.commit()
def fun_show(self):
l = Label(self.top, text="Name", font=('times', 20, 'bold'), bg="white", fg="black")
l.pack(side=TOP)
self.entry1 = StringVar()
e1 = Entry(self.top, textvariable=self.entry1, font=('times', 20, 'bold'))
e1.pack()
l = Label(self.top, text="type", font=('times', 20, 'bold'), bg="white", fg="black")
l.pack(side=TOP)
self.entry2 = StringVar()
e1 = Entry(self.top, textvariable=self.entry2, font=('times', 20, 'bold'))
e1.pack()
Gui()
|
normal
|
{
"blob_id": "4c6b04716f41c3413896f0d59f2cc9b1475d7f64",
"index": 5164,
"step-1": "<mask token>\n\n\nclass Gui:\n <mask token>\n\n def insert_data(self):\n self.id = e.get()\n self.name1 = e1.get()\n self.fathername = e2.get()\n self.mothername = e3.get()\n self.cont = e4.get()\n self.email = e5.get()\n self.cursor.execute(\n \"insert into user values('{}','{}','{}','{}','{}','{}')\".format\n (self.id, self.name1, self.fathername, self.mothername, self.\n cont, self.email))\n self.connection.commit()\n <mask token>\n <mask token>\n\n def update(self):\n global e\n global e2\n global e3\n self.top1 = Toplevel(self.scr)\n self.top1.geometry('400x400')\n l1 = Label(self.top1, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top1, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n self.col_name = StringVar()\n l2 = Label(self.top1, text='col_name', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top1, relief='sunken', textvariable=self.col_name,\n font=('times', 25, 'bold'))\n e2.pack()\n self.value = StringVar()\n l3 = Label(self.top1, text='VALUE', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l3.pack()\n e3 = Entry(self.top1, relief='sunken', textvariable=self.value,\n font=('times', 25, 'bold'))\n e3.pack()\n b = Button(self.top1, text='UPDATE', command=self.update_data, font\n =('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top1.mainloop()\n\n def delete_data(self):\n self.cursor.execute(\"Delete from user where id ='{}'\".format(e.get()))\n self.list.delete(0, END)\n self.connection.commit()\n self.show_data()\n\n def del_rec(self):\n global e\n self.top2 = Toplevel(self.scr)\n self.top2.geometry('400x400')\n l1 = Label(self.top2, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top2, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n b = Button(self.top2, text='delete records', command=self.\n delete_data, font=('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top2.mainloop()\n\n def sample(self):\n s = '{}'.format(self.en3.get())\n a = self.cursor.execute('{}'.format(self.en3.get()))\n r = self.cursor.fetchall()\n for row in r:\n self.list.insert(0, row)\n self.connection.commit()\n <mask token>\n\n def add_record(self):\n global e\n global e1\n global e2\n global e3\n global e4\n global e5\n self.e = StringVar()\n self.e1 = StringVar()\n self.e2 = StringVar()\n self.e3 = StringVar()\n self.e4 = StringVar()\n self.e5 = StringVar()\n self.top = Toplevel(self.scr)\n self.top.geometry('400x800')\n l = Label(self.top, text='USER_ID', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l.pack()\n e = Entry(self.top, relief='sunken', textvariable=self.e, font=(\n 'times', 25, 'bold'))\n e.pack()\n l1 = Label(self.top, text='USERNAME', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n e1 = Entry(self.top, relief='sunken', textvariable=self.e1, font=(\n 'times', 25, 'bold'))\n e1.pack()\n l2 = Label(self.top, text='FATHERS NAME', font=('times', 25, 'bold'\n ), bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top, relief='sunken', textvariable=self.e2, font=(\n 'times', 25, 'bold'))\n e2.pack()\n l3 = Label(self.top, text='MOTHERS NAME', font=('times', 25, 'bold'\n ), bg='green2', fg='white')\n l3.pack()\n e3 = Entry(self.top, relief='sunken', textvariable=self.e3, font=(\n 'times', 25, 'bold'))\n e3.pack()\n l4 = Label(self.top, text='CONTACT 
NO', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l4.pack()\n e4 = Entry(self.top, relief='sunken', textvariable=self.e4, font=(\n 'times', 25, 'bold'))\n e4.pack()\n l5 = Label(self.top, text='E-MAIL ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l5.pack()\n e5 = Entry(self.top, relief='sunken', textvariable=self.e5, font=(\n 'times', 25, 'bold'))\n e5.pack()\n varchk = IntVar()\n b = Button(self.top, text='SUBMIT', command=self.insert_data, font=\n ('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top.mainloop()\n <mask token>\n\n def selection(self):\n lb = self.list_box.curselection()\n print(lb)\n for i in list(lb):\n self.show_data()\n <mask token>\n global table_name\n <mask token>\n\n def show_entered_data(self):\n global en1\n global en2\n global list1\n global tbl_name\n self.tbl_name = self.table_name.get()\n self.en1 = self.entry1.get()\n self.en2 = self.entry2.get()\n sent = 'Create table ' + str(self.tbl_name) + \"('\" + str(self.en1\n ) + ' ' + str(self.en2) + \"')\"\n list1 = Text(self.top, width=41, height=8, font=('times', 25, 'bold'))\n list1.place(x=0, y=0)\n list1.insert(0.0, sent)\n print(self.tbl_name, self.en1, self.en2)\n self.cursor.execute(sent)\n self.list.insert(0, sent)\n self.connection.commit()\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Gui:\n <mask token>\n\n def insert_data(self):\n self.id = e.get()\n self.name1 = e1.get()\n self.fathername = e2.get()\n self.mothername = e3.get()\n self.cont = e4.get()\n self.email = e5.get()\n self.cursor.execute(\n \"insert into user values('{}','{}','{}','{}','{}','{}')\".format\n (self.id, self.name1, self.fathername, self.mothername, self.\n cont, self.email))\n self.connection.commit()\n\n def show_data(self):\n self.connection = sqlite3.connect('student_details.db')\n self.cursor = self.connection.cursor()\n self.cursor.execute('Select * from user')\n rows = self.cursor.fetchall()\n for row in rows:\n l1 = self.list.insert(END, row)\n self.connection.commit()\n <mask token>\n\n def update(self):\n global e\n global e2\n global e3\n self.top1 = Toplevel(self.scr)\n self.top1.geometry('400x400')\n l1 = Label(self.top1, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top1, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n self.col_name = StringVar()\n l2 = Label(self.top1, text='col_name', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top1, relief='sunken', textvariable=self.col_name,\n font=('times', 25, 'bold'))\n e2.pack()\n self.value = StringVar()\n l3 = Label(self.top1, text='VALUE', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l3.pack()\n e3 = Entry(self.top1, relief='sunken', textvariable=self.value,\n font=('times', 25, 'bold'))\n e3.pack()\n b = Button(self.top1, text='UPDATE', command=self.update_data, font\n =('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top1.mainloop()\n\n def delete_data(self):\n self.cursor.execute(\"Delete from user where id ='{}'\".format(e.get()))\n self.list.delete(0, END)\n self.connection.commit()\n self.show_data()\n\n def del_rec(self):\n global e\n self.top2 = Toplevel(self.scr)\n self.top2.geometry('400x400')\n l1 = Label(self.top2, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top2, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n b = Button(self.top2, text='delete records', command=self.\n delete_data, font=('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top2.mainloop()\n\n def sample(self):\n s = '{}'.format(self.en3.get())\n a = self.cursor.execute('{}'.format(self.en3.get()))\n r = self.cursor.fetchall()\n for row in r:\n self.list.insert(0, row)\n self.connection.commit()\n\n def file(self):\n self.f1.filename = filedialog.askopenfilename(title='Select file')\n p = self.f1.filename\n self.list.insert(0, self.f1.filename)\n\n def add_record(self):\n global e\n global e1\n global e2\n global e3\n global e4\n global e5\n self.e = StringVar()\n self.e1 = StringVar()\n self.e2 = StringVar()\n self.e3 = StringVar()\n self.e4 = StringVar()\n self.e5 = StringVar()\n self.top = Toplevel(self.scr)\n self.top.geometry('400x800')\n l = Label(self.top, text='USER_ID', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l.pack()\n e = Entry(self.top, relief='sunken', textvariable=self.e, font=(\n 'times', 25, 'bold'))\n e.pack()\n l1 = Label(self.top, text='USERNAME', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n e1 = Entry(self.top, relief='sunken', textvariable=self.e1, font=(\n 'times', 25, 'bold'))\n e1.pack()\n l2 = Label(self.top, text='FATHERS NAME', font=('times', 25, 
'bold'\n ), bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top, relief='sunken', textvariable=self.e2, font=(\n 'times', 25, 'bold'))\n e2.pack()\n l3 = Label(self.top, text='MOTHERS NAME', font=('times', 25, 'bold'\n ), bg='green2', fg='white')\n l3.pack()\n e3 = Entry(self.top, relief='sunken', textvariable=self.e3, font=(\n 'times', 25, 'bold'))\n e3.pack()\n l4 = Label(self.top, text='CONTACT NO', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l4.pack()\n e4 = Entry(self.top, relief='sunken', textvariable=self.e4, font=(\n 'times', 25, 'bold'))\n e4.pack()\n l5 = Label(self.top, text='E-MAIL ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l5.pack()\n e5 = Entry(self.top, relief='sunken', textvariable=self.e5, font=(\n 'times', 25, 'bold'))\n e5.pack()\n varchk = IntVar()\n b = Button(self.top, text='SUBMIT', command=self.insert_data, font=\n ('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top.mainloop()\n <mask token>\n\n def selection(self):\n lb = self.list_box.curselection()\n print(lb)\n for i in list(lb):\n self.show_data()\n <mask token>\n global table_name\n\n def create_table(self):\n self.top = Toplevel(self.scr)\n self.top.geometry('400x800')\n self.table_name = StringVar()\n l = Label(self.top, text='Table', font=('times', 20, 'bold'), bg=\n 'white', fg='black')\n l.pack()\n e = Entry(self.top, textvariable=self.table_name, font=('times', 20,\n 'bold'))\n e.pack()\n b = Button(self.top, text='Add field', command=self.fun_show, font=\n ('times', 20, 'bold'), bg='white', fg='black')\n b.pack()\n b = Button(self.top, text='OK', font=('times', 20, 'bold'), command\n =self.show_entered_data, bg='white', fg='black')\n b.pack(side=RIGHT)\n\n def show_entered_data(self):\n global en1\n global en2\n global list1\n global tbl_name\n self.tbl_name = self.table_name.get()\n self.en1 = self.entry1.get()\n self.en2 = self.entry2.get()\n sent = 'Create table ' + str(self.tbl_name) + \"('\" + str(self.en1\n ) + ' ' + str(self.en2) + \"')\"\n list1 = Text(self.top, width=41, height=8, font=('times', 25, 'bold'))\n list1.place(x=0, y=0)\n list1.insert(0.0, sent)\n print(self.tbl_name, self.en1, self.en2)\n self.cursor.execute(sent)\n self.list.insert(0, sent)\n self.connection.commit()\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Gui:\n\n def __init__(self):\n global en3\n self.scr = Tk()\n self.scr.geometry('2000x3000')\n self.scr.title('VIEWING DATABASE')\n self.connection = sqlite3.connect('student_details.db')\n self.cursor = self.connection.cursor()\n self.id = StringVar()\n self.name1 = StringVar()\n self.fathername = StringVar()\n self.mothername = StringVar()\n self.cont = StringVar()\n self.email = StringVar()\n self.f1 = Frame(self.scr, bg='brown1')\n self.f1.pack(side=TOP)\n self.left_frame = Frame(self.scr, bg='red')\n self.left_frame.pack(side=LEFT, fill=Y)\n self.right_frame = Frame(self.scr, width=3000, bg='yellow')\n self.right_frame.pack(side=LEFT, fill=Y)\n l = Label(self.right_frame, text=\n '***************SHOW TABLE RECORDS IN A DATABASE******************'\n , font=('times', 25, 'bold'), bg='black', fg='white')\n l.pack(side=TOP, fill=X)\n scrollbar = Scrollbar(self.right_frame)\n scrollbar.pack(side=RIGHT, fill=Y)\n self.list = Listbox(self.right_frame, width=61, height=12, font=(\n 'times', 25, 'bold'), yscrollcommand=scrollbar.set)\n self.list.bind('student_list', self.show_records)\n self.list.pack(side=TOP, fill=Y)\n scrollbar.config(command=self.list.yview)\n self.querry_frame = Frame(self.right_frame, width=81, height=5, bg=\n 'white')\n self.querry_frame.pack(side=BOTTOM, fill=X)\n self.en3 = Entry(self.querry_frame, font=('times', 25, 'bold'))\n self.en3.pack(side=BOTTOM, fill=X)\n b = Button(self.querry_frame, text='Enter', command=self.sample,\n font=('times', 25, 'bold'), bg='white', fg='black')\n b.pack(side=RIGHT)\n b1 = Button(self.querry_frame, text='Save', command=self.show_data,\n font=('times', 25, 'bold'), bg='white', fg='black')\n b1.pack(side=RIGHT)\n b = Button(self.f1, text='OPEN', command=self.file, font=('times', \n 25, 'bold'), bg='white', fg='black')\n b.pack(side=LEFT)\n b = Button(self.f1, text='CREATE', command=self.create_table, font=\n ('times', 25, 'bold'), bg='white', fg='black')\n b.pack(side=LEFT)\n b1 = Button(self.f1, text='INSERT', command=self.add_record, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b1.pack(side=LEFT)\n b2 = Button(self.f1, text='DELETE', command=self.del_rec, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b2.pack(side=LEFT)\n b3 = Button(self.f1, text='UPDATE', command=self.update, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b3.pack(side=RIGHT)\n b4 = Button(self.f1, text='VIEW', command=lambda : self.view_table(\n ), font=('times', 25, 'bold'), bg='white', fg='black')\n b4.pack(side=RIGHT)\n b4 = Button(self.f1, text='BROWSE', command=self.show_data, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b4.pack(side=RIGHT)\n l = Label(self.left_frame, text='View Table in Database', font=(\n 'times', 25, 'bold'), bg='blue', fg='white')\n l.pack(side=TOP, fill=X)\n self.scr.mainloop()\n try:\n self.cursor.execute(\n 'create table user(Id varchar(10),Name varchar(30),FathersName varchar(20),MothersName varchar(20),Contact varchar(10),Email varchar(30))'\n )\n self.connection.commit()\n except:\n pass\n\n def insert_data(self):\n self.id = e.get()\n self.name1 = e1.get()\n self.fathername = e2.get()\n self.mothername = e3.get()\n self.cont = e4.get()\n self.email = e5.get()\n self.cursor.execute(\n \"insert into user values('{}','{}','{}','{}','{}','{}')\".format\n (self.id, self.name1, self.fathername, self.mothername, self.\n cont, self.email))\n self.connection.commit()\n\n def show_data(self):\n self.connection = sqlite3.connect('student_details.db')\n self.cursor 
= self.connection.cursor()\n self.cursor.execute('Select * from user')\n rows = self.cursor.fetchall()\n for row in rows:\n l1 = self.list.insert(END, row)\n self.connection.commit()\n\n def update_data(self):\n self.cursor.execute(\"Update user set {} = '{}' where id ='{}'\".\n format(e2.get(), e3.get(), e.get()))\n self.connection.commit()\n self.list.delete(0, END)\n self.show_data()\n\n def update(self):\n global e\n global e2\n global e3\n self.top1 = Toplevel(self.scr)\n self.top1.geometry('400x400')\n l1 = Label(self.top1, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top1, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n self.col_name = StringVar()\n l2 = Label(self.top1, text='col_name', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top1, relief='sunken', textvariable=self.col_name,\n font=('times', 25, 'bold'))\n e2.pack()\n self.value = StringVar()\n l3 = Label(self.top1, text='VALUE', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l3.pack()\n e3 = Entry(self.top1, relief='sunken', textvariable=self.value,\n font=('times', 25, 'bold'))\n e3.pack()\n b = Button(self.top1, text='UPDATE', command=self.update_data, font\n =('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top1.mainloop()\n\n def delete_data(self):\n self.cursor.execute(\"Delete from user where id ='{}'\".format(e.get()))\n self.list.delete(0, END)\n self.connection.commit()\n self.show_data()\n\n def del_rec(self):\n global e\n self.top2 = Toplevel(self.scr)\n self.top2.geometry('400x400')\n l1 = Label(self.top2, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top2, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n b = Button(self.top2, text='delete records', command=self.\n delete_data, font=('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top2.mainloop()\n\n def sample(self):\n s = '{}'.format(self.en3.get())\n a = self.cursor.execute('{}'.format(self.en3.get()))\n r = self.cursor.fetchall()\n for row in r:\n self.list.insert(0, row)\n self.connection.commit()\n\n def file(self):\n self.f1.filename = filedialog.askopenfilename(title='Select file')\n p = self.f1.filename\n self.list.insert(0, self.f1.filename)\n\n def add_record(self):\n global e\n global e1\n global e2\n global e3\n global e4\n global e5\n self.e = StringVar()\n self.e1 = StringVar()\n self.e2 = StringVar()\n self.e3 = StringVar()\n self.e4 = StringVar()\n self.e5 = StringVar()\n self.top = Toplevel(self.scr)\n self.top.geometry('400x800')\n l = Label(self.top, text='USER_ID', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l.pack()\n e = Entry(self.top, relief='sunken', textvariable=self.e, font=(\n 'times', 25, 'bold'))\n e.pack()\n l1 = Label(self.top, text='USERNAME', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n e1 = Entry(self.top, relief='sunken', textvariable=self.e1, font=(\n 'times', 25, 'bold'))\n e1.pack()\n l2 = Label(self.top, text='FATHERS NAME', font=('times', 25, 'bold'\n ), bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top, relief='sunken', textvariable=self.e2, font=(\n 'times', 25, 'bold'))\n e2.pack()\n l3 = Label(self.top, text='MOTHERS NAME', font=('times', 25, 'bold'\n ), bg='green2', fg='white')\n l3.pack()\n e3 = Entry(self.top, relief='sunken', textvariable=self.e3, font=(\n 'times', 
25, 'bold'))\n e3.pack()\n l4 = Label(self.top, text='CONTACT NO', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l4.pack()\n e4 = Entry(self.top, relief='sunken', textvariable=self.e4, font=(\n 'times', 25, 'bold'))\n e4.pack()\n l5 = Label(self.top, text='E-MAIL ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l5.pack()\n e5 = Entry(self.top, relief='sunken', textvariable=self.e5, font=(\n 'times', 25, 'bold'))\n e5.pack()\n varchk = IntVar()\n b = Button(self.top, text='SUBMIT', command=self.insert_data, font=\n ('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top.mainloop()\n\n def view_table(self):\n global list_box\n self.list_box = Listbox(self.left_frame, font=('times', 20, 'bold'))\n try:\n self.list_box.insert(1, 'user')\n self.list_box.insert(2, self.tbl_name)\n except:\n pass\n b = Button(self.left_frame, text='Click', font=('times', 20, 'bold'\n ), command=self.selection, bg='white', fg='black')\n b.place(x=100, y=400)\n self.list_box.place(x=10, y=50)\n\n def selection(self):\n lb = self.list_box.curselection()\n print(lb)\n for i in list(lb):\n self.show_data()\n\n def show_records(self):\n global m\n m = self.list.curselection()\n m = self.list.get(m)\n self.id.delete(0, END)\n self.id.insert(END, self.add_record())\n global table_name\n\n def create_table(self):\n self.top = Toplevel(self.scr)\n self.top.geometry('400x800')\n self.table_name = StringVar()\n l = Label(self.top, text='Table', font=('times', 20, 'bold'), bg=\n 'white', fg='black')\n l.pack()\n e = Entry(self.top, textvariable=self.table_name, font=('times', 20,\n 'bold'))\n e.pack()\n b = Button(self.top, text='Add field', command=self.fun_show, font=\n ('times', 20, 'bold'), bg='white', fg='black')\n b.pack()\n b = Button(self.top, text='OK', font=('times', 20, 'bold'), command\n =self.show_entered_data, bg='white', fg='black')\n b.pack(side=RIGHT)\n\n def show_entered_data(self):\n global en1\n global en2\n global list1\n global tbl_name\n self.tbl_name = self.table_name.get()\n self.en1 = self.entry1.get()\n self.en2 = self.entry2.get()\n sent = 'Create table ' + str(self.tbl_name) + \"('\" + str(self.en1\n ) + ' ' + str(self.en2) + \"')\"\n list1 = Text(self.top, width=41, height=8, font=('times', 25, 'bold'))\n list1.place(x=0, y=0)\n list1.insert(0.0, sent)\n print(self.tbl_name, self.en1, self.en2)\n self.cursor.execute(sent)\n self.list.insert(0, sent)\n self.connection.commit()\n\n def fun_show(self):\n l = Label(self.top, text='Name', font=('times', 20, 'bold'), bg=\n 'white', fg='black')\n l.pack(side=TOP)\n self.entry1 = StringVar()\n e1 = Entry(self.top, textvariable=self.entry1, font=('times', 20,\n 'bold'))\n e1.pack()\n l = Label(self.top, text='type', font=('times', 20, 'bold'), bg=\n 'white', fg='black')\n l.pack(side=TOP)\n self.entry2 = StringVar()\n e1 = Entry(self.top, textvariable=self.entry2, font=('times', 20,\n 'bold'))\n e1.pack()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Gui:\n\n def __init__(self):\n global en3\n self.scr = Tk()\n self.scr.geometry('2000x3000')\n self.scr.title('VIEWING DATABASE')\n self.connection = sqlite3.connect('student_details.db')\n self.cursor = self.connection.cursor()\n self.id = StringVar()\n self.name1 = StringVar()\n self.fathername = StringVar()\n self.mothername = StringVar()\n self.cont = StringVar()\n self.email = StringVar()\n self.f1 = Frame(self.scr, bg='brown1')\n self.f1.pack(side=TOP)\n self.left_frame = Frame(self.scr, bg='red')\n self.left_frame.pack(side=LEFT, fill=Y)\n self.right_frame = Frame(self.scr, width=3000, bg='yellow')\n self.right_frame.pack(side=LEFT, fill=Y)\n l = Label(self.right_frame, text=\n '***************SHOW TABLE RECORDS IN A DATABASE******************'\n , font=('times', 25, 'bold'), bg='black', fg='white')\n l.pack(side=TOP, fill=X)\n scrollbar = Scrollbar(self.right_frame)\n scrollbar.pack(side=RIGHT, fill=Y)\n self.list = Listbox(self.right_frame, width=61, height=12, font=(\n 'times', 25, 'bold'), yscrollcommand=scrollbar.set)\n self.list.bind('student_list', self.show_records)\n self.list.pack(side=TOP, fill=Y)\n scrollbar.config(command=self.list.yview)\n self.querry_frame = Frame(self.right_frame, width=81, height=5, bg=\n 'white')\n self.querry_frame.pack(side=BOTTOM, fill=X)\n self.en3 = Entry(self.querry_frame, font=('times', 25, 'bold'))\n self.en3.pack(side=BOTTOM, fill=X)\n b = Button(self.querry_frame, text='Enter', command=self.sample,\n font=('times', 25, 'bold'), bg='white', fg='black')\n b.pack(side=RIGHT)\n b1 = Button(self.querry_frame, text='Save', command=self.show_data,\n font=('times', 25, 'bold'), bg='white', fg='black')\n b1.pack(side=RIGHT)\n b = Button(self.f1, text='OPEN', command=self.file, font=('times', \n 25, 'bold'), bg='white', fg='black')\n b.pack(side=LEFT)\n b = Button(self.f1, text='CREATE', command=self.create_table, font=\n ('times', 25, 'bold'), bg='white', fg='black')\n b.pack(side=LEFT)\n b1 = Button(self.f1, text='INSERT', command=self.add_record, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b1.pack(side=LEFT)\n b2 = Button(self.f1, text='DELETE', command=self.del_rec, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b2.pack(side=LEFT)\n b3 = Button(self.f1, text='UPDATE', command=self.update, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b3.pack(side=RIGHT)\n b4 = Button(self.f1, text='VIEW', command=lambda : self.view_table(\n ), font=('times', 25, 'bold'), bg='white', fg='black')\n b4.pack(side=RIGHT)\n b4 = Button(self.f1, text='BROWSE', command=self.show_data, font=(\n 'times', 25, 'bold'), bg='white', fg='black')\n b4.pack(side=RIGHT)\n l = Label(self.left_frame, text='View Table in Database', font=(\n 'times', 25, 'bold'), bg='blue', fg='white')\n l.pack(side=TOP, fill=X)\n self.scr.mainloop()\n try:\n self.cursor.execute(\n 'create table user(Id varchar(10),Name varchar(30),FathersName varchar(20),MothersName varchar(20),Contact varchar(10),Email varchar(30))'\n )\n self.connection.commit()\n except:\n pass\n\n def insert_data(self):\n self.id = e.get()\n self.name1 = e1.get()\n self.fathername = e2.get()\n self.mothername = e3.get()\n self.cont = e4.get()\n self.email = e5.get()\n self.cursor.execute(\n \"insert into user values('{}','{}','{}','{}','{}','{}')\".format\n (self.id, self.name1, self.fathername, self.mothername, self.\n cont, self.email))\n self.connection.commit()\n\n def show_data(self):\n self.connection = sqlite3.connect('student_details.db')\n self.cursor 
= self.connection.cursor()\n self.cursor.execute('Select * from user')\n rows = self.cursor.fetchall()\n for row in rows:\n l1 = self.list.insert(END, row)\n self.connection.commit()\n\n def update_data(self):\n self.cursor.execute(\"Update user set {} = '{}' where id ='{}'\".\n format(e2.get(), e3.get(), e.get()))\n self.connection.commit()\n self.list.delete(0, END)\n self.show_data()\n\n def update(self):\n global e\n global e2\n global e3\n self.top1 = Toplevel(self.scr)\n self.top1.geometry('400x400')\n l1 = Label(self.top1, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top1, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n self.col_name = StringVar()\n l2 = Label(self.top1, text='col_name', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top1, relief='sunken', textvariable=self.col_name,\n font=('times', 25, 'bold'))\n e2.pack()\n self.value = StringVar()\n l3 = Label(self.top1, text='VALUE', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l3.pack()\n e3 = Entry(self.top1, relief='sunken', textvariable=self.value,\n font=('times', 25, 'bold'))\n e3.pack()\n b = Button(self.top1, text='UPDATE', command=self.update_data, font\n =('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top1.mainloop()\n\n def delete_data(self):\n self.cursor.execute(\"Delete from user where id ='{}'\".format(e.get()))\n self.list.delete(0, END)\n self.connection.commit()\n self.show_data()\n\n def del_rec(self):\n global e\n self.top2 = Toplevel(self.scr)\n self.top2.geometry('400x400')\n l1 = Label(self.top2, text='USER_ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n self.Id = StringVar()\n e = Entry(self.top2, relief='sunken', textvariable=self.Id, font=(\n 'times', 25, 'bold'))\n e.pack()\n b = Button(self.top2, text='delete records', command=self.\n delete_data, font=('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top2.mainloop()\n\n def sample(self):\n s = '{}'.format(self.en3.get())\n a = self.cursor.execute('{}'.format(self.en3.get()))\n r = self.cursor.fetchall()\n for row in r:\n self.list.insert(0, row)\n self.connection.commit()\n\n def file(self):\n self.f1.filename = filedialog.askopenfilename(title='Select file')\n p = self.f1.filename\n self.list.insert(0, self.f1.filename)\n\n def add_record(self):\n global e\n global e1\n global e2\n global e3\n global e4\n global e5\n self.e = StringVar()\n self.e1 = StringVar()\n self.e2 = StringVar()\n self.e3 = StringVar()\n self.e4 = StringVar()\n self.e5 = StringVar()\n self.top = Toplevel(self.scr)\n self.top.geometry('400x800')\n l = Label(self.top, text='USER_ID', font=('times', 25, 'bold'), bg=\n 'green2', fg='white')\n l.pack()\n e = Entry(self.top, relief='sunken', textvariable=self.e, font=(\n 'times', 25, 'bold'))\n e.pack()\n l1 = Label(self.top, text='USERNAME', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l1.pack()\n e1 = Entry(self.top, relief='sunken', textvariable=self.e1, font=(\n 'times', 25, 'bold'))\n e1.pack()\n l2 = Label(self.top, text='FATHERS NAME', font=('times', 25, 'bold'\n ), bg='green2', fg='white')\n l2.pack()\n e2 = Entry(self.top, relief='sunken', textvariable=self.e2, font=(\n 'times', 25, 'bold'))\n e2.pack()\n l3 = Label(self.top, text='MOTHERS NAME', font=('times', 25, 'bold'\n ), bg='green2', fg='white')\n l3.pack()\n e3 = Entry(self.top, relief='sunken', textvariable=self.e3, font=(\n 'times', 
25, 'bold'))\n e3.pack()\n l4 = Label(self.top, text='CONTACT NO', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l4.pack()\n e4 = Entry(self.top, relief='sunken', textvariable=self.e4, font=(\n 'times', 25, 'bold'))\n e4.pack()\n l5 = Label(self.top, text='E-MAIL ID', font=('times', 25, 'bold'),\n bg='green2', fg='white')\n l5.pack()\n e5 = Entry(self.top, relief='sunken', textvariable=self.e5, font=(\n 'times', 25, 'bold'))\n e5.pack()\n varchk = IntVar()\n b = Button(self.top, text='SUBMIT', command=self.insert_data, font=\n ('times', 25, 'bold'), bg='white', fg='black')\n b.pack()\n self.top.mainloop()\n\n def view_table(self):\n global list_box\n self.list_box = Listbox(self.left_frame, font=('times', 20, 'bold'))\n try:\n self.list_box.insert(1, 'user')\n self.list_box.insert(2, self.tbl_name)\n except:\n pass\n b = Button(self.left_frame, text='Click', font=('times', 20, 'bold'\n ), command=self.selection, bg='white', fg='black')\n b.place(x=100, y=400)\n self.list_box.place(x=10, y=50)\n\n def selection(self):\n lb = self.list_box.curselection()\n print(lb)\n for i in list(lb):\n self.show_data()\n\n def show_records(self):\n global m\n m = self.list.curselection()\n m = self.list.get(m)\n self.id.delete(0, END)\n self.id.insert(END, self.add_record())\n global table_name\n\n def create_table(self):\n self.top = Toplevel(self.scr)\n self.top.geometry('400x800')\n self.table_name = StringVar()\n l = Label(self.top, text='Table', font=('times', 20, 'bold'), bg=\n 'white', fg='black')\n l.pack()\n e = Entry(self.top, textvariable=self.table_name, font=('times', 20,\n 'bold'))\n e.pack()\n b = Button(self.top, text='Add field', command=self.fun_show, font=\n ('times', 20, 'bold'), bg='white', fg='black')\n b.pack()\n b = Button(self.top, text='OK', font=('times', 20, 'bold'), command\n =self.show_entered_data, bg='white', fg='black')\n b.pack(side=RIGHT)\n\n def show_entered_data(self):\n global en1\n global en2\n global list1\n global tbl_name\n self.tbl_name = self.table_name.get()\n self.en1 = self.entry1.get()\n self.en2 = self.entry2.get()\n sent = 'Create table ' + str(self.tbl_name) + \"('\" + str(self.en1\n ) + ' ' + str(self.en2) + \"')\"\n list1 = Text(self.top, width=41, height=8, font=('times', 25, 'bold'))\n list1.place(x=0, y=0)\n list1.insert(0.0, sent)\n print(self.tbl_name, self.en1, self.en2)\n self.cursor.execute(sent)\n self.list.insert(0, sent)\n self.connection.commit()\n\n def fun_show(self):\n l = Label(self.top, text='Name', font=('times', 20, 'bold'), bg=\n 'white', fg='black')\n l.pack(side=TOP)\n self.entry1 = StringVar()\n e1 = Entry(self.top, textvariable=self.entry1, font=('times', 20,\n 'bold'))\n e1.pack()\n l = Label(self.top, text='type', font=('times', 20, 'bold'), bg=\n 'white', fg='black')\n l.pack(side=TOP)\n self.entry2 = StringVar()\n e1 = Entry(self.top, textvariable=self.entry2, font=('times', 20,\n 'bold'))\n e1.pack()\n\n\nGui()\n",
"step-5": "from tkinter import*\r\nfrom tkinter import filedialog\r\nimport sqlite3\r\n\r\nclass Gui:\r\n def __init__(self):\r\n global en3\r\n self.scr = Tk()\r\n self.scr.geometry(\"2000x3000\")\r\n self.scr.title(\"VIEWING DATABASE\")\r\n self.connection = sqlite3.connect(\"student_details.db\")\r\n self.cursor = self.connection.cursor()\r\n self.id = StringVar()\r\n self.name1 = StringVar()\r\n self.fathername = StringVar()\r\n self.mothername = StringVar()\r\n self.cont = StringVar()\r\n self.email = StringVar()\r\n self.f1 = Frame(self.scr, bg='brown1')\r\n self.f1.pack(side=TOP)\r\n self.left_frame = Frame(self.scr, bg='red')\r\n self.left_frame.pack(side=LEFT, fill=Y)\r\n self.right_frame = Frame(self.scr, width=3000, bg='yellow')\r\n self.right_frame.pack(side=LEFT, fill=Y)\r\n l = Label(self.right_frame, text=\"***************SHOW TABLE RECORDS IN A DATABASE******************\",\r\n font=('times', 25, 'bold'), bg=\"black\", fg=\"white\")\r\n l.pack(side=TOP, fill=X)\r\n scrollbar = Scrollbar(self.right_frame)\r\n scrollbar.pack(side=RIGHT, fill=Y)\r\n self.list = Listbox(self.right_frame, width=61, height=12, font=('times', 25, 'bold'),\r\n yscrollcommand=scrollbar.set)\r\n self.list.bind(\"student_list\", self.show_records)\r\n self.list.pack(side=TOP, fill=Y)\r\n scrollbar.config(command=self.list.yview)\r\n self.querry_frame = Frame(self.right_frame, width=81, height=5, bg=\"white\")\r\n self.querry_frame.pack(side=BOTTOM, fill=X)\r\n self.en3 = Entry(self.querry_frame, font=('times', 25, 'bold'))\r\n self.en3.pack(side=BOTTOM, fill=X)\r\n b = Button(self.querry_frame, text=\"Enter\",command=self.sample, font=('times', 25, 'bold'), bg=\"white\", fg=\"black\")\r\n b.pack(side=RIGHT)\r\n b1 = Button(self.querry_frame, text=\"Save\", command=self.show_data, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b1.pack(side=RIGHT)\r\n b = Button(self.f1, text=\"OPEN\", command=self.file, font=('times', 25, 'bold'), bg=\"white\", fg=\"black\")\r\n b.pack(side=LEFT)\r\n b = Button(self.f1, text=\"CREATE\", command=self.create_table, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b.pack(side=LEFT)\r\n b1 = Button(self.f1, text=\"INSERT\", command=self.add_record, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b1.pack(side=LEFT)\r\n b2 = Button(self.f1, text=\"DELETE\", command=self.del_rec, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b2.pack(side=LEFT)\r\n b3 = Button(self.f1, text=\"UPDATE\", command=self.update, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b3.pack(side=RIGHT)\r\n b4 = Button(self.f1, text=\"VIEW\", command=lambda: self.view_table(), font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b4.pack(side=RIGHT)\r\n b4 = Button(self.f1, text=\"BROWSE\", command=self.show_data, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b4.pack(side=RIGHT)\r\n l = Label(self.left_frame, text=\"View Table in Database\", font=('times', 25, 'bold'), bg='blue', fg='white')\r\n l.pack(side=TOP, fill=X)\r\n\r\n self.scr.mainloop()\r\n\r\n try:\r\n self.cursor.execute(\"create table user(Id varchar(10),Name varchar(30),FathersName varchar(20),MothersName varchar(20),Contact varchar(10),Email varchar(30))\")\r\n self.connection.commit()\r\n except:\r\n pass\r\n\r\n def insert_data(self):\r\n self.id = e.get()\r\n self.name1 = e1.get()\r\n self.fathername=e2.get()\r\n self.mothername = e3.get()\r\n self.cont = e4.get()\r\n self.email = e5.get()\r\n self.cursor.execute(\"insert into 
user values('{}','{}','{}','{}','{}','{}')\".format(self.id,self.name1, self.fathername,self.mothername,self.cont , self.email))\r\n self.connection.commit()\r\n\r\n\r\n def show_data(self):\r\n self.connection = sqlite3.connect(\"student_details.db\")\r\n self.cursor = self.connection.cursor()\r\n self.cursor.execute(\"Select * from user\")\r\n rows = self.cursor.fetchall()\r\n for row in rows:\r\n l1 = self.list.insert(END, row)\r\n self.connection.commit()\r\n\r\n def update_data(self):\r\n self.cursor.execute(\"Update user set {} = '{}' where id ='{}'\".format(e2.get(),e3.get(),e.get()))\r\n self.connection.commit()\r\n self.list.delete(0, END)\r\n self.show_data()\r\n\r\n def update(self):\r\n global e\r\n global e2\r\n global e3\r\n self.top1 = Toplevel(self.scr)\r\n self.top1.geometry(\"400x400\")\r\n l1 = Label(self.top1, text=\"USER_ID\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l1.pack()\r\n self.Id=StringVar()\r\n e = Entry(self.top1, relief=\"sunken\", textvariable=self.Id, font=('times', 25, 'bold'))\r\n e.pack()\r\n self.col_name=StringVar()\r\n l2 = Label(self.top1, text=\"col_name\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l2.pack()\r\n e2 = Entry(self.top1, relief=\"sunken\", textvariable=self.col_name, font=('times', 25, 'bold'))\r\n e2.pack()\r\n self.value=StringVar()\r\n l3 = Label(self.top1, text=\"VALUE\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l3.pack()\r\n e3 = Entry(self.top1, relief=\"sunken\", textvariable=self.value, font=('times', 25, 'bold'))\r\n e3.pack()\r\n b = Button(self.top1, text=\"UPDATE\", command=self.update_data, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b.pack()\r\n\r\n self.top1.mainloop()\r\n\r\n def delete_data(self):\r\n self.cursor.execute(\"Delete from user where id ='{}'\".format(e.get()))\r\n self.list.delete(0,END)\r\n self.connection.commit()\r\n self.show_data()\r\n\r\n def del_rec(self):\r\n global e\r\n self.top2 = Toplevel(self.scr)\r\n self.top2.geometry(\"400x400\")\r\n l1 = Label(self.top2, text=\"USER_ID\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l1.pack()\r\n self.Id = StringVar()\r\n e = Entry(self.top2, relief=\"sunken\", textvariable=self.Id, font=('times', 25, 'bold'))\r\n e.pack()\r\n b = Button(self.top2, text=\"delete records\", command=self.delete_data, font=('times', 25, 'bold'), bg=\"white\",\r\n fg=\"black\")\r\n b.pack()\r\n self.top2.mainloop()\r\n\r\n def sample(self):\r\n s=('{}'.format(self.en3.get()))\r\n a=self.cursor.execute(\"{}\".format(self.en3.get()))\r\n r=self.cursor.fetchall()\r\n for row in r:\r\n self.list.insert(0,row)\r\n self.connection.commit()\r\n\r\n\r\n\r\n def file(self):\r\n self.f1.filename = filedialog.askopenfilename( title=\"Select file\")\r\n p=self.f1.filename\r\n self.list.insert(0,self.f1.filename)\r\n\r\n def add_record(self):\r\n global e\r\n global e1\r\n global e2\r\n global e3\r\n global e4\r\n global e5\r\n self.e = StringVar()\r\n self.e1 = StringVar()\r\n self.e2 = StringVar()\r\n self.e3 = StringVar()\r\n self.e4 = StringVar()\r\n self.e5 = StringVar()\r\n self.top=Toplevel(self.scr)\r\n self.top.geometry(\"400x800\")\r\n l=Label(self.top,text=\"USER_ID\",font=('times',25,'bold'),bg=\"green2\",fg=\"white\")\r\n l.pack()\r\n e=Entry(self.top,relief=\"sunken\",textvariable=self.e,font=('times',25,'bold'))\r\n e.pack()\r\n l1 = Label(self.top, text=\"USERNAME\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l1.pack()\r\n e1 = Entry(self.top, 
relief=\"sunken\",textvariable=self.e1, font=('times', 25, 'bold'))\r\n e1.pack()\r\n l2 = Label(self.top, text=\"FATHERS NAME\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l2.pack()\r\n e2 = Entry(self.top, relief=\"sunken\",textvariable=self.e2, font=('times', 25, 'bold'))\r\n e2.pack()\r\n l3 = Label(self.top, text=\"MOTHERS NAME\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l3.pack()\r\n e3 = Entry(self.top, relief=\"sunken\",textvariable=self.e3, font=('times', 25, 'bold'))\r\n e3.pack()\r\n l4 = Label(self.top, text=\"CONTACT NO\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l4.pack()\r\n e4 = Entry(self.top, relief=\"sunken\",textvariable=self.e4, font=('times', 25, 'bold'))\r\n e4.pack()\r\n l5 = Label(self.top, text=\"E-MAIL ID\", font=('times', 25, 'bold'), bg=\"green2\", fg=\"white\")\r\n l5.pack()\r\n e5 = Entry(self.top, relief=\"sunken\",textvariable=self.e5, font=('times', 25, 'bold'))\r\n e5.pack()\r\n varchk=IntVar()\r\n b = Button(self.top, text=\"SUBMIT\", command=self.insert_data,font=('times', 25, 'bold'), bg=\"white\",fg=\"black\")\r\n b.pack()\r\n self.top.mainloop()\r\n\r\n\r\n def view_table(self):\r\n global list_box\r\n self.list_box = Listbox(self.left_frame, font=('times', 20, 'bold'))\r\n\r\n try:\r\n\r\n self.list_box.insert(1,\"user\")\r\n self.list_box.insert(2,self.tbl_name)\r\n except:\r\n pass\r\n b=Button(self.left_frame,text=\"Click\",font=('times', 20, 'bold'),command=self.selection,bg=\"white\",fg=\"black\")\r\n b.place(x=100,y=400)\r\n self.list_box.place(x=10,y=50)\r\n\r\n def selection(self):\r\n lb = self.list_box.curselection()\r\n print(lb)\r\n for i in list(lb):\r\n self.show_data()\r\n\r\n def show_records(self):\r\n global m\r\n m=self.list.curselection()\r\n m=self.list.get(m)\r\n self.id.delete(0,END)\r\n self.id.insert(END,self.add_record())\r\n\r\n global table_name\r\n\r\n def create_table(self):\r\n self.top = Toplevel(self.scr)\r\n self.top.geometry(\"400x800\")\r\n self.table_name=StringVar()\r\n l=Label(self.top,text=\"Table\",font=('times', 20, 'bold'),bg=\"white\",fg=\"black\")\r\n l.pack()\r\n e=Entry(self.top,textvariable=self.table_name,font=('times', 20, 'bold'))\r\n e.pack()\r\n b=Button(self.top,text=\"Add field\",command=self.fun_show , font=('times', 20, 'bold'),bg=\"white\",fg=\"black\")\r\n b.pack()\r\n b=Button(self.top,text=\"OK\",font=('times', 20, 'bold'),command=self.show_entered_data,bg=\"white\",fg=\"black\")\r\n b.pack(side=RIGHT)\r\n\r\n\r\n def show_entered_data(self):\r\n global en1\r\n global en2\r\n global list1\r\n global tbl_name\r\n self.tbl_name=self.table_name.get()\r\n self.en1=self.entry1.get()\r\n self.en2=self.entry2.get()\r\n sent=\"Create table \"+str(self.tbl_name)+\"('\"+str(self.en1)+ \" \"+ str(self.en2)+\"')\"\r\n list1 = Text(self.top, width=41, height=8, font=('times', 25, 'bold'))\r\n list1.place(x=0,y=0)\r\n list1.insert(0.0,sent)\r\n print(self.tbl_name,self.en1,self.en2)\r\n self.cursor.execute(sent)\r\n self.list.insert(0,sent)\r\n self.connection.commit()\r\n\r\n\r\n def fun_show(self):\r\n l = Label(self.top, text=\"Name\", font=('times', 20, 'bold'), bg=\"white\", fg=\"black\")\r\n l.pack(side=TOP)\r\n self.entry1 = StringVar()\r\n e1 = Entry(self.top, textvariable=self.entry1, font=('times', 20, 'bold'))\r\n e1.pack()\r\n l = Label(self.top, text=\"type\", font=('times', 20, 'bold'), bg=\"white\", fg=\"black\")\r\n l.pack(side=TOP)\r\n self.entry2 = StringVar()\r\n e1 = Entry(self.top, textvariable=self.entry2, font=('times', 20, 
'bold'))\r\n e1.pack()\r\n\r\n\r\nGui()",
"step-ids": [
9,
12,
17,
18,
20
]
}
|
[
9,
12,
17,
18,
20
] |
from flask import Flask, render_template, request
import random, requests
app = Flask(__name__)
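# Small demo app: template rendering, dynamic routes, GET-form handling, and calls to the ARTII ASCII-art API.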
@app.route('/')
def hello():
# return 'Hello World'
return render_template('index.html')
# Looks for the file in the templates folder under the root directory
@app.route('/ace')
def ace():
return '불기둥!'
@app.route('/html')
def html():
return '<h1> 태그 사용할 수 있어요! <h1>'
@app.route('/html_multiline')
# Dynamic routing
@app.route('/greeting/<string:name>')
def greeting(name):
return render_template('index.html', html_name=name)
# Build a cube page that returns the cube of a number!
# Takes a number from the user and shows the cubed result
@app.route('/cube/<int:number>')
def cube(number):
result = number ** 3
return render_template('cube.html',number=number, result=result)
@app.route('/movies')
def movies():
movie_list = ['82년생김지영', '조커', '엔드게임', '궁예']
return render_template('movies.html', movies=movie_list)
# ping: serves the form page that collects input from the user
@app.route('/ping')
def ping():
return render_template('ping.html')
# pong: receives the submitted form data and processes it
@app.route('/pong')
def pong():
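    # A form submitted with method="GET" arrives in the query string and is read via request.args.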
user_name = request.args.get('user_name')
return render_template('pong.html', user_name=user_name)
# fake naver, google
@app.route('/naver')
def naver():
return render_template('naver.html')
# Form page that asks the user for a name!
@app.route('/vonvon')
def vonvon():
return render_template('vonvon.html')
# (Response) logic that assembles and returns the fun facts based on the submitted name!
@app.route('/godmademe')
def godmademe():
    # 1. Fetch the data the user submitted.
name = request.args.get('user_name')
    # 2. Build lists of fun traits to show the user.
first_list = ['잘생김','못생김','개성','키','몸무게','노안','동안','오징어']
second_list = ['게으름','성실함','근면함','낭비벽','신중함','덜렁거림','귀찮음']
third_list = ['식욕','똘끼','허세','우울함','가벼움']
    # 3. Pick one item at random from each list.
first = random.choice(first_list)
second = random.choice(second_list)
third = random.choice(third_list)
    # 4. Render the assembled info in the template for the user.
return render_template('godmademe.html', name=name, first=first, second=second, third=third)
# 1. Take arbitrary text from the user and return it converted to ASCII art.
# 2. The ASCII-art font is picked at random for the conversion.
@app.route('/catch')
def catch():
return render_template('catch.html')
@app.route('/result')
def result():
    # 1. Fetch the form data the user submitted.
word = request.args.get("word")
    # 2. Request the ARTII API and store the response in a variable (the font list).
fonts = requests.get('http://artii.herokuapp.com/fonts_list').text
    # 3. Turn the fetched fonts into a list, splitting on newlines (\n).
fonts = fonts.split('\n')
    # 4. Pick one font at random.
font = random.choice(fonts)
    # 5. Call the API with the user's word and the randomly chosen font.
result = requests.get(f'http://artii.herokuapp.com/make?text={word}&font={font}').text
    # 6. Return the final result to the user.
return render_template('result.html', result=result)
# This block must come last
# Enable debug mode so the server reloads automatically on code changes
if __name__ == '__main__':
app.run(debug=True)
|
normal
|
{
"blob_id": "9fa3a7c57b311a47e67de73bf6083f1f151d73f4",
"index": 8554,
"step-1": "<mask token>\n\n\[email protected]('/html')\ndef html():\n return '<h1> 태그 사용할 수 있어요! <h1>'\n\n\n<mask token>\n\n\[email protected]('/ping')\ndef ping():\n return render_template('ping.html')\n\n\[email protected]('/pong')\ndef pong():\n user_name = request.args.get('user_name')\n return render_template('pong.html', user_name=user_name)\n\n\n<mask token>\n\n\[email protected]('/vonvon')\ndef vonvon():\n return render_template('vonvon.html')\n\n\[email protected]('/godmademe')\ndef godmademe():\n name = request.args.get('user_name')\n first_list = ['잘생김', '못생김', '개성', '키', '몸무게', '노안', '동안', '오징어']\n second_list = ['게으름', '성실함', '근면함', '낭비벽', '신중함', '덜렁거림', '귀찮음']\n third_list = ['식욕', '똘끼', '허세', '우울함', '가벼움']\n first = random.choice(first_list)\n second = random.choice(second_list)\n third = random.choice(third_list)\n return render_template('godmademe.html', name=name, first=first, second\n =second, third=third)\n\n\[email protected]('/catch')\ndef catch():\n return render_template('catch.html')\n\n\[email protected]('/result')\ndef result():\n word = request.args.get('word')\n fonts = requests.get('http://artii.herokuapp.com/fonts_list').text\n fonts = fonts.split('\\n')\n font = random.choice(fonts)\n result = requests.get(\n f'http://artii.herokuapp.com/make?text={word}&font={font}').text\n return render_template('result.html', result=result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef hello():\n return render_template('index.html')\n\n\[email protected]('/ace')\ndef ace():\n return '불기둥!'\n\n\[email protected]('/html')\ndef html():\n return '<h1> 태그 사용할 수 있어요! <h1>'\n\n\n<mask token>\n\n\[email protected]('/cube/<int:number>')\ndef cube(number):\n result = number ** 3\n return render_template('cube.html', number=number, result=result)\n\n\n<mask token>\n\n\[email protected]('/ping')\ndef ping():\n return render_template('ping.html')\n\n\[email protected]('/pong')\ndef pong():\n user_name = request.args.get('user_name')\n return render_template('pong.html', user_name=user_name)\n\n\n<mask token>\n\n\[email protected]('/vonvon')\ndef vonvon():\n return render_template('vonvon.html')\n\n\[email protected]('/godmademe')\ndef godmademe():\n name = request.args.get('user_name')\n first_list = ['잘생김', '못생김', '개성', '키', '몸무게', '노안', '동안', '오징어']\n second_list = ['게으름', '성실함', '근면함', '낭비벽', '신중함', '덜렁거림', '귀찮음']\n third_list = ['식욕', '똘끼', '허세', '우울함', '가벼움']\n first = random.choice(first_list)\n second = random.choice(second_list)\n third = random.choice(third_list)\n return render_template('godmademe.html', name=name, first=first, second\n =second, third=third)\n\n\[email protected]('/catch')\ndef catch():\n return render_template('catch.html')\n\n\[email protected]('/result')\ndef result():\n word = request.args.get('word')\n fonts = requests.get('http://artii.herokuapp.com/fonts_list').text\n fonts = fonts.split('\\n')\n font = random.choice(fonts)\n result = requests.get(\n f'http://artii.herokuapp.com/make?text={word}&font={font}').text\n return render_template('result.html', result=result)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\[email protected]('/')\ndef hello():\n return render_template('index.html')\n\n\[email protected]('/ace')\ndef ace():\n return '불기둥!'\n\n\[email protected]('/html')\ndef html():\n return '<h1> 태그 사용할 수 있어요! <h1>'\n\n\[email protected]('/html_multiline')\[email protected]('/greeting/<string:name>')\ndef greeting(name):\n return render_template('index.html', html_name=name)\n\n\[email protected]('/cube/<int:number>')\ndef cube(number):\n result = number ** 3\n return render_template('cube.html', number=number, result=result)\n\n\[email protected]('/movies')\ndef movies():\n movie_list = ['82년생김지영', '조커', '엔드게임', '궁예']\n return render_template('movies.html', movies=movie_list)\n\n\[email protected]('/ping')\ndef ping():\n return render_template('ping.html')\n\n\[email protected]('/pong')\ndef pong():\n user_name = request.args.get('user_name')\n return render_template('pong.html', user_name=user_name)\n\n\[email protected]('/naver')\ndef naver():\n return render_template('naver.html')\n\n\[email protected]('/vonvon')\ndef vonvon():\n return render_template('vonvon.html')\n\n\[email protected]('/godmademe')\ndef godmademe():\n name = request.args.get('user_name')\n first_list = ['잘생김', '못생김', '개성', '키', '몸무게', '노안', '동안', '오징어']\n second_list = ['게으름', '성실함', '근면함', '낭비벽', '신중함', '덜렁거림', '귀찮음']\n third_list = ['식욕', '똘끼', '허세', '우울함', '가벼움']\n first = random.choice(first_list)\n second = random.choice(second_list)\n third = random.choice(third_list)\n return render_template('godmademe.html', name=name, first=first, second\n =second, third=third)\n\n\[email protected]('/catch')\ndef catch():\n return render_template('catch.html')\n\n\[email protected]('/result')\ndef result():\n word = request.args.get('word')\n fonts = requests.get('http://artii.herokuapp.com/fonts_list').text\n fonts = fonts.split('\\n')\n font = random.choice(fonts)\n result = requests.get(\n f'http://artii.herokuapp.com/make?text={word}&font={font}').text\n return render_template('result.html', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask, render_template, request\nimport random, requests\napp = Flask(__name__)\n\n\[email protected]('/')\ndef hello():\n return render_template('index.html')\n\n\[email protected]('/ace')\ndef ace():\n return '불기둥!'\n\n\[email protected]('/html')\ndef html():\n return '<h1> 태그 사용할 수 있어요! <h1>'\n\n\[email protected]('/html_multiline')\[email protected]('/greeting/<string:name>')\ndef greeting(name):\n return render_template('index.html', html_name=name)\n\n\[email protected]('/cube/<int:number>')\ndef cube(number):\n result = number ** 3\n return render_template('cube.html', number=number, result=result)\n\n\[email protected]('/movies')\ndef movies():\n movie_list = ['82년생김지영', '조커', '엔드게임', '궁예']\n return render_template('movies.html', movies=movie_list)\n\n\[email protected]('/ping')\ndef ping():\n return render_template('ping.html')\n\n\[email protected]('/pong')\ndef pong():\n user_name = request.args.get('user_name')\n return render_template('pong.html', user_name=user_name)\n\n\[email protected]('/naver')\ndef naver():\n return render_template('naver.html')\n\n\[email protected]('/vonvon')\ndef vonvon():\n return render_template('vonvon.html')\n\n\[email protected]('/godmademe')\ndef godmademe():\n name = request.args.get('user_name')\n first_list = ['잘생김', '못생김', '개성', '키', '몸무게', '노안', '동안', '오징어']\n second_list = ['게으름', '성실함', '근면함', '낭비벽', '신중함', '덜렁거림', '귀찮음']\n third_list = ['식욕', '똘끼', '허세', '우울함', '가벼움']\n first = random.choice(first_list)\n second = random.choice(second_list)\n third = random.choice(third_list)\n return render_template('godmademe.html', name=name, first=first, second\n =second, third=third)\n\n\[email protected]('/catch')\ndef catch():\n return render_template('catch.html')\n\n\[email protected]('/result')\ndef result():\n word = request.args.get('word')\n fonts = requests.get('http://artii.herokuapp.com/fonts_list').text\n fonts = fonts.split('\\n')\n font = random.choice(fonts)\n result = requests.get(\n f'http://artii.herokuapp.com/make?text={word}&font={font}').text\n return render_template('result.html', result=result)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask, render_template, request\nimport random, requests\napp = Flask(__name__)\n\[email protected]('/')\ndef hello():\n # return 'Hello World'\n return render_template('index.html')\n # root 디렉토리에 있는 templates라는 폴더를 탐색하여 파일을 찾음\n\[email protected]('/ace')\ndef ace():\n return '불기둥!'\n\[email protected]('/html')\ndef html():\n return '<h1> 태그 사용할 수 있어요! <h1>'\n\[email protected]('/html_multiline')\n\n# 동적 라우팅\[email protected]('/greeting/<string:name>')\ndef greeting(name):\n return render_template('index.html', html_name=name)\n\n#세제곱을 되돌려주는 cube 페이지 작성!\n#사용자에게 숫자값을 받아서, 세제곱한 결과를 보여주는 페이지\[email protected]('/cube/<int:number>')\ndef cube(number):\n result = number ** 3\n return render_template('cube.html',number=number, result=result)\n\[email protected]('/movies')\ndef movies():\n movie_list = ['82년생김지영', '조커', '엔드게임', '궁예']\n return render_template('movies.html', movies=movie_list)\n\n# ping : 사용자로부터 입력을 받을 form 페이지를 넘겨준다\[email protected]('/ping')\ndef ping():\n return render_template('ping.html')\n\n# pong : 사용자로부터 form 데이터를 전달받아서 가공한다\[email protected]('/pong')\ndef pong():\n user_name = request.args.get('user_name')\n return render_template('pong.html', user_name=user_name)\n\n# fake naver, google\[email protected]('/naver')\ndef naver():\n return render_template('naver.html')\n\n# 사용자로부터 이름을 입력받을 Form 페이지!\[email protected]('/vonvon')\ndef vonvon():\n return render_template('vonvon.html')\n\n# 전달받은 이름을 기준으로 넘겨줄 각종 정보를 가공해서 돌려주는 (응답)로직!\[email protected]('/godmademe')\ndef godmademe():\n # 1. 사용자가 입력한 데이터를 가져온다.\n name = request.args.get('user_name')\n # 2. 사용자에게 보여줄 여러가지 재밌는 특성들 리스트를 만든다.\n first_list = ['잘생김','못생김','개성','키','몸무게','노안','동안','오징어']\n second_list = ['게으름','성실함','근면함','낭비벽','신중함','덜렁거림','귀찮음']\n third_list = ['식욕','똘끼','허세','우울함','가벼움']\n # 3. 리스트에서 랜덤으로 하나씩을 선택한다.\n first = random.choice(first_list)\n second = random.choice(second_list)\n third = random.choice(third_list)\n # 4. 가공한 정보를 템플릿에 담아서 사용자에게 보여준다.\n return render_template('godmademe.html', name=name, first=first, second=second, third=third)\n\n# 1. 사용자로부터 임의의 텍스트를 입력받아서, 아스키 아트로 변환해서 돌려준다.\n# 2. 이 때, 아스키 아트 폰트는 랜덤으로 하나를 지정해서 변환한다\[email protected]('/catch')\ndef catch():\n return render_template('catch.html')\n\[email protected]('/result')\ndef result():\n # 1. 사용자가 입력한 Form 데이터를 가져온다.\n word = request.args.get(\"word\")\n # 2. ARTII API로 요청을 보내서, 응답 결과를 변수에 담는다. (폰트 정보들)\n fonts = requests.get('http://artii.herokuapp.com/fonts_list').text\n # 3. 가져온 폰트들을 리스트 형태로 바꾼다. -> 줄바꿈(\\n)을 기준으로 변수 구분\n fonts = fonts.split('\\n')\n # 4. 폰트 하나를 랜덤으로 선택한다.\n font = random.choice(fonts)\n # 5. 사용자가 입력한 단어와 랜덤으로 선택한 폰트 정보를 담아서 API에게 요청한다.\n result = requests.get(f'http://artii.herokuapp.com/make?text={word}&font={font}').text\n # 6. 최종 결과물을 사용자에게 돌려준다.\n return render_template('result.html', result=result)\n\n# 마지막에 꼭 넣어야 하는 코드\n# debug 모드를 활성화해서 서버 새로고침을 생략한다\nif __name__ == '__main__':\n app.run(debug=True)",
"step-ids": [
7,
10,
14,
16,
17
]
}
|
[
7,
10,
14,
16,
17
] |
"""
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = ['[email protected] (David Harcombe)']
from typing import Any, Dict, List, Tuple
from classes.decorators import lazy_property
from googleapiclient.discovery import Resource
from typing import List
class SA360Validator(object):
fields = []
def __init__(self,
sa360_service: Resource = None,
agency: int = None,
advertiser: int = None) -> None:
self.sa360_service = sa360_service
self.agency = agency
self.advertiser = advertiser
@lazy_property
def saved_column_names(self) -> List[str]:
return self.list_custom_columns()
def validate(self, field: Any) -> Tuple[bool, str]:
if isinstance(field, str):
return self.validate_custom_column(field)
elif isinstance(field, dict):
field_type = field.get('type')
if field_type == 'savedColumnName':
return self.validate_custom_column(field['value'])
elif field_type == 'columnName':
return self.validate_standard_column(field['value'])
else:
# 'type' not specified. rather than fail, check both in order
(valid, name) = self.validate_custom_column(field['value'])
if valid:
field['type'] = 'savedColumnName'
return (valid, name)
else:
field['type'] = 'columnName'
return self.validate_standard_column(field['value'])
def validate_custom_column(self, name: str) -> Tuple[bool, str]:
if not name:
return (True, '--- Blank column name ---')
if not self.saved_column_names:
return (False, '--- No custom columns found ---')
if name in self.saved_column_names:
return (True, name)
return (False, self._find_bad_case(name, self.saved_column_names))
def validate_standard_column(self, name: str) -> Tuple[bool, str]:
if not name:
return (True, '--- Blank column name ---')
if name in self.fields:
return (True, name)
return (False, self._find_bad_case(name, self.fields))
def list_custom_columns(self) -> List[str]:
saved_column_names = []
if self.sa360_service:
request = self.sa360_service.savedColumns().list(
agencyId=self.agency, advertiserId=self.advertiser)
response = request.execute()
if 'items' in response:
saved_column_names = [
item['savedColumnName'] for item in response['items']
]
else:
saved_column_names = []
return saved_column_names
def _find_bad_case(self, name: str, columns: List[str]) -> str:
return next((x for i, x in enumerate(columns)
if x.casefold() == name.casefold()), None)
|
normal
|
{
"blob_id": "cce40ff190f7790ac4eca7d6cb3c032955bb4849",
"index": 8288,
"step-1": "<mask token>\n\n\nclass SA360Validator(object):\n <mask token>\n\n def __init__(self, sa360_service: Resource=None, agency: int=None,\n advertiser: int=None) ->None:\n self.sa360_service = sa360_service\n self.agency = agency\n self.advertiser = advertiser\n\n @lazy_property\n def saved_column_names(self) ->List[str]:\n return self.list_custom_columns()\n <mask token>\n\n def validate_custom_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if not self.saved_column_names:\n return False, '--- No custom columns found ---'\n if name in self.saved_column_names:\n return True, name\n return False, self._find_bad_case(name, self.saved_column_names)\n\n def validate_standard_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if name in self.fields:\n return True, name\n return False, self._find_bad_case(name, self.fields)\n\n def list_custom_columns(self) ->List[str]:\n saved_column_names = []\n if self.sa360_service:\n request = self.sa360_service.savedColumns().list(agencyId=self.\n agency, advertiserId=self.advertiser)\n response = request.execute()\n if 'items' in response:\n saved_column_names = [item['savedColumnName'] for item in\n response['items']]\n else:\n saved_column_names = []\n return saved_column_names\n\n def _find_bad_case(self, name: str, columns: List[str]) ->str:\n return next((x for i, x in enumerate(columns) if x.casefold() ==\n name.casefold()), None)\n",
"step-2": "<mask token>\n\n\nclass SA360Validator(object):\n <mask token>\n\n def __init__(self, sa360_service: Resource=None, agency: int=None,\n advertiser: int=None) ->None:\n self.sa360_service = sa360_service\n self.agency = agency\n self.advertiser = advertiser\n\n @lazy_property\n def saved_column_names(self) ->List[str]:\n return self.list_custom_columns()\n\n def validate(self, field: Any) ->Tuple[bool, str]:\n if isinstance(field, str):\n return self.validate_custom_column(field)\n elif isinstance(field, dict):\n field_type = field.get('type')\n if field_type == 'savedColumnName':\n return self.validate_custom_column(field['value'])\n elif field_type == 'columnName':\n return self.validate_standard_column(field['value'])\n else:\n valid, name = self.validate_custom_column(field['value'])\n if valid:\n field['type'] = 'savedColumnName'\n return valid, name\n else:\n field['type'] = 'columnName'\n return self.validate_standard_column(field['value'])\n\n def validate_custom_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if not self.saved_column_names:\n return False, '--- No custom columns found ---'\n if name in self.saved_column_names:\n return True, name\n return False, self._find_bad_case(name, self.saved_column_names)\n\n def validate_standard_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if name in self.fields:\n return True, name\n return False, self._find_bad_case(name, self.fields)\n\n def list_custom_columns(self) ->List[str]:\n saved_column_names = []\n if self.sa360_service:\n request = self.sa360_service.savedColumns().list(agencyId=self.\n agency, advertiserId=self.advertiser)\n response = request.execute()\n if 'items' in response:\n saved_column_names = [item['savedColumnName'] for item in\n response['items']]\n else:\n saved_column_names = []\n return saved_column_names\n\n def _find_bad_case(self, name: str, columns: List[str]) ->str:\n return next((x for i, x in enumerate(columns) if x.casefold() ==\n name.casefold()), None)\n",
"step-3": "<mask token>\n\n\nclass SA360Validator(object):\n fields = []\n\n def __init__(self, sa360_service: Resource=None, agency: int=None,\n advertiser: int=None) ->None:\n self.sa360_service = sa360_service\n self.agency = agency\n self.advertiser = advertiser\n\n @lazy_property\n def saved_column_names(self) ->List[str]:\n return self.list_custom_columns()\n\n def validate(self, field: Any) ->Tuple[bool, str]:\n if isinstance(field, str):\n return self.validate_custom_column(field)\n elif isinstance(field, dict):\n field_type = field.get('type')\n if field_type == 'savedColumnName':\n return self.validate_custom_column(field['value'])\n elif field_type == 'columnName':\n return self.validate_standard_column(field['value'])\n else:\n valid, name = self.validate_custom_column(field['value'])\n if valid:\n field['type'] = 'savedColumnName'\n return valid, name\n else:\n field['type'] = 'columnName'\n return self.validate_standard_column(field['value'])\n\n def validate_custom_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if not self.saved_column_names:\n return False, '--- No custom columns found ---'\n if name in self.saved_column_names:\n return True, name\n return False, self._find_bad_case(name, self.saved_column_names)\n\n def validate_standard_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if name in self.fields:\n return True, name\n return False, self._find_bad_case(name, self.fields)\n\n def list_custom_columns(self) ->List[str]:\n saved_column_names = []\n if self.sa360_service:\n request = self.sa360_service.savedColumns().list(agencyId=self.\n agency, advertiserId=self.advertiser)\n response = request.execute()\n if 'items' in response:\n saved_column_names = [item['savedColumnName'] for item in\n response['items']]\n else:\n saved_column_names = []\n return saved_column_names\n\n def _find_bad_case(self, name: str, columns: List[str]) ->str:\n return next((x for i, x in enumerate(columns) if x.casefold() ==\n name.casefold()), None)\n",
"step-4": "<mask token>\n__author__ = ['[email protected] (David Harcombe)']\nfrom typing import Any, Dict, List, Tuple\nfrom classes.decorators import lazy_property\nfrom googleapiclient.discovery import Resource\nfrom typing import List\n\n\nclass SA360Validator(object):\n fields = []\n\n def __init__(self, sa360_service: Resource=None, agency: int=None,\n advertiser: int=None) ->None:\n self.sa360_service = sa360_service\n self.agency = agency\n self.advertiser = advertiser\n\n @lazy_property\n def saved_column_names(self) ->List[str]:\n return self.list_custom_columns()\n\n def validate(self, field: Any) ->Tuple[bool, str]:\n if isinstance(field, str):\n return self.validate_custom_column(field)\n elif isinstance(field, dict):\n field_type = field.get('type')\n if field_type == 'savedColumnName':\n return self.validate_custom_column(field['value'])\n elif field_type == 'columnName':\n return self.validate_standard_column(field['value'])\n else:\n valid, name = self.validate_custom_column(field['value'])\n if valid:\n field['type'] = 'savedColumnName'\n return valid, name\n else:\n field['type'] = 'columnName'\n return self.validate_standard_column(field['value'])\n\n def validate_custom_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if not self.saved_column_names:\n return False, '--- No custom columns found ---'\n if name in self.saved_column_names:\n return True, name\n return False, self._find_bad_case(name, self.saved_column_names)\n\n def validate_standard_column(self, name: str) ->Tuple[bool, str]:\n if not name:\n return True, '--- Blank column name ---'\n if name in self.fields:\n return True, name\n return False, self._find_bad_case(name, self.fields)\n\n def list_custom_columns(self) ->List[str]:\n saved_column_names = []\n if self.sa360_service:\n request = self.sa360_service.savedColumns().list(agencyId=self.\n agency, advertiserId=self.advertiser)\n response = request.execute()\n if 'items' in response:\n saved_column_names = [item['savedColumnName'] for item in\n response['items']]\n else:\n saved_column_names = []\n return saved_column_names\n\n def _find_bad_case(self, name: str, columns: List[str]) ->str:\n return next((x for i, x in enumerate(columns) if x.casefold() ==\n name.casefold()), None)\n",
"step-5": "\"\"\"\nCopyright 2020 Google LLC\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n https://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\n\n__author__ = ['[email protected] (David Harcombe)']\n\nfrom typing import Any, Dict, List, Tuple\n\nfrom classes.decorators import lazy_property\nfrom googleapiclient.discovery import Resource\nfrom typing import List\n\n\nclass SA360Validator(object):\n fields = []\n\n def __init__(self,\n sa360_service: Resource = None,\n agency: int = None,\n advertiser: int = None) -> None:\n self.sa360_service = sa360_service\n self.agency = agency\n self.advertiser = advertiser\n\n @lazy_property\n def saved_column_names(self) -> List[str]:\n return self.list_custom_columns()\n\n def validate(self, field: Any) -> Tuple[bool, str]:\n if isinstance(field, str):\n return self.validate_custom_column(field)\n\n elif isinstance(field, dict):\n field_type = field.get('type')\n if field_type == 'savedColumnName':\n return self.validate_custom_column(field['value'])\n elif field_type == 'columnName':\n return self.validate_standard_column(field['value'])\n else:\n # 'type' not specified. rather than fail, check both in order\n (valid, name) = self.validate_custom_column(field['value'])\n if valid:\n field['type'] = 'savedColumnName'\n return (valid, name)\n else:\n field['type'] = 'columnName'\n return self.validate_standard_column(field['value'])\n\n def validate_custom_column(self, name: str) -> Tuple[bool, str]:\n if not name:\n return (True, '--- Blank column name ---')\n\n if not self.saved_column_names:\n return (False, '--- No custom columns found ---')\n\n if name in self.saved_column_names:\n return (True, name)\n\n return (False, self._find_bad_case(name, self.saved_column_names))\n\n def validate_standard_column(self, name: str) -> Tuple[bool, str]:\n if not name:\n return (True, '--- Blank column name ---')\n\n if name in self.fields:\n return (True, name)\n\n return (False, self._find_bad_case(name, self.fields))\n\n def list_custom_columns(self) -> List[str]:\n saved_column_names = []\n if self.sa360_service:\n request = self.sa360_service.savedColumns().list(\n agencyId=self.agency, advertiserId=self.advertiser)\n response = request.execute()\n\n if 'items' in response:\n saved_column_names = [\n item['savedColumnName'] for item in response['items']\n ]\n else:\n saved_column_names = []\n\n return saved_column_names\n\n def _find_bad_case(self, name: str, columns: List[str]) -> str:\n return next((x for i, x in enumerate(columns)\n if x.casefold() == name.casefold()), None)\n",
"step-ids": [
7,
8,
9,
11,
12
]
}
|
[
7,
8,
9,
11,
12
] |
#!/usr/bin/python
#
# Copyright 2017 Steven Watanabe
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
from MockProgram import *
command('strip', '-S', '-x', input_file('bin/darwin-4.2.1/release/target-os-darwin/test'))
main()
|
normal
|
{
"blob_id": "d2f77afd0d282b1fa4859c5368c9d2c745a5625e",
"index": 3293,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncommand('strip', '-S', '-x', input_file(\n 'bin/darwin-4.2.1/release/target-os-darwin/test'))\nmain()\n",
"step-3": "from MockProgram import *\ncommand('strip', '-S', '-x', input_file(\n 'bin/darwin-4.2.1/release/target-os-darwin/test'))\nmain()\n",
"step-4": "#!/usr/bin/python\n#\n# Copyright 2017 Steven Watanabe\n#\n# Distributed under the Boost Software License, Version 1.0.\n# (See accompanying file LICENSE_1_0.txt or copy at\n# http://www.boost.org/LICENSE_1_0.txt)\n\nfrom MockProgram import *\n\ncommand('strip', '-S', '-x', input_file('bin/darwin-4.2.1/release/target-os-darwin/test'))\n\nmain()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Vincent Celis
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import webapp2
import handlers
# A list containing webapp2.Route instances to define the routing tables
ROUTE_LIST = [
webapp2.Route(r'/api/history<name:/(?:[a-zA-Z0-9_-]+/?)*>',
handler=handlers.HistoryApi, name='historyApi'),
webapp2.Route(r'/api<name:/(?:[a-zA-Z0-9_-]+/?)*>',
handler=handlers.PageApi, name='pageApi'),
webapp2.Route(r'/signup', handler=handlers.SignupPage, name='signup'),
webapp2.Route(r'/login', handler=handlers.LoginPage, name='login'),
webapp2.Route(r'/logout', handler=handlers.LogoutPage, name='logout'),
webapp2.Route(r'/search', handler=handlers.SearchPage, name='search'),
webapp2.Route(r'/_edit<name:/(?:[a-zA-Z0-9_-]+/?)*>',
handler=handlers.EditPage, name='edit'),
webapp2.Route(r'/_history<name:/(?:[a-zA-Z0-9_-]+/?)*>',
handler=handlers.HistoryPage, name='history'),
webapp2.Route(r'<name:/(?:[a-zA-Z0-9_-]+/?)*>',
handler=handlers.WikiPage, name='wiki')
]
|
normal
|
{
"blob_id": "a61bc654eecb4e44dce3e62df752f80559a2d055",
"index": 9184,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nROUTE_LIST = [webapp2.Route('/api/history<name:/(?:[a-zA-Z0-9_-]+/?)*>',\n handler=handlers.HistoryApi, name='historyApi'), webapp2.Route(\n '/api<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler=handlers.PageApi, name=\n 'pageApi'), webapp2.Route('/signup', handler=handlers.SignupPage, name=\n 'signup'), webapp2.Route('/login', handler=handlers.LoginPage, name=\n 'login'), webapp2.Route('/logout', handler=handlers.LogoutPage, name=\n 'logout'), webapp2.Route('/search', handler=handlers.SearchPage, name=\n 'search'), webapp2.Route('/_edit<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler\n =handlers.EditPage, name='edit'), webapp2.Route(\n '/_history<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler=handlers.HistoryPage,\n name='history'), webapp2.Route('<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler\n =handlers.WikiPage, name='wiki')]\n",
"step-3": "import webapp2\nimport handlers\nROUTE_LIST = [webapp2.Route('/api/history<name:/(?:[a-zA-Z0-9_-]+/?)*>',\n handler=handlers.HistoryApi, name='historyApi'), webapp2.Route(\n '/api<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler=handlers.PageApi, name=\n 'pageApi'), webapp2.Route('/signup', handler=handlers.SignupPage, name=\n 'signup'), webapp2.Route('/login', handler=handlers.LoginPage, name=\n 'login'), webapp2.Route('/logout', handler=handlers.LogoutPage, name=\n 'logout'), webapp2.Route('/search', handler=handlers.SearchPage, name=\n 'search'), webapp2.Route('/_edit<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler\n =handlers.EditPage, name='edit'), webapp2.Route(\n '/_history<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler=handlers.HistoryPage,\n name='history'), webapp2.Route('<name:/(?:[a-zA-Z0-9_-]+/?)*>', handler\n =handlers.WikiPage, name='wiki')]\n",
"step-4": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# Copyright (c) 2014 Vincent Celis\n# \n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n# \n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n# \n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n\nimport webapp2\nimport handlers\n\n# A list containing webapp2.Route instances to define the routing tables\nROUTE_LIST = [\n webapp2.Route(r'/api/history<name:/(?:[a-zA-Z0-9_-]+/?)*>',\n handler=handlers.HistoryApi, name='historyApi'),\n webapp2.Route(r'/api<name:/(?:[a-zA-Z0-9_-]+/?)*>',\n handler=handlers.PageApi, name='pageApi'),\n webapp2.Route(r'/signup', handler=handlers.SignupPage, name='signup'),\n webapp2.Route(r'/login', handler=handlers.LoginPage, name='login'),\n webapp2.Route(r'/logout', handler=handlers.LogoutPage, name='logout'),\n webapp2.Route(r'/search', handler=handlers.SearchPage, name='search'),\n webapp2.Route(r'/_edit<name:/(?:[a-zA-Z0-9_-]+/?)*>',\n handler=handlers.EditPage, name='edit'),\n webapp2.Route(r'/_history<name:/(?:[a-zA-Z0-9_-]+/?)*>',\n handler=handlers.HistoryPage, name='history'),\n webapp2.Route(r'<name:/(?:[a-zA-Z0-9_-]+/?)*>',\n handler=handlers.WikiPage, name='wiki')\n ]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
car_state = False
u_input = input(f'>')
if car_state == True:
print('Car is stopped!')
if u_input == 'start':
car_state = True
print('Car has started!')
elif u_input == 'stop':
    car_state = False
print('Car has stopped!')
else:
print('''I don''t understand that...''')
|
normal
|
{
"blob_id": "2766339632200c26a8c6cd3abff28b1495870b9a",
"index": 9207,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif car_state == True:\n print('Car is stopped!')\nif u_input == 'start':\n car_state = True\n print('Car has started!')\nelif u_input == 'stop':\n car_state == False\n print('Car has stopped!')\nelse:\n print(\"I don''t understand that...\")\n",
"step-3": "car_state = False\nu_input = input(f'>')\nif car_state == True:\n print('Car is stopped!')\nif u_input == 'start':\n car_state = True\n print('Car has started!')\nelif u_input == 'stop':\n car_state == False\n print('Car has stopped!')\nelse:\n print(\"I don''t understand that...\")\n",
"step-4": "car_state = False\r\nu_input = input(f'>')\r\n\r\nif car_state == True:\r\n print('Car is stopped!')\r\n\r\nif u_input == 'start':\r\n car_state = True\r\n print('Car has started!')\r\nelif u_input == 'stop':\r\n car_state == False\r\n print('Car has stopped!')\r\nelse:\r\n print('''I don''t understand that...''')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# coding: utf-8
# In[5]:
import re
def phonenumbervalidate(phone):
pattern ='^[6-9][0-9]{9}$'
phone =str(phone)
if re.match(pattern,phone):
return True
return False
print(phonenumbervalidate(998855451))
print(phonenumbervalidate(9955441))
# In[10]:
import re
def phonenumbervalidate(phone):
pattern ='^[0][6-9][0-9]{9}$'
phone =str(phone)
if re.match(pattern,phone):
return True
return False
print(phonenumbervalidate("09988554510"))
print(phonenumbervalidate(99554410))
# In[11]:
import re
def validaterollnumber(number):
number =str(number)
pattern ="^[1][5][2][u][1][A][0][1-9][0-6][0-9]"
if re.match(pattern,number):
return True
return False
print(phonenumbervalidate("152u1A0555"))
print(phonenumbervalidate("152u1A0485"))
# In[ ]:
|
normal
|
{
"blob_id": "6b2161379bdd27980d3a515cdf4719ab036845fe",
"index": 8217,
"step-1": "<mask token>\n\n\ndef phonenumbervalidate(phone):\n pattern = '^[0][6-9][0-9]{9}$'\n phone = str(phone)\n if re.match(pattern, phone):\n return True\n return False\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef phonenumbervalidate(phone):\n pattern = '^[0][6-9][0-9]{9}$'\n phone = str(phone)\n if re.match(pattern, phone):\n return True\n return False\n\n\n<mask token>\n\n\ndef validaterollnumber(number):\n number = str(number)\n pattern = '^[1][5][2][u][1][A][0][1-9][0-6][0-9]'\n if re.match(pattern, number):\n return True\n return False\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef phonenumbervalidate(phone):\n pattern = '^[6-9][0-9]{9}$'\n phone = str(phone)\n if re.match(pattern, phone):\n return True\n return False\n\n\n<mask token>\n\n\ndef phonenumbervalidate(phone):\n pattern = '^[0][6-9][0-9]{9}$'\n phone = str(phone)\n if re.match(pattern, phone):\n return True\n return False\n\n\n<mask token>\n\n\ndef validaterollnumber(number):\n number = str(number)\n pattern = '^[1][5][2][u][1][A][0][1-9][0-6][0-9]'\n if re.match(pattern, number):\n return True\n return False\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef phonenumbervalidate(phone):\n pattern = '^[6-9][0-9]{9}$'\n phone = str(phone)\n if re.match(pattern, phone):\n return True\n return False\n\n\nprint(phonenumbervalidate(998855451))\nprint(phonenumbervalidate(9955441))\n<mask token>\n\n\ndef phonenumbervalidate(phone):\n pattern = '^[0][6-9][0-9]{9}$'\n phone = str(phone)\n if re.match(pattern, phone):\n return True\n return False\n\n\nprint(phonenumbervalidate('09988554510'))\nprint(phonenumbervalidate(99554410))\n<mask token>\n\n\ndef validaterollnumber(number):\n number = str(number)\n pattern = '^[1][5][2][u][1][A][0][1-9][0-6][0-9]'\n if re.match(pattern, number):\n return True\n return False\n\n\nprint(phonenumbervalidate('152u1A0555'))\nprint(phonenumbervalidate('152u1A0485'))\n",
"step-5": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[5]:\n\n\nimport re\ndef phonenumbervalidate(phone):\n pattern ='^[6-9][0-9]{9}$'\n phone =str(phone)\n if re.match(pattern,phone):\n return True \n return False\nprint(phonenumbervalidate(998855451))\nprint(phonenumbervalidate(9955441))\n\n\n# In[10]:\n\n\nimport re\ndef phonenumbervalidate(phone):\n pattern ='^[0][6-9][0-9]{9}$'\n phone =str(phone)\n if re.match(pattern,phone):\n return True \n return False\nprint(phonenumbervalidate(\"09988554510\"))\nprint(phonenumbervalidate(99554410))\n\n\n# In[11]:\n\n\nimport re\ndef validaterollnumber(number):\n \n number =str(number)\n pattern =\"^[1][5][2][u][1][A][0][1-9][0-6][0-9]\" \n if re.match(pattern,number):\n return True \n return False\nprint(phonenumbervalidate(\"152u1A0555\"))\nprint(phonenumbervalidate(\"152u1A0485\"))\n\n\n# In[ ]:\n\n\n\n\n",
"step-ids": [
1,
2,
3,
4,
6
]
}
|
[
1,
2,
3,
4,
6
] |
n = int(input())
a = oct(n)
b = hex(n)
print(a[2:],b[2:].upper())
#.upper : lowercase -> uppercase
|
normal
|
{
"blob_id": "d6cea40e907a0424b2b1b8162f19aa8203443e55",
"index": 4360,
"step-1": "n = int(input())\n\na = oct(n)\nb = hex(n)\n\nprint(a[2:],b[2:].upper())\n\n#.upper : 소문자 -> 대문자\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# Create an 8x8 checkerboard matrix of 0s and 1s
import numpy as np
vector = np.zeros((8,8))
vector[1::2,::2]=1
vector[::2,1::2]=1
print(vector)
'''
Output
[[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]
[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]
[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]
[0. 1. 0. 1. 0. 1. 0. 1.]
[1. 0. 1. 0. 1. 0. 1. 0.]]
'''
|
normal
|
{
"blob_id": "10d3ee459a296c26429659a202833a9570cf9454",
"index": 9639,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(vector)\n<mask token>\n",
"step-3": "<mask token>\nvector = np.zeros((8, 8))\nvector[1::2, ::2] = 1\nvector[::2, 1::2] = 1\nprint(vector)\n<mask token>\n",
"step-4": "import numpy as np\nvector = np.zeros((8, 8))\nvector[1::2, ::2] = 1\nvector[::2, 1::2] = 1\nprint(vector)\n<mask token>\n",
"step-5": "#Create a 3x3 identity matrix\n\n\nimport numpy as np\n\nvector = np.zeros((8,8))\nvector[1::2,::2]=1\nvector[::2,1::2]=1\nprint(vector)\n\n'''\nOutput\n\n[[0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]\n [0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]\n [0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]\n [0. 1. 0. 1. 0. 1. 0. 1.]\n [1. 0. 1. 0. 1. 0. 1. 0.]]\n\n'''",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
from sql_interpreter.tables.csv_table import CsvTable
from sql_interpreter.interpreter import SqlInterpreter
from sql_interpreter.cli import Cli
class InterpreterTest():
def setUp(self):
self.interpreter = SqlInterpreter()
self.cli = Cli(self.interpreter)
filename = os.path.join(
os.path.dirname(__file__), 'resources/employees.csv')
self.interpreter.load(CsvTable('employees', filename))
filename = os.path.join(
os.path.dirname(__file__), 'resources/departments.csv')
self.interpreter.load(CsvTable('departments', filename))
def tearDown(self):
self.interpreter.unload_all()
def test_select_1(self):
sql = '''select
id, first_name || ' ' || last_name as full_name, salary - 1000
from employees;'''
self.cli.execute(sql)
self.cli.print_new_line()
def test_select_2(self):
sql = '''select
e.id, last_name, department_id, departments.id, name
from employees e, departments;'''
self.cli.execute(sql)
self.cli.print_new_line()
def test_select_all(self):
code = '''select * from employees;'''
self.interpreter.interpret(code)
self.cli.print_new_line()
def test_select_distinct(self):
sql = '''select distinct
departments.id as dep_id, employees.salary as sal
from employees, departments
order by dep_id, sal desc;'''
self.cli.execute(sql)
self.cli.print_new_line()
if __name__ == '__main__':
test = InterpreterTest()
test.setUp()
test.test_select_1()
test.test_select_2()
test.test_select_distinct()
test.tearDown()
|
normal
|
{
"blob_id": "b3ee76bc0d93135d0908044a2424dd927a390007",
"index": 6357,
"step-1": "<mask token>\n\n\nclass InterpreterTest:\n <mask token>\n\n def tearDown(self):\n self.interpreter.unload_all()\n <mask token>\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass InterpreterTest:\n <mask token>\n\n def tearDown(self):\n self.interpreter.unload_all()\n <mask token>\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n\n def test_select_distinct(self):\n sql = \"\"\"select distinct\n departments.id as dep_id, employees.salary as sal\n from employees, departments\n order by dep_id, sal desc;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass InterpreterTest:\n\n def setUp(self):\n self.interpreter = SqlInterpreter()\n self.cli = Cli(self.interpreter)\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/employees.csv')\n self.interpreter.load(CsvTable('employees', filename))\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/departments.csv')\n self.interpreter.load(CsvTable('departments', filename))\n\n def tearDown(self):\n self.interpreter.unload_all()\n\n def test_select_1(self):\n sql = \"\"\"select\n id, first_name || ' ' || last_name as full_name, salary - 1000\n from employees;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n\n def test_select_distinct(self):\n sql = \"\"\"select distinct\n departments.id as dep_id, employees.salary as sal\n from employees, departments\n order by dep_id, sal desc;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n\nif __name__ == '__main__':\n test = InterpreterTest()\n test.setUp()\n test.test_select_1()\n test.test_select_2()\n test.test_select_distinct()\n test.tearDown()\n",
"step-4": "import os\nfrom sql_interpreter.tables.csv_table import CsvTable\nfrom sql_interpreter.interpreter import SqlInterpreter\nfrom sql_interpreter.cli import Cli\n\n\nclass InterpreterTest:\n\n def setUp(self):\n self.interpreter = SqlInterpreter()\n self.cli = Cli(self.interpreter)\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/employees.csv')\n self.interpreter.load(CsvTable('employees', filename))\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/departments.csv')\n self.interpreter.load(CsvTable('departments', filename))\n\n def tearDown(self):\n self.interpreter.unload_all()\n\n def test_select_1(self):\n sql = \"\"\"select\n id, first_name || ' ' || last_name as full_name, salary - 1000\n from employees;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n\n def test_select_distinct(self):\n sql = \"\"\"select distinct\n departments.id as dep_id, employees.salary as sal\n from employees, departments\n order by dep_id, sal desc;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n\nif __name__ == '__main__':\n test = InterpreterTest()\n test.setUp()\n test.test_select_1()\n test.test_select_2()\n test.test_select_distinct()\n test.tearDown()\n",
"step-5": "import os\r\nfrom sql_interpreter.tables.csv_table import CsvTable\r\nfrom sql_interpreter.interpreter import SqlInterpreter\r\nfrom sql_interpreter.cli import Cli\r\n\r\n\r\nclass InterpreterTest():\r\n def setUp(self):\r\n self.interpreter = SqlInterpreter()\r\n self.cli = Cli(self.interpreter)\r\n filename = os.path.join(\r\n os.path.dirname(__file__), 'resources/employees.csv')\r\n self.interpreter.load(CsvTable('employees', filename))\r\n filename = os.path.join(\r\n os.path.dirname(__file__), 'resources/departments.csv')\r\n self.interpreter.load(CsvTable('departments', filename))\r\n\r\n def tearDown(self):\r\n self.interpreter.unload_all()\r\n\r\n def test_select_1(self):\r\n sql = '''select\r\n id, first_name || ' ' || last_name as full_name, salary - 1000\r\n from employees;'''\r\n self.cli.execute(sql)\r\n self.cli.print_new_line()\r\n\r\n def test_select_2(self):\r\n sql = '''select\r\n e.id, last_name, department_id, departments.id, name\r\n from employees e, departments;'''\r\n self.cli.execute(sql)\r\n self.cli.print_new_line()\r\n\r\n def test_select_all(self):\r\n code = '''select * from employees;'''\r\n self.interpreter.interpret(code)\r\n self.cli.print_new_line()\r\n\r\n def test_select_distinct(self):\r\n sql = '''select distinct\r\n departments.id as dep_id, employees.salary as sal\r\n from employees, departments\r\n order by dep_id, sal desc;'''\r\n self.cli.execute(sql)\r\n self.cli.print_new_line()\r\n\r\n\r\nif __name__ == '__main__':\r\n test = InterpreterTest()\r\n test.setUp()\r\n test.test_select_1()\r\n test.test_select_2()\r\n test.test_select_distinct()\r\n test.tearDown()\r\n",
"step-ids": [
4,
5,
8,
9,
10
]
}
|
[
4,
5,
8,
9,
10
] |
class ChartType:
Vanilla = "Vanilla"
Neopolitan = "Neopolitan"
|
normal
|
{
"blob_id": "451a36eb205a269a05e3b3d89541278633d12aaa",
"index": 9781,
"step-1": "<mask token>\n",
"step-2": "class ChartType:\n <mask token>\n <mask token>\n",
"step-3": "class ChartType:\n Vanilla = 'Vanilla'\n Neopolitan = 'Neopolitan'\n",
"step-4": "\n\nclass ChartType:\n Vanilla = \"Vanilla\"\n Neopolitan = \"Neopolitan\"\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.shortcuts import render
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from polls.models import Poll
from .serializers import PollSerializer
# class PollView(APIView):
#
# def get(self, request):
# serializer = PollSerializer(Poll.objects.all(), many=True)
# response = {"polls": serializer.data}
# return Response(response, status=status.HTTP_200_OK)
#
# def post(self, request, format=None):
# data = request.data
# serializer = PollSerializer(data=data)
# if serializer.is_valid():
# poll = Poll(**data)
# poll.save()
# response = serializer.data
# return Response(response, status=status.HTTP_200_OK)
#
#
def index(request):
data = {}
return render(request,"polls/index.html",data)
#
# def show(request):
# data = {}
# p = Poll.objects.all()
# data["polls"] = p
# return render(request, "polls/show.html", data)
def show(request):
# data = {}
# p = Poll.objects.all()
# data["polls"] = p
return render(request, "polls/show.html")
def searchShow(request):
if 'search' in request.GET:
search_string = request.GET['search']
context = {
"search_string": search_string,
}
return render(request, "polls/show.html", context)
|
normal
|
{
"blob_id": "866ff68744a16158b7917ca6defc35440208ae71",
"index": 8575,
"step-1": "<mask token>\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\n<mask token>\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {'search_string': search_string}\n return render(request, 'polls/show.html', context)\n",
"step-3": "<mask token>\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\ndef show(request):\n return render(request, 'polls/show.html')\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {'search_string': search_string}\n return render(request, 'polls/show.html', context)\n",
"step-4": "from django.shortcuts import render\nfrom rest_framework import status\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\nfrom polls.models import Poll\nfrom .serializers import PollSerializer\n\n\ndef index(request):\n data = {}\n return render(request, 'polls/index.html', data)\n\n\ndef show(request):\n return render(request, 'polls/show.html')\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {'search_string': search_string}\n return render(request, 'polls/show.html', context)\n",
"step-5": "from django.shortcuts import render\n\nfrom rest_framework import status\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\n\nfrom polls.models import Poll\nfrom .serializers import PollSerializer\n\n\n# class PollView(APIView):\n#\n# def get(self, request):\n# serializer = PollSerializer(Poll.objects.all(), many=True)\n# response = {\"polls\": serializer.data}\n# return Response(response, status=status.HTTP_200_OK)\n#\n# def post(self, request, format=None):\n# data = request.data\n# serializer = PollSerializer(data=data)\n# if serializer.is_valid():\n# poll = Poll(**data)\n# poll.save()\n# response = serializer.data\n# return Response(response, status=status.HTTP_200_OK)\n#\n#\ndef index(request):\n data = {}\n return render(request,\"polls/index.html\",data)\n#\n# def show(request):\n# data = {}\n# p = Poll.objects.all()\n# data[\"polls\"] = p\n# return render(request, \"polls/show.html\", data)\n\ndef show(request):\n # data = {}\n # p = Poll.objects.all()\n # data[\"polls\"] = p\n return render(request, \"polls/show.html\")\n\n\n\ndef searchShow(request):\n if 'search' in request.GET:\n search_string = request.GET['search']\n context = {\n \"search_string\": search_string,\n }\n return render(request, \"polls/show.html\", context)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import json
import requests
import time
class TRY():
rates = list()
def __init__(self, r):
# if(TRY.rates[-1] != r):
TRY.rates.append(r)
def ls(self):
# print("TRY: "+TRY.rates[e] for e in range(1, len(TRY.rates)))
print(f"TRY: {TRY.rates}")
class USD():
rates = list()
def __init__(self, r):
# if(USD.rates[-1] != r):
USD.rates.append(r)
def ls(self):
# print("TRY: "+TRY.rates[e] for e in range(1, len(TRY.rates)))
print(f"USD: {USD.rates}")
class RUB():
rates = list()
def __init__(self, r):
# if(RUB.rates[-1] != r):
RUB.rates.append(r)
def ls(self):
# print("TRY: "+TRY.rates[e] for e in range(1, len(TRY.rates)))
print(f"RUB: {RUB.rates}")
class INR():
rates = list()
def __init__(self, r):
# if(INR.rates[-1] != r):
INR.rates.append(r)
def ls(self):
# print("TRY: "+TRY.rates[e] for e in range(1, len(TRY.rates)))
print(f"INR: {INR.rates}")
class Factory():
def getExchange(self, currency, rates):
if currency == "TRY":
return TRY(rates)
elif currency == "USD":
            return USD(rates)  # US dollar
elif currency == "RUB":
            return RUB(rates)  # Russian ruble
elif currency == "INR":
            return INR(rates)  # Indian rupee
else:
return None
def main(urlAPI):
resp = requests.get(urlAPI)
if(resp.ok is True):
# print(resp.ok)
data = resp.text
jsonData = json.loads(data)
parsedData = jsonData['rates']
factory = Factory()
# print(parsedData)
for c in parsedData:
f = factory.getExchange(c, parsedData[c])
TRY.ls(f)
USD.ls(f)
RUB.ls(f)
INR.ls(f)
else:
print(resp.ok)
if __name__ == '__main__':
for i in range(3):
# time.sleep(10)
main("https://api.exchangeratesapi.io/latest")
|
normal
|
{
"blob_id": "d56aa0f0b7c420e4021736cf8f80923121856d1c",
"index": 1286,
"step-1": "<mask token>\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass USD:\n rates = list()\n\n def __init__(self, r):\n USD.rates.append(r)\n\n def ls(self):\n print(f'USD: {USD.rates}')\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TRY:\n <mask token>\n\n def __init__(self, r):\n TRY.rates.append(r)\n <mask token>\n\n\nclass USD:\n rates = list()\n\n def __init__(self, r):\n USD.rates.append(r)\n\n def ls(self):\n print(f'USD: {USD.rates}')\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TRY:\n rates = list()\n\n def __init__(self, r):\n TRY.rates.append(r)\n\n def ls(self):\n print(f'TRY: {TRY.rates}')\n\n\nclass USD:\n rates = list()\n\n def __init__(self, r):\n USD.rates.append(r)\n\n def ls(self):\n print(f'USD: {USD.rates}')\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\ndef main(urlAPI):\n resp = requests.get(urlAPI)\n if resp.ok is True:\n data = resp.text\n jsonData = json.loads(data)\n parsedData = jsonData['rates']\n factory = Factory()\n for c in parsedData:\n f = factory.getExchange(c, parsedData[c])\n TRY.ls(f)\n USD.ls(f)\n RUB.ls(f)\n INR.ls(f)\n else:\n print(resp.ok)\n\n\n<mask token>\n",
"step-5": "import json\nimport requests\nimport time\n\n\nclass TRY():\n rates = list()\n\n def __init__(self, r):\n # if(TRY.rates[-1] != r):\n TRY.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"TRY: {TRY.rates}\")\n\n\nclass USD():\n rates = list()\n\n def __init__(self, r):\n # if(USD.rates[-1] != r):\n USD.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"USD: {USD.rates}\")\n\n\nclass RUB():\n rates = list()\n\n def __init__(self, r):\n # if(RUB.rates[-1] != r):\n RUB.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"RUB: {RUB.rates}\")\n\n\nclass INR():\n rates = list()\n\n def __init__(self, r):\n # if(INR.rates[-1] != r):\n INR.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"INR: {INR.rates}\")\n\n\nclass Factory():\n def getExchange(self, currency, rates):\n if currency == \"TRY\":\n return TRY(rates)\n elif currency == \"USD\":\n return USD(rates) # abd doları\n elif currency == \"RUB\":\n return RUB(rates) # rusya rublesi\n elif currency == \"INR\":\n return INR(rates) # hindistan rupisi\n else:\n return None\n\n\ndef main(urlAPI):\n resp = requests.get(urlAPI)\n if(resp.ok is True):\n\n # print(resp.ok)\n data = resp.text\n jsonData = json.loads(data)\n parsedData = jsonData['rates']\n\n factory = Factory()\n # print(parsedData)\n\n for c in parsedData:\n f = factory.getExchange(c, parsedData[c])\n\n TRY.ls(f)\n USD.ls(f)\n RUB.ls(f)\n INR.ls(f)\n else:\n print(resp.ok)\n\n\nif __name__ == '__main__':\n for i in range(3):\n # time.sleep(10)\n main(\"https://api.exchangeratesapi.io/latest\")\n",
"step-ids": [
10,
14,
16,
19,
22
]
}
|
[
10,
14,
16,
19,
22
] |
#! /usr/local/env python
#coding:utf-8
import urllib.request
import urllib.error
try:
urllib.request.urlopen("http://blog.csdn.net/jo_andy")
except urllib.error.URLError as e:
if hasattr(e,"code"):
print(e.code)
if hasattr(e,'reason'):
print(e.reason)
|
normal
|
{
"blob_id": "2ffd0de2888872cfa664919fcfc54b8e60b03280",
"index": 5256,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n urllib.request.urlopen('http://blog.csdn.net/jo_andy')\nexcept urllib.error.URLError as e:\n if hasattr(e, 'code'):\n print(e.code)\n if hasattr(e, 'reason'):\n print(e.reason)\n",
"step-3": "import urllib.request\nimport urllib.error\ntry:\n urllib.request.urlopen('http://blog.csdn.net/jo_andy')\nexcept urllib.error.URLError as e:\n if hasattr(e, 'code'):\n print(e.code)\n if hasattr(e, 'reason'):\n print(e.reason)\n",
"step-4": "#! /usr/local/env python\n#coding:utf-8\nimport urllib.request\nimport urllib.error\ntry:\n urllib.request.urlopen(\"http://blog.csdn.net/jo_andy\")\nexcept urllib.error.URLError as e:\n if hasattr(e,\"code\"):\n print(e.code)\n if hasattr(e,'reason'):\n print(e.reason)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import random
class Madlib:
'''
This class generates the madlib from word lists.
'''
def get_madlib(self):
madlib = """
Once there was a {0}. It {1} at the {2}.
Then because of its {3} it {4}. Wow! You sure are {5}!
Thanks! I {6} you very much.
"""
nouns = ['cheesecakes', 'bicycle', 'park', 'computer']
verbs = ['watched tv', 'voted', 'fell over']
adjectives = ['smelly', 'slimy', 'soft', 'loud']
output = madlib.format(
random.choice(nouns),
random.choice(verbs),
random.choice(nouns),
random.choice(nouns),
random.choice(verbs),
random.choice(adjectives),
random.choice(adjectives)
)
return output
|
normal
|
{
"blob_id": "2b23237e697cb4ca8f1013d7be343c70fba9541d",
"index": 6342,
"step-1": "<mask token>\n\n\nclass Madlib:\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Madlib:\n <mask token>\n\n def get_madlib(self):\n madlib = \"\"\"\n Once there was a {0}. It {1} at the {2}.\n Then because of its {3} it {4}. Wow! You sure are {5}!\n Thanks! I {6} you very much.\n \"\"\"\n nouns = ['cheesecakes', 'bicycle', 'park', 'computer']\n verbs = ['watched tv', 'voted', 'fell over']\n adjectives = ['smelly', 'slimy', 'soft', 'loud']\n output = madlib.format(random.choice(nouns), random.choice(verbs),\n random.choice(nouns), random.choice(nouns), random.choice(verbs\n ), random.choice(adjectives), random.choice(adjectives))\n return output\n",
"step-3": "<mask token>\n\n\nclass Madlib:\n \"\"\"\n This class generates the madlib from word lists.\n \"\"\"\n\n def get_madlib(self):\n madlib = \"\"\"\n Once there was a {0}. It {1} at the {2}.\n Then because of its {3} it {4}. Wow! You sure are {5}!\n Thanks! I {6} you very much.\n \"\"\"\n nouns = ['cheesecakes', 'bicycle', 'park', 'computer']\n verbs = ['watched tv', 'voted', 'fell over']\n adjectives = ['smelly', 'slimy', 'soft', 'loud']\n output = madlib.format(random.choice(nouns), random.choice(verbs),\n random.choice(nouns), random.choice(nouns), random.choice(verbs\n ), random.choice(adjectives), random.choice(adjectives))\n return output\n",
"step-4": "import random\n\n\nclass Madlib:\n \"\"\"\n This class generates the madlib from word lists.\n \"\"\"\n\n def get_madlib(self):\n madlib = \"\"\"\n Once there was a {0}. It {1} at the {2}.\n Then because of its {3} it {4}. Wow! You sure are {5}!\n Thanks! I {6} you very much.\n \"\"\"\n nouns = ['cheesecakes', 'bicycle', 'park', 'computer']\n verbs = ['watched tv', 'voted', 'fell over']\n adjectives = ['smelly', 'slimy', 'soft', 'loud']\n output = madlib.format(random.choice(nouns), random.choice(verbs),\n random.choice(nouns), random.choice(nouns), random.choice(verbs\n ), random.choice(adjectives), random.choice(adjectives))\n return output\n",
"step-5": "import random\n\n\nclass Madlib:\n '''\n This class generates the madlib from word lists.\n '''\n def get_madlib(self):\n madlib = \"\"\"\n Once there was a {0}. It {1} at the {2}.\n Then because of its {3} it {4}. Wow! You sure are {5}!\n Thanks! I {6} you very much.\n \"\"\"\n nouns = ['cheesecakes', 'bicycle', 'park', 'computer']\n verbs = ['watched tv', 'voted', 'fell over']\n adjectives = ['smelly', 'slimy', 'soft', 'loud']\n\n output = madlib.format(\n random.choice(nouns),\n random.choice(verbs),\n random.choice(nouns),\n random.choice(nouns),\n random.choice(verbs),\n random.choice(adjectives),\n random.choice(adjectives)\n )\n return output\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""Exercise 7.2. Encapsulate this loop in a function called square_root that takes a as a parameter,
chooses a reasonable value of x, and returns an estimate of the square root of a."""
def my_square_root(a, x):
    """Estimate the square root of a with Newton's method, starting from the initial guess x."""
    e = 0.0001
    while True:
        y = (x + a / x) / 2
        if abs(y - x) < e:
            return y
        x = y
a = input("Find square root of which number? ",)
x = input("What is your first guess?")
result = round(my_square_root(float(a),float(x)),3)
print("The square root of ",a,"is ",result)
|
normal
|
{
"blob_id": "c9f4ae94dc901d34a3c0fb4371c8d35a7fe94507",
"index": 5095,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef my_square_root(a, x):\n e = 0.0001\n while True:\n y = (x + a / x) / 2\n if abs(y - x) < e:\n return y\n break\n x = y\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef my_square_root(a, x):\n e = 0.0001\n while True:\n y = (x + a / x) / 2\n if abs(y - x) < e:\n return y\n break\n x = y\n\n\n<mask token>\nprint('The square root of ', a, 'is ', result)\n",
"step-4": "<mask token>\n\n\ndef my_square_root(a, x):\n e = 0.0001\n while True:\n y = (x + a / x) / 2\n if abs(y - x) < e:\n return y\n break\n x = y\n\n\na = input('Find square root of which number? ')\nx = input('What is your first guess?')\nresult = round(my_square_root(float(a), float(x)), 3)\nprint('The square root of ', a, 'is ', result)\n",
"step-5": "\"\"\"Exercise 7.2. Encapsulate this loop in a function called square_root that takes a as a parameter,\nchooses a reasonable value of x, and returns an estimate of the square root of a.\"\"\"\n\ndef my_square_root(a,x) :\n e = 0.0001\n while True :\n y=(x+a/x)/2\n if abs(y-x) < e :\n return y\n break\n x = y\n\na = input(\"Find square root of which number? \",)\nx = input(\"What is your first guess?\") \nresult = round(my_square_root(float(a),float(x)),3)\nprint(\"The square root of \",a,\"is \",result)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from src import npyscreen
from src.MainForm import MainForm
from src.ContactsForm import ContactsForm
from src.SendFileForm import SendFileForm
from src.MessageInfoForm import MessageInfoForm
from src.ForwardMessageForm import ForwardMessageForm
from src.RemoveMessageForm import RemoveMessageForm
class App(npyscreen.StandardApp):
def onStart(self):
self.MainForm = self.addForm("MAIN", MainForm)
self.ContactsForm = self.addForm("CONTACTS", ContactsForm)
self.SendFileForm = self.addForm("SEND_FILE", SendFileForm, lines=15)
self.MessageInfoForm = self.addForm("MESSAGE_INFO", MessageInfoForm)
self.ForwardMessageForm = self.addForm("FORWARD_MESSAGE", ForwardMessageForm)
self.RemoveMessageForm = self.addForm("REMOVE_MESSAGE", RemoveMessageForm, lines=5, columns=20)
|
normal
|
{
"blob_id": "dc2c429bae10ee14737583a3726eff8fde8306c7",
"index": 6940,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass App(npyscreen.StandardApp):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass App(npyscreen.StandardApp):\n\n def onStart(self):\n self.MainForm = self.addForm('MAIN', MainForm)\n self.ContactsForm = self.addForm('CONTACTS', ContactsForm)\n self.SendFileForm = self.addForm('SEND_FILE', SendFileForm, lines=15)\n self.MessageInfoForm = self.addForm('MESSAGE_INFO', MessageInfoForm)\n self.ForwardMessageForm = self.addForm('FORWARD_MESSAGE',\n ForwardMessageForm)\n self.RemoveMessageForm = self.addForm('REMOVE_MESSAGE',\n RemoveMessageForm, lines=5, columns=20)\n",
"step-4": "from src import npyscreen\nfrom src.MainForm import MainForm\nfrom src.ContactsForm import ContactsForm\nfrom src.SendFileForm import SendFileForm\nfrom src.MessageInfoForm import MessageInfoForm\nfrom src.ForwardMessageForm import ForwardMessageForm\nfrom src.RemoveMessageForm import RemoveMessageForm\n\n\nclass App(npyscreen.StandardApp):\n\n def onStart(self):\n self.MainForm = self.addForm('MAIN', MainForm)\n self.ContactsForm = self.addForm('CONTACTS', ContactsForm)\n self.SendFileForm = self.addForm('SEND_FILE', SendFileForm, lines=15)\n self.MessageInfoForm = self.addForm('MESSAGE_INFO', MessageInfoForm)\n self.ForwardMessageForm = self.addForm('FORWARD_MESSAGE',\n ForwardMessageForm)\n self.RemoveMessageForm = self.addForm('REMOVE_MESSAGE',\n RemoveMessageForm, lines=5, columns=20)\n",
"step-5": "from src import npyscreen\nfrom src.MainForm import MainForm\nfrom src.ContactsForm import ContactsForm\nfrom src.SendFileForm import SendFileForm\nfrom src.MessageInfoForm import MessageInfoForm\nfrom src.ForwardMessageForm import ForwardMessageForm\nfrom src.RemoveMessageForm import RemoveMessageForm\n\n\nclass App(npyscreen.StandardApp):\n\n def onStart(self):\n self.MainForm = self.addForm(\"MAIN\", MainForm)\n self.ContactsForm = self.addForm(\"CONTACTS\", ContactsForm)\n self.SendFileForm = self.addForm(\"SEND_FILE\", SendFileForm, lines=15)\n self.MessageInfoForm = self.addForm(\"MESSAGE_INFO\", MessageInfoForm)\n self.ForwardMessageForm = self.addForm(\"FORWARD_MESSAGE\", ForwardMessageForm)\n self.RemoveMessageForm = self.addForm(\"REMOVE_MESSAGE\", RemoveMessageForm, lines=5, columns=20)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python3
# Lesson_5 Activity 2 Mailroom Part 2
import os
def page_break():
""" Print a separator to distinguish new 'pages'"""
print("_"*75+"\n")
def get_amount():
"""Get valid donation amount from user"""
while True:
try:
amount = input("How much did they donate: ")
if str(amount).lower() == 'exit':
                return amount.lower()
else:
return float(amount)
except ValueError:
print("you have made an invalid choice, try again.")
def get_key(donor_chart):
""" Return key for sorted function """
return(sum(donor_chart[1]))
def menu_page():
""" Return valid menu option from user """
while True:
try:
print("Please choose one of the following options(1,2,3):"
"\n1. Send a Thank you. \n2. Create a report"
"\n3. Send Letters to Everyone \n4. Quit")
option = int(input('--->'))
except ValueError:
print("You have made an invalid choice, try again.")
page_break()
return option
def send_thanks():
""" Send Thanks """
page_break()
while True:
list_names = [item[0] for item in donor_chart.items()]
try:
print("To whom would you like to say thank you?\n"
"(type \"list\" for a full list of names or"
"\"exit\" to return to the menu)")
name = input("--->")
except ValueError:
print("you have made an invalid choice, try again.")
page_break()
continue
if name == 'list':
print(("{}\n"*len(list_names)).format(*list_names))
continue
elif name in list_names:
amount = get_amount()
new_donor = False
elif name.lower() == 'exit':
break
else:
addname = input("The name you selected is not in the list,"
" would you like to add it(y/n)? ")
if addname[0].lower() == 'y':
amount = get_amount()
new_donor = True
elif addname.lower() == 'exit':
break
else:
print("\nName was not added, try again\n")
continue
if amount == "exit":
break
add_donation(name, amount, new_donor)
print("\nDear {} \nThank you for your generous donation of ${:.2f}!!\n"
"Now all of the kittens will get "
"to eat this year".format(name, amount))
break
def create_report():
""" Create Report """
page_break()
list_names = [item[0] for item in donor_chart.items()]
new_list = []
for donor in donor_chart.items():
sum_don = sum(donor[1])
new_list.append(sum_don)
col_lab = ["Donor Name", "Total Given", "Num Gifts", "Average Gift"]
max_name = max([len(x) for x in list_names])
max_don = []
for don in donor_chart.items():
max_don.append(max(don[1]))
max_donl = len(str(max(max_don)))
max_gift = len(col_lab[2])
if max_donl < len(col_lab[1]):
max_donl = len(col_lab[1])
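    # build a header format string whose column widths adapt to the longest donor name and donation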
format_col = "\n{:<" + "{}".format(max_name+5) + "}|{:^"
format_col += "{}".format(max_donl+5)
format_col += "}|{:^" + "{}".format(max_gift+5)
format_col += "}|{:>" + "{}".format(max_donl+5) + "}"
print(format_col.format(*col_lab))
print("-"*len(format_col.format(*col_lab)))
sorted_list = sorted(donor_chart.items(), key=get_key, reverse=True)
for donor in sorted_list:
num_gifts = len(donor[1])
avg_gift = sum(donor[1])/num_gifts
format_item = "{:<" + "{}".format(max_name+5) + "}${:>"
format_item += "{}".format(max_donl+5) + ".2f}{:>"
format_item += "{}".format(max_gift+5) + "d} ${:>"
format_item += "{}".format(max_donl+5) + ".2f}"
print(format_item.format(donor[0], sum(donor[1]), num_gifts, avg_gift))
def send_letters():
""" Write letters to each donor in the donor chart and
save them in a user specified directory """
while True:
try:
dir_path = input("Please type the desired directory "
"to save the letters: ")
letter_form = ("Dear {},\n\n\tThank you for your very "
"kind donation of ${:.2f}!")
letter_form += ("\n\n\tNow all of the kittens will "
"get to eat this year!")
letter_form += ("\n\n\t\t\t\t Cheers! \n\t\t\t\t "
"-The Team")
if dir_path.lower() == "Exit":
break
if not os.path.exists(dir_path):
print("That is not a valid directory, using working directory")
dir_path = os.getcwd()
for name, donation in donor_chart.items():
file_name = ("{}.txt".format(name))
path_name = dir_path + "/" + file_name
with open(path_name, 'w') as file:
file.write(letter_form.format(name, sum(donation)))
break
except ValueError:
print("\nsomething went wrong please try again: ")
def add_donation(name, amount, donor_bool):
""" add a donation for a new or existing donor """
if donor_bool is False:
        donor_chart[name].append(amount)
else:
donor_chart.update({name: [amount]})
return
def menu_quit():
""" return quit for menus """
return "Quit"
if __name__ == '__main__':
donor_chart = {"Justin Thyme": [1, 1, 1],
"Beau Andarrow": [207.121324, 400.321234, 12345.001234],
"Crystal Clearwater": [80082],
"Harry Shins": [1.00, 2.00, 3.00],
"Bob Zuruncle": [0.53, 7.00],
"Al Kaseltzer": [1010101, 666.00],
"Joe Somebody": [25]}
options = range(1, 5)
menus = (send_thanks, create_report, send_letters, menu_quit)
menu_dict = dict(zip(options, menus))
option = 0
while True:
page_break()
try:
option = menu_page()
if menu_dict[option]() == "Quit":
break
except KeyError:
print("You have made an invalid choice, try again.")
page_break()
|
normal
|
{
"blob_id": "8a192fc08a65c80b8733a9d07374156c09f36598",
"index": 2823,
"step-1": "<mask token>\n\n\ndef get_amount():\n \"\"\"Get valid donation amount from user\"\"\"\n while True:\n try:\n amount = input('How much did they donate: ')\n if str(amount).lower() == 'exit':\n return amount\n else:\n return float(amount)\n except ValueError:\n print('you have made an invalid choice, try again.')\n\n\ndef get_key(donor_chart):\n \"\"\" Return key for sorted function \"\"\"\n return sum(donor_chart[1])\n\n\ndef menu_page():\n \"\"\" Return valid menu option from user \"\"\"\n while True:\n try:\n print(\n \"\"\"Please choose one of the following options(1,2,3):\n1. Send a Thank you. \n2. Create a report\n3. Send Letters to Everyone \n4. Quit\"\"\"\n )\n option = int(input('--->'))\n except ValueError:\n print('You have made an invalid choice, try again.')\n page_break()\n return option\n\n\ndef send_thanks():\n \"\"\" Send Thanks \"\"\"\n page_break()\n while True:\n list_names = [item[0] for item in donor_chart.items()]\n try:\n print(\n \"\"\"To whom would you like to say thank you?\n(type \"list\" for a full list of names or\"exit\" to return to the menu)\"\"\"\n )\n name = input('--->')\n except ValueError:\n print('you have made an invalid choice, try again.')\n page_break()\n continue\n if name == 'list':\n print(('{}\\n' * len(list_names)).format(*list_names))\n continue\n elif name in list_names:\n amount = get_amount()\n new_donor = False\n elif name.lower() == 'exit':\n break\n else:\n addname = input(\n 'The name you selected is not in the list, would you like to add it(y/n)? '\n )\n if addname[0].lower() == 'y':\n amount = get_amount()\n new_donor = True\n elif addname.lower() == 'exit':\n break\n else:\n print('\\nName was not added, try again\\n')\n continue\n if amount == 'exit':\n break\n add_donation(name, amount, new_donor)\n print(\n \"\"\"\nDear {} \nThank you for your generous donation of ${:.2f}!!\nNow all of the kittens will get to eat this year\"\"\"\n .format(name, amount))\n break\n\n\n<mask token>\n\n\ndef send_letters():\n \"\"\" Write letters to each donor in the donor chart and\n save them in a user specified directory \"\"\"\n while True:\n try:\n dir_path = input(\n 'Please type the desired directory to save the letters: ')\n letter_form = (\n 'Dear {},\\n\\n\\tThank you for your very kind donation of ${:.2f}!'\n )\n letter_form += (\n '\\n\\n\\tNow all of the kittens will get to eat this year!')\n letter_form += '\\n\\n\\t\\t\\t\\t Cheers! \\n\\t\\t\\t\\t -The Team'\n if dir_path.lower() == 'Exit':\n break\n if not os.path.exists(dir_path):\n print('That is not a valid directory, using working directory')\n dir_path = os.getcwd()\n for name, donation in donor_chart.items():\n file_name = '{}.txt'.format(name)\n path_name = dir_path + '/' + file_name\n with open(path_name, 'w') as file:\n file.write(letter_form.format(name, sum(donation)))\n break\n except ValueError:\n print('\\nsomething went wrong please try again: ')\n\n\n<mask token>\n\n\ndef menu_quit():\n \"\"\" return quit for menus \"\"\"\n return 'Quit'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_amount():\n \"\"\"Get valid donation amount from user\"\"\"\n while True:\n try:\n amount = input('How much did they donate: ')\n if str(amount).lower() == 'exit':\n return amount\n else:\n return float(amount)\n except ValueError:\n print('you have made an invalid choice, try again.')\n\n\ndef get_key(donor_chart):\n \"\"\" Return key for sorted function \"\"\"\n return sum(donor_chart[1])\n\n\ndef menu_page():\n \"\"\" Return valid menu option from user \"\"\"\n while True:\n try:\n print(\n \"\"\"Please choose one of the following options(1,2,3):\n1. Send a Thank you. \n2. Create a report\n3. Send Letters to Everyone \n4. Quit\"\"\"\n )\n option = int(input('--->'))\n except ValueError:\n print('You have made an invalid choice, try again.')\n page_break()\n return option\n\n\ndef send_thanks():\n \"\"\" Send Thanks \"\"\"\n page_break()\n while True:\n list_names = [item[0] for item in donor_chart.items()]\n try:\n print(\n \"\"\"To whom would you like to say thank you?\n(type \"list\" for a full list of names or\"exit\" to return to the menu)\"\"\"\n )\n name = input('--->')\n except ValueError:\n print('you have made an invalid choice, try again.')\n page_break()\n continue\n if name == 'list':\n print(('{}\\n' * len(list_names)).format(*list_names))\n continue\n elif name in list_names:\n amount = get_amount()\n new_donor = False\n elif name.lower() == 'exit':\n break\n else:\n addname = input(\n 'The name you selected is not in the list, would you like to add it(y/n)? '\n )\n if addname[0].lower() == 'y':\n amount = get_amount()\n new_donor = True\n elif addname.lower() == 'exit':\n break\n else:\n print('\\nName was not added, try again\\n')\n continue\n if amount == 'exit':\n break\n add_donation(name, amount, new_donor)\n print(\n \"\"\"\nDear {} \nThank you for your generous donation of ${:.2f}!!\nNow all of the kittens will get to eat this year\"\"\"\n .format(name, amount))\n break\n\n\n<mask token>\n\n\ndef send_letters():\n \"\"\" Write letters to each donor in the donor chart and\n save them in a user specified directory \"\"\"\n while True:\n try:\n dir_path = input(\n 'Please type the desired directory to save the letters: ')\n letter_form = (\n 'Dear {},\\n\\n\\tThank you for your very kind donation of ${:.2f}!'\n )\n letter_form += (\n '\\n\\n\\tNow all of the kittens will get to eat this year!')\n letter_form += '\\n\\n\\t\\t\\t\\t Cheers! \\n\\t\\t\\t\\t -The Team'\n if dir_path.lower() == 'Exit':\n break\n if not os.path.exists(dir_path):\n print('That is not a valid directory, using working directory')\n dir_path = os.getcwd()\n for name, donation in donor_chart.items():\n file_name = '{}.txt'.format(name)\n path_name = dir_path + '/' + file_name\n with open(path_name, 'w') as file:\n file.write(letter_form.format(name, sum(donation)))\n break\n except ValueError:\n print('\\nsomething went wrong please try again: ')\n\n\ndef add_donation(name, amount, donor_bool):\n \"\"\" add a donation for a new or existing donor \"\"\"\n if donor_bool is False:\n donor_chart.get(list_names.index(name), [1]).append(amount)\n else:\n donor_chart.update({name: [amount]})\n return\n\n\ndef menu_quit():\n \"\"\" return quit for menus \"\"\"\n return 'Quit'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef page_break():\n \"\"\" Print a separator to distinguish new 'pages'\"\"\"\n print('_' * 75 + '\\n')\n\n\ndef get_amount():\n \"\"\"Get valid donation amount from user\"\"\"\n while True:\n try:\n amount = input('How much did they donate: ')\n if str(amount).lower() == 'exit':\n return amount\n else:\n return float(amount)\n except ValueError:\n print('you have made an invalid choice, try again.')\n\n\ndef get_key(donor_chart):\n \"\"\" Return key for sorted function \"\"\"\n return sum(donor_chart[1])\n\n\ndef menu_page():\n \"\"\" Return valid menu option from user \"\"\"\n while True:\n try:\n print(\n \"\"\"Please choose one of the following options(1,2,3):\n1. Send a Thank you. \n2. Create a report\n3. Send Letters to Everyone \n4. Quit\"\"\"\n )\n option = int(input('--->'))\n except ValueError:\n print('You have made an invalid choice, try again.')\n page_break()\n return option\n\n\ndef send_thanks():\n \"\"\" Send Thanks \"\"\"\n page_break()\n while True:\n list_names = [item[0] for item in donor_chart.items()]\n try:\n print(\n \"\"\"To whom would you like to say thank you?\n(type \"list\" for a full list of names or\"exit\" to return to the menu)\"\"\"\n )\n name = input('--->')\n except ValueError:\n print('you have made an invalid choice, try again.')\n page_break()\n continue\n if name == 'list':\n print(('{}\\n' * len(list_names)).format(*list_names))\n continue\n elif name in list_names:\n amount = get_amount()\n new_donor = False\n elif name.lower() == 'exit':\n break\n else:\n addname = input(\n 'The name you selected is not in the list, would you like to add it(y/n)? '\n )\n if addname[0].lower() == 'y':\n amount = get_amount()\n new_donor = True\n elif addname.lower() == 'exit':\n break\n else:\n print('\\nName was not added, try again\\n')\n continue\n if amount == 'exit':\n break\n add_donation(name, amount, new_donor)\n print(\n \"\"\"\nDear {} \nThank you for your generous donation of ${:.2f}!!\nNow all of the kittens will get to eat this year\"\"\"\n .format(name, amount))\n break\n\n\n<mask token>\n\n\ndef send_letters():\n \"\"\" Write letters to each donor in the donor chart and\n save them in a user specified directory \"\"\"\n while True:\n try:\n dir_path = input(\n 'Please type the desired directory to save the letters: ')\n letter_form = (\n 'Dear {},\\n\\n\\tThank you for your very kind donation of ${:.2f}!'\n )\n letter_form += (\n '\\n\\n\\tNow all of the kittens will get to eat this year!')\n letter_form += '\\n\\n\\t\\t\\t\\t Cheers! \\n\\t\\t\\t\\t -The Team'\n if dir_path.lower() == 'Exit':\n break\n if not os.path.exists(dir_path):\n print('That is not a valid directory, using working directory')\n dir_path = os.getcwd()\n for name, donation in donor_chart.items():\n file_name = '{}.txt'.format(name)\n path_name = dir_path + '/' + file_name\n with open(path_name, 'w') as file:\n file.write(letter_form.format(name, sum(donation)))\n break\n except ValueError:\n print('\\nsomething went wrong please try again: ')\n\n\ndef add_donation(name, amount, donor_bool):\n \"\"\" add a donation for a new or existing donor \"\"\"\n if donor_bool is False:\n donor_chart.get(list_names.index(name), [1]).append(amount)\n else:\n donor_chart.update({name: [amount]})\n return\n\n\ndef menu_quit():\n \"\"\" return quit for menus \"\"\"\n return 'Quit'\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef page_break():\n \"\"\" Print a separator to distinguish new 'pages'\"\"\"\n print('_' * 75 + '\\n')\n\n\ndef get_amount():\n \"\"\"Get valid donation amount from user\"\"\"\n while True:\n try:\n amount = input('How much did they donate: ')\n if str(amount).lower() == 'exit':\n return amount\n else:\n return float(amount)\n except ValueError:\n print('you have made an invalid choice, try again.')\n\n\ndef get_key(donor_chart):\n \"\"\" Return key for sorted function \"\"\"\n return sum(donor_chart[1])\n\n\ndef menu_page():\n \"\"\" Return valid menu option from user \"\"\"\n while True:\n try:\n print(\n \"\"\"Please choose one of the following options(1,2,3):\n1. Send a Thank you. \n2. Create a report\n3. Send Letters to Everyone \n4. Quit\"\"\"\n )\n option = int(input('--->'))\n except ValueError:\n print('You have made an invalid choice, try again.')\n page_break()\n return option\n\n\ndef send_thanks():\n \"\"\" Send Thanks \"\"\"\n page_break()\n while True:\n list_names = [item[0] for item in donor_chart.items()]\n try:\n print(\n \"\"\"To whom would you like to say thank you?\n(type \"list\" for a full list of names or\"exit\" to return to the menu)\"\"\"\n )\n name = input('--->')\n except ValueError:\n print('you have made an invalid choice, try again.')\n page_break()\n continue\n if name == 'list':\n print(('{}\\n' * len(list_names)).format(*list_names))\n continue\n elif name in list_names:\n amount = get_amount()\n new_donor = False\n elif name.lower() == 'exit':\n break\n else:\n addname = input(\n 'The name you selected is not in the list, would you like to add it(y/n)? '\n )\n if addname[0].lower() == 'y':\n amount = get_amount()\n new_donor = True\n elif addname.lower() == 'exit':\n break\n else:\n print('\\nName was not added, try again\\n')\n continue\n if amount == 'exit':\n break\n add_donation(name, amount, new_donor)\n print(\n \"\"\"\nDear {} \nThank you for your generous donation of ${:.2f}!!\nNow all of the kittens will get to eat this year\"\"\"\n .format(name, amount))\n break\n\n\ndef create_report():\n \"\"\" Create Report \"\"\"\n page_break()\n list_names = [item[0] for item in donor_chart.items()]\n new_list = []\n for donor in donor_chart.items():\n sum_don = sum(donor[1])\n new_list.append(sum_don)\n col_lab = ['Donor Name', 'Total Given', 'Num Gifts', 'Average Gift']\n max_name = max([len(x) for x in list_names])\n max_don = []\n for don in donor_chart.items():\n max_don.append(max(don[1]))\n max_donl = len(str(max(max_don)))\n max_gift = len(col_lab[2])\n if max_donl < len(col_lab[1]):\n max_donl = len(col_lab[1])\n format_col = '\\n{:<' + '{}'.format(max_name + 5) + '}|{:^'\n format_col += '{}'.format(max_donl + 5)\n format_col += '}|{:^' + '{}'.format(max_gift + 5)\n format_col += '}|{:>' + '{}'.format(max_donl + 5) + '}'\n print(format_col.format(*col_lab))\n print('-' * len(format_col.format(*col_lab)))\n sorted_list = sorted(donor_chart.items(), key=get_key, reverse=True)\n for donor in sorted_list:\n num_gifts = len(donor[1])\n avg_gift = sum(donor[1]) / num_gifts\n format_item = '{:<' + '{}'.format(max_name + 5) + '}${:>'\n format_item += '{}'.format(max_donl + 5) + '.2f}{:>'\n format_item += '{}'.format(max_gift + 5) + 'd} ${:>'\n format_item += '{}'.format(max_donl + 5) + '.2f}'\n print(format_item.format(donor[0], sum(donor[1]), num_gifts, avg_gift))\n\n\ndef send_letters():\n \"\"\" Write letters to each donor in the donor chart and\n save them in a user specified directory \"\"\"\n while True:\n try:\n 
dir_path = input(\n 'Please type the desired directory to save the letters: ')\n letter_form = (\n 'Dear {},\\n\\n\\tThank you for your very kind donation of ${:.2f}!'\n )\n letter_form += (\n '\\n\\n\\tNow all of the kittens will get to eat this year!')\n letter_form += '\\n\\n\\t\\t\\t\\t Cheers! \\n\\t\\t\\t\\t -The Team'\n if dir_path.lower() == 'Exit':\n break\n if not os.path.exists(dir_path):\n print('That is not a valid directory, using working directory')\n dir_path = os.getcwd()\n for name, donation in donor_chart.items():\n file_name = '{}.txt'.format(name)\n path_name = dir_path + '/' + file_name\n with open(path_name, 'w') as file:\n file.write(letter_form.format(name, sum(donation)))\n break\n except ValueError:\n print('\\nsomething went wrong please try again: ')\n\n\ndef add_donation(name, amount, donor_bool):\n \"\"\" add a donation for a new or existing donor \"\"\"\n if donor_bool is False:\n donor_chart.get(list_names.index(name), [1]).append(amount)\n else:\n donor_chart.update({name: [amount]})\n return\n\n\ndef menu_quit():\n \"\"\" return quit for menus \"\"\"\n return 'Quit'\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/env python3\n\n# Lesson_5 Activity 2 Mailroom Part 2\n\nimport os\n\n\ndef page_break():\n \"\"\" Print a separator to distinguish new 'pages'\"\"\"\n print(\"_\"*75+\"\\n\")\n\n\ndef get_amount():\n \"\"\"Get valid donation amount from user\"\"\"\n while True:\n try:\n amount = input(\"How much did they donate: \")\n if str(amount).lower() == 'exit':\n return amount\n else:\n return float(amount)\n except ValueError:\n print(\"you have made an invalid choice, try again.\")\n\n\ndef get_key(donor_chart):\n \"\"\" Return key for sorted function \"\"\"\n return(sum(donor_chart[1]))\n\n\ndef menu_page():\n \"\"\" Return valid menu option from user \"\"\"\n while True:\n try:\n print(\"Please choose one of the following options(1,2,3):\"\n \"\\n1. Send a Thank you. \\n2. Create a report\"\n \"\\n3. Send Letters to Everyone \\n4. Quit\")\n option = int(input('--->'))\n except ValueError:\n print(\"You have made an invalid choice, try again.\")\n page_break()\n return option\n\n\ndef send_thanks():\n \"\"\" Send Thanks \"\"\"\n page_break()\n while True:\n list_names = [item[0] for item in donor_chart.items()]\n try:\n print(\"To whom would you like to say thank you?\\n\"\n \"(type \\\"list\\\" for a full list of names or\"\n \"\\\"exit\\\" to return to the menu)\")\n name = input(\"--->\")\n except ValueError:\n print(\"you have made an invalid choice, try again.\")\n page_break()\n continue\n if name == 'list':\n print((\"{}\\n\"*len(list_names)).format(*list_names))\n continue\n elif name in list_names:\n amount = get_amount()\n new_donor = False\n elif name.lower() == 'exit':\n break\n else:\n addname = input(\"The name you selected is not in the list,\"\n \" would you like to add it(y/n)? \")\n if addname[0].lower() == 'y':\n amount = get_amount()\n new_donor = True\n elif addname.lower() == 'exit':\n break\n else:\n print(\"\\nName was not added, try again\\n\")\n continue\n if amount == \"exit\":\n break\n add_donation(name, amount, new_donor)\n print(\"\\nDear {} \\nThank you for your generous donation of ${:.2f}!!\\n\"\n \"Now all of the kittens will get \"\n \"to eat this year\".format(name, amount))\n break\n\n\ndef create_report():\n \"\"\" Create Report \"\"\"\n page_break()\n list_names = [item[0] for item in donor_chart.items()]\n new_list = []\n for donor in donor_chart.items():\n sum_don = sum(donor[1])\n new_list.append(sum_don)\n col_lab = [\"Donor Name\", \"Total Given\", \"Num Gifts\", \"Average Gift\"]\n max_name = max([len(x) for x in list_names])\n max_don = []\n for don in donor_chart.items():\n max_don.append(max(don[1]))\n max_donl = len(str(max(max_don)))\n max_gift = len(col_lab[2])\n if max_donl < len(col_lab[1]):\n max_donl = len(col_lab[1])\n format_col = \"\\n{:<\" + \"{}\".format(max_name+5) + \"}|{:^\"\n format_col += \"{}\".format(max_donl+5)\n format_col += \"}|{:^\" + \"{}\".format(max_gift+5)\n format_col += \"}|{:>\" + \"{}\".format(max_donl+5) + \"}\"\n print(format_col.format(*col_lab))\n print(\"-\"*len(format_col.format(*col_lab)))\n sorted_list = sorted(donor_chart.items(), key=get_key, reverse=True)\n for donor in sorted_list:\n num_gifts = len(donor[1])\n avg_gift = sum(donor[1])/num_gifts\n format_item = \"{:<\" + \"{}\".format(max_name+5) + \"}${:>\"\n format_item += \"{}\".format(max_donl+5) + \".2f}{:>\"\n format_item += \"{}\".format(max_gift+5) + \"d} ${:>\"\n format_item += \"{}\".format(max_donl+5) + \".2f}\"\n print(format_item.format(donor[0], sum(donor[1]), num_gifts, avg_gift))\n\n\ndef send_letters():\n \"\"\" 
Write letters to each donor in the donor chart and\n save them in a user specified directory \"\"\"\n while True:\n try:\n dir_path = input(\"Please type the desired directory \"\n \"to save the letters: \")\n letter_form = (\"Dear {},\\n\\n\\tThank you for your very \"\n \"kind donation of ${:.2f}!\")\n letter_form += (\"\\n\\n\\tNow all of the kittens will \"\n \"get to eat this year!\")\n letter_form += (\"\\n\\n\\t\\t\\t\\t Cheers! \\n\\t\\t\\t\\t \"\n \"-The Team\")\n if dir_path.lower() == \"Exit\":\n break\n if not os.path.exists(dir_path):\n print(\"That is not a valid directory, using working directory\")\n dir_path = os.getcwd()\n for name, donation in donor_chart.items():\n file_name = (\"{}.txt\".format(name))\n path_name = dir_path + \"/\" + file_name\n with open(path_name, 'w') as file:\n file.write(letter_form.format(name, sum(donation)))\n break\n except ValueError:\n print(\"\\nsomething went wrong please try again: \")\n\n\ndef add_donation(name, amount, donor_bool):\n \"\"\" add a donation for a new or existing donor \"\"\"\n if donor_bool is False:\n donor_chart.get(list_names.index(name), [1]).append(amount)\n else:\n donor_chart.update({name: [amount]})\n return\n\n\ndef menu_quit():\n \"\"\" return quit for menus \"\"\"\n return \"Quit\"\n\nif __name__ == '__main__':\n donor_chart = {\"Justin Thyme\": [1, 1, 1],\n \"Beau Andarrow\": [207.121324, 400.321234, 12345.001234],\n \"Crystal Clearwater\": [80082],\n \"Harry Shins\": [1.00, 2.00, 3.00],\n \"Bob Zuruncle\": [0.53, 7.00],\n \"Al Kaseltzer\": [1010101, 666.00],\n \"Joe Somebody\": [25]}\n\n options = range(1, 5)\n menus = (send_thanks, create_report, send_letters, menu_quit)\n menu_dict = dict(zip(options, menus))\n\n option = 0\n while True:\n page_break()\n try:\n option = menu_page()\n if menu_dict[option]() == \"Quit\":\n break\n except KeyError:\n print(\"You have made an invalid choice, try again.\")\n page_break()\n",
"step-ids": [
6,
7,
8,
9,
12
]
}
|
[
6,
7,
8,
9,
12
] |
from tqdm import tqdm
import fasttext
import codecs
import os
import hashlib
import time
def make_save_folder(prefix="", add_suffix=True) -> str:
"""
    1. Build a string whose suffix is a hash of the current time
    2. Create a folder with that name if it does not already exist
    :param prefix: label for the family of save folders
    :param add_suffix: whether to append the hash suffix, True: append, False: do not append
    :return: str, name of the folder the model will be saved to
"""
if prefix == "":
prefix = "./fast_text"
if add_suffix:
prefix = f"{prefix}_{hashlib.sha1(time.ctime().encode()).hexdigest()}"
if not prefix.endswith("/"):
prefix += "/"
if not os.path.exists(prefix):
os.mkdir(prefix)
return prefix
def make_fast_text(tokens_list: list, num_dimension: int, save_folder="", min_count=1) -> bool:
"""
    In modern NLP, using word embeddings has become the de facto standard.
    A word embedding places each word in a high-dimensional space according to its meaning.
    Meaning lives in the vector from the origin, so words pointing in the same direction are semantically close.
    Theoretically, following the distributional hypothesis, a word's meaning is defined by the words that appear near it.
    The word2vec method from 2013 came first; the same researchers later devised fasttext.
    They differ in the following points [here](http://54.92.5.12/2019/05/09/fasttext%E3%81%A8word2vec%E3%81%AE%E9%81%95%E3%81%84/)
    - fasttext also learns meaning from subwords, the substrings of a word
    - fasttext is dramatically faster
    Here we take a list of token lists, one per sentence, and save a fasttext model.
    Word order matters in each token list, so do not break the order of the context.
    :param tokens_list: list of list of str, list of token lists
    :param num_dimension: int, word embedding space dimension
    :param save_folder: str, path to save folder
    :param min_count: int, minimum word frequency fasttext keeps in the model; 1 keeps words that appear only once in the whole corpus, 3 or more is typical
:return: bool
"""
# arrange save folder
save_folder = make_save_folder(save_folder, True if save_folder == "" else False)
    # the input is a text file of space-separated tokens
file_name_input_text = f"{save_folder}delete_me_wakati.txt"
wakati = "\n".join([" ".join(tokens) for tokens in tqdm(tokens_list, desc="分かち書き @ fast text")])
with codecs.open(file_name_input_text, "w", "utf-8") as f:
f.write(wakati)
    # build the model
model_fast_text = fasttext.train_unsupervised(file_name_input_text, model="skipgram", dim=num_dimension,
minCount=min_count)
# save
model_fast_text.save_model(f"{save_folder}model_fast_text.bin")
return True
|
normal
|
{
"blob_id": "14e304f30364932910986f2dda48223b6d4b01c0",
"index": 8372,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef make_save_folder(prefix='', add_suffix=True) ->str:\n \"\"\"\n 1. 現在時刻のハッシュをsuffixにした文字列の生成\n 2. 生成した文字列のフォルダが無かったら作る\n :param prefix:save folderの系統ラベル\n :param add_suffix: suffixを付与するかを選ぶフラグ, True: 付与, False: 付与しない\n :return: str, モデルのセーブ先フォルダ名\n \"\"\"\n if prefix == '':\n prefix = './fast_text'\n if add_suffix:\n prefix = f'{prefix}_{hashlib.sha1(time.ctime().encode()).hexdigest()}'\n if not prefix.endswith('/'):\n prefix += '/'\n if not os.path.exists(prefix):\n os.mkdir(prefix)\n return prefix\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef make_save_folder(prefix='', add_suffix=True) ->str:\n \"\"\"\n 1. 現在時刻のハッシュをsuffixにした文字列の生成\n 2. 生成した文字列のフォルダが無かったら作る\n :param prefix:save folderの系統ラベル\n :param add_suffix: suffixを付与するかを選ぶフラグ, True: 付与, False: 付与しない\n :return: str, モデルのセーブ先フォルダ名\n \"\"\"\n if prefix == '':\n prefix = './fast_text'\n if add_suffix:\n prefix = f'{prefix}_{hashlib.sha1(time.ctime().encode()).hexdigest()}'\n if not prefix.endswith('/'):\n prefix += '/'\n if not os.path.exists(prefix):\n os.mkdir(prefix)\n return prefix\n\n\ndef make_fast_text(tokens_list: list, num_dimension: int, save_folder='',\n min_count=1) ->bool:\n \"\"\"\n 現在の言語処理では、単語分散表現を使うのがデファクトになった\n 単語分散表現は、単語を意味に応じて高次元空間に配置する手法である\n 原点からのベクトルに意味をもたせているので、同じ向きを向いている単語同士は意味が近い\n 理論的には分布仮説に基づいて、任意の単語に意味を、出現位置近傍の単語で規定する\n 2013年のword2vecという手法が初出で、後年に同じ研究者がfasttextを考案した\n 以下の点が異なる [here](http://54.92.5.12/2019/05/09/fasttext%E3%81%A8word2vec%E3%81%AE%E9%81%95%E3%81%84/)\n - subwordと呼ばれる単語の部分文字列でも意味を獲得する\n - fasttextのほうが圧倒的に早い\n\n ここでは、文を分割したtokenのlistのlistを受け取って、fasttextのモデルをセーブする\n tokenのlistは語順が重要なので、文脈の順を崩さないこと\n :param tokens_list: list of list of str, tokenのリストのリスト\n :param num_dimension: int, word embedding space dimension\n :param save_folder: str, path to save folder\n :param min_count: int, fasttextがモデルに反映する最小の単語出現頻度, 1なら全文書中に1度だけ出現, 3以上が相場\n :return: bool\n \"\"\"\n save_folder = make_save_folder(save_folder, True if save_folder == '' else\n False)\n file_name_input_text = f'{save_folder}delete_me_wakati.txt'\n wakati = '\\n'.join([' '.join(tokens) for tokens in tqdm(tokens_list,\n desc='分かち書き @ fast text')])\n with codecs.open(file_name_input_text, 'w', 'utf-8') as f:\n f.write(wakati)\n model_fast_text = fasttext.train_unsupervised(file_name_input_text,\n model='skipgram', dim=num_dimension, minCount=min_count)\n model_fast_text.save_model(f'{save_folder}model_fast_text.bin')\n return True\n",
"step-4": "from tqdm import tqdm\nimport fasttext\nimport codecs\nimport os\nimport hashlib\nimport time\n\n\ndef make_save_folder(prefix='', add_suffix=True) ->str:\n \"\"\"\n 1. 現在時刻のハッシュをsuffixにした文字列の生成\n 2. 生成した文字列のフォルダが無かったら作る\n :param prefix:save folderの系統ラベル\n :param add_suffix: suffixを付与するかを選ぶフラグ, True: 付与, False: 付与しない\n :return: str, モデルのセーブ先フォルダ名\n \"\"\"\n if prefix == '':\n prefix = './fast_text'\n if add_suffix:\n prefix = f'{prefix}_{hashlib.sha1(time.ctime().encode()).hexdigest()}'\n if not prefix.endswith('/'):\n prefix += '/'\n if not os.path.exists(prefix):\n os.mkdir(prefix)\n return prefix\n\n\ndef make_fast_text(tokens_list: list, num_dimension: int, save_folder='',\n min_count=1) ->bool:\n \"\"\"\n 現在の言語処理では、単語分散表現を使うのがデファクトになった\n 単語分散表現は、単語を意味に応じて高次元空間に配置する手法である\n 原点からのベクトルに意味をもたせているので、同じ向きを向いている単語同士は意味が近い\n 理論的には分布仮説に基づいて、任意の単語に意味を、出現位置近傍の単語で規定する\n 2013年のword2vecという手法が初出で、後年に同じ研究者がfasttextを考案した\n 以下の点が異なる [here](http://54.92.5.12/2019/05/09/fasttext%E3%81%A8word2vec%E3%81%AE%E9%81%95%E3%81%84/)\n - subwordと呼ばれる単語の部分文字列でも意味を獲得する\n - fasttextのほうが圧倒的に早い\n\n ここでは、文を分割したtokenのlistのlistを受け取って、fasttextのモデルをセーブする\n tokenのlistは語順が重要なので、文脈の順を崩さないこと\n :param tokens_list: list of list of str, tokenのリストのリスト\n :param num_dimension: int, word embedding space dimension\n :param save_folder: str, path to save folder\n :param min_count: int, fasttextがモデルに反映する最小の単語出現頻度, 1なら全文書中に1度だけ出現, 3以上が相場\n :return: bool\n \"\"\"\n save_folder = make_save_folder(save_folder, True if save_folder == '' else\n False)\n file_name_input_text = f'{save_folder}delete_me_wakati.txt'\n wakati = '\\n'.join([' '.join(tokens) for tokens in tqdm(tokens_list,\n desc='分かち書き @ fast text')])\n with codecs.open(file_name_input_text, 'w', 'utf-8') as f:\n f.write(wakati)\n model_fast_text = fasttext.train_unsupervised(file_name_input_text,\n model='skipgram', dim=num_dimension, minCount=min_count)\n model_fast_text.save_model(f'{save_folder}model_fast_text.bin')\n return True\n",
"step-5": "from tqdm import tqdm\nimport fasttext\nimport codecs\nimport os\nimport hashlib\nimport time\n\n\ndef make_save_folder(prefix=\"\", add_suffix=True) -> str:\n \"\"\"\n 1. 現在時刻のハッシュをsuffixにした文字列の生成\n 2. 生成した文字列のフォルダが無かったら作る\n :param prefix:save folderの系統ラベル\n :param add_suffix: suffixを付与するかを選ぶフラグ, True: 付与, False: 付与しない\n :return: str, モデルのセーブ先フォルダ名\n \"\"\"\n if prefix == \"\":\n prefix = \"./fast_text\"\n if add_suffix:\n prefix = f\"{prefix}_{hashlib.sha1(time.ctime().encode()).hexdigest()}\"\n if not prefix.endswith(\"/\"):\n prefix += \"/\"\n if not os.path.exists(prefix):\n os.mkdir(prefix)\n return prefix\n\n\ndef make_fast_text(tokens_list: list, num_dimension: int, save_folder=\"\", min_count=1) -> bool:\n \"\"\"\n 現在の言語処理では、単語分散表現を使うのがデファクトになった\n 単語分散表現は、単語を意味に応じて高次元空間に配置する手法である\n 原点からのベクトルに意味をもたせているので、同じ向きを向いている単語同士は意味が近い\n 理論的には分布仮説に基づいて、任意の単語に意味を、出現位置近傍の単語で規定する\n 2013年のword2vecという手法が初出で、後年に同じ研究者がfasttextを考案した\n 以下の点が異なる [here](http://54.92.5.12/2019/05/09/fasttext%E3%81%A8word2vec%E3%81%AE%E9%81%95%E3%81%84/)\n - subwordと呼ばれる単語の部分文字列でも意味を獲得する\n - fasttextのほうが圧倒的に早い\n\n ここでは、文を分割したtokenのlistのlistを受け取って、fasttextのモデルをセーブする\n tokenのlistは語順が重要なので、文脈の順を崩さないこと\n :param tokens_list: list of list of str, tokenのリストのリスト\n :param num_dimension: int, word embedding space dimension\n :param save_folder: str, path to save folder\n :param min_count: int, fasttextがモデルに反映する最小の単語出現頻度, 1なら全文書中に1度だけ出現, 3以上が相場\n :return: bool\n \"\"\"\n # arrange save folder\n save_folder = make_save_folder(save_folder, True if save_folder == \"\" else False)\n\n # 入力はスペース区切りしたテキストファイル\n file_name_input_text = f\"{save_folder}delete_me_wakati.txt\"\n wakati = \"\\n\".join([\" \".join(tokens) for tokens in tqdm(tokens_list, desc=\"分かち書き @ fast text\")])\n with codecs.open(file_name_input_text, \"w\", \"utf-8\") as f:\n f.write(wakati)\n\n # model 生成\n model_fast_text = fasttext.train_unsupervised(file_name_input_text, model=\"skipgram\", dim=num_dimension,\n minCount=min_count)\n # save\n model_fast_text.save_model(f\"{save_folder}model_fast_text.bin\")\n\n return True\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import openpyxl
class TestXLUtility:
def __init__(self, driver):
self.driver = driver
def getRowCount(file, sheetname):
workbook = openpyxl.load_workbook(file)
#sheet = workbook.get_sheet_by_name(sheetname)
sheet = workbook[sheetname]
return(sheet.max_row)
def getColumnCount(file, sheetname):
workbook = openpyxl.load_workbook(file)
#sheet = workbook.get_sheet_by_name(sheetname)
sheet = workbook[sheetname]
return (sheet.max_column)
def readData(file,sheetname,rownum,columno):
workbook = openpyxl.load_workbook(file)
#sheet = workbook.get_sheet_by_name(sheetname)
sheet = workbook[sheetname]
return(sheet.cell(row=rownum, column=columno).value)
def writeData(file,sheetname,rownum,columno,data):
workbook = openpyxl.load_workbook(file)
#sheet = workbook.get_sheet_by_name(sheetname)
sheet = workbook[sheetname]
sheet.cell(row=rownum, column=columno).value = data
workbook.save(file)
|
normal
|
{
"blob_id": "adae4f9ebcbbb775fc40278ceec9a0cc30c0a503",
"index": 1541,
"step-1": "<mask token>\n\n\nclass TestXLUtility:\n <mask token>\n\n def getRowCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.max_row\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestXLUtility:\n <mask token>\n\n def getRowCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.max_row\n\n def getColumnCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.max_column\n <mask token>\n\n def writeData(file, sheetname, rownum, columno, data):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n sheet.cell(row=rownum, column=columno).value = data\n workbook.save(file)\n",
"step-3": "<mask token>\n\n\nclass TestXLUtility:\n\n def __init__(self, driver):\n self.driver = driver\n\n def getRowCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.max_row\n\n def getColumnCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.max_column\n\n def readData(file, sheetname, rownum, columno):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.cell(row=rownum, column=columno).value\n\n def writeData(file, sheetname, rownum, columno, data):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n sheet.cell(row=rownum, column=columno).value = data\n workbook.save(file)\n",
"step-4": "import openpyxl\n\n\nclass TestXLUtility:\n\n def __init__(self, driver):\n self.driver = driver\n\n def getRowCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.max_row\n\n def getColumnCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.max_column\n\n def readData(file, sheetname, rownum, columno):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n return sheet.cell(row=rownum, column=columno).value\n\n def writeData(file, sheetname, rownum, columno, data):\n workbook = openpyxl.load_workbook(file)\n sheet = workbook[sheetname]\n sheet.cell(row=rownum, column=columno).value = data\n workbook.save(file)\n",
"step-5": "import openpyxl\n\nclass TestXLUtility:\n\n def __init__(self, driver):\n self.driver = driver\n\n def getRowCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n #sheet = workbook.get_sheet_by_name(sheetname)\n sheet = workbook[sheetname]\n return(sheet.max_row)\n\n def getColumnCount(file, sheetname):\n workbook = openpyxl.load_workbook(file)\n #sheet = workbook.get_sheet_by_name(sheetname)\n sheet = workbook[sheetname]\n return (sheet.max_column)\n\n def readData(file,sheetname,rownum,columno):\n workbook = openpyxl.load_workbook(file)\n #sheet = workbook.get_sheet_by_name(sheetname)\n sheet = workbook[sheetname]\n return(sheet.cell(row=rownum, column=columno).value)\n\n def writeData(file,sheetname,rownum,columno,data):\n workbook = openpyxl.load_workbook(file)\n #sheet = workbook.get_sheet_by_name(sheetname)\n sheet = workbook[sheetname]\n sheet.cell(row=rownum, column=columno).value = data\n workbook.save(file)\n",
"step-ids": [
2,
4,
6,
7,
8
]
}
|
[
2,
4,
6,
7,
8
] |
import pickle
import sys
import numpy as np
import torch
import time
import torchvision
import matplotlib
import matplotlib.pyplot as plt
def load_cifar_data(data_files):
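    """Load the listed CIFAR batch files and stack them into one data array and one label list."""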
data = []
labels = []
for file in data_files:
with open(file, 'rb') as fo:
data_dict = pickle.load(fo, encoding='bytes')
if len(data) == 0:
data = data_dict[str.encode('data')]
labels = data_dict[str.encode('labels')]
else:
data = np.vstack((data, data_dict[str.encode('data')]))
labels.extend(data_dict[str.encode('labels')])
return data, labels
def unpickle(file):
with open(file, 'rb') as fo:
res = pickle.load(fo, encoding='bytes')
return res
def get_classwise_indices(labels):
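    """Map each class label to the list of sample indices that carry that label."""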
label_indices = {}
for idx, label in enumerate(labels):
if label not in label_indices.keys():
label_indices[label] = [idx]
else:
label_indices[label].append(idx)
return label_indices
def get_data_from_indices(data, indices_dict, count_per_class, image_shape):
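    """Reshape the selected samples to image_shape; count_per_class caps images per class (falsy keeps them all)."""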
generated_data = []
generated_labels = []
for key, val in indices_dict.items():
if count_per_class:
for i in range(count_per_class):
generated_data.append(np.reshape(data[val[i]], image_shape))
generated_labels.append(key)
else:
for i in val:
generated_data.append(np.reshape(data[i], image_shape))
generated_labels.append(key)
return np.asarray(generated_data), np.reshape(np.asarray(generated_labels, dtype=np.int32), (-1,1))
def create_data_loader(data_x, data_y, batch_size, shuffle):
tensor_x = torch.stack([torch.Tensor(i) for i in data_x]) # transform to torch tensors
tensor_y = torch.stack([torch.Tensor(i) for i in data_y])
dataset = torch.utils.data.TensorDataset(tensor_x,tensor_y) # create datset
dataloader = torch.utils.data.DataLoader(dataset=dataset, batch_size=batch_size, shuffle=shuffle) # create dataloader
return dataloader
def train_model(model, train_data_loader, test_data_loader, num_epochs=5, learning_rate=0.001, save_epochs=None, model_name="cnn"):
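    """Train the model, tracking per-epoch time, loss and train/test accuracy, and saving checkpoints at save_epochs."""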
num_epochs = num_epochs
learning_rate = learning_rate
# Loss and optimizer
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
total_step = len(train_data_loader)
train_times = []
train_accuracies = []
train_losses = []
test_accuracies = []
for epoch in range(num_epochs):
start_time = time.time()
for i, (images, labels) in enumerate(train_data_loader):
# Forward pass
outputs = model(images)
target = torch.max(labels.long(), 1)[0]
loss = criterion(outputs, target)
# Backward and optimize
optimizer.zero_grad()
loss.backward()
optimizer.step()
if (i+1) % 200 == 0:
print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'
.format(epoch+1, num_epochs, i+1, total_step, loss.item()))
end_time = time.time()
if save_epochs and epoch + 1 in save_epochs:
torch.save(model, "../data/models/" + model_name + "_" + str(epoch+1))
train_times.append(end_time - start_time)
train_losses.append(loss.item())
print("Calculating train accuracy...")
train_accuracies.append(get_accuracies(train_data_loader, model)[0])
print("Calculating test accuracy...")
        test_accuracies.append(get_accuracies(test_data_loader, model)[0])
        # get_accuracies leaves the model in eval mode; switch back before the next training epoch
        model.train()
print("Average training time per epoch:", np.mean(train_times))
print("Total training time for all epochs:", np.sum(train_times))
return train_accuracies, test_accuracies, train_losses
def get_accuracies(data_loader, model):
start_time = time.time()
model.eval()
with torch.no_grad():
correct = 0
total = 0
for images, labels in data_loader:
labels = torch.max(labels.long(), 1)[0]
outputs = model(images)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum().item()
accuracy = 100 * correct / total
end_time = time.time()
time_taken = end_time - start_time
print('Accuracy of the model: {} %'.format(accuracy))
return accuracy, time_taken
def get_model_size(model, model_name):
    """Print the size of the pickled model in megabytes."""
    serialized = pickle.dumps(model)
    byte_size = sys.getsizeof(serialized)
    print('Size of ' + model_name + ' model: ', byte_size/1000000)
def imshow(img, label_names, file_name="../data/sample_images"):
npimg = img.numpy()
npimg = npimg.astype(np.uint8)
npimg = np.transpose(npimg, (1, 2, 0))
plt.clf()
im = plt.imshow(npimg)
ylim = im.get_extent()[2]
plt.yticks(np.arange(0, ylim + 1, ylim/len(label_names)), label_names)
plt.savefig(file_name)
plt.show()
def show_classwise_images(data, labels, label_names, k):
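    """Collect up to k example images per class and display them in a grid labelled with the class names."""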
image_dict = {}
for idx, l in enumerate(labels):
label = l[0]
if label in image_dict.keys() and len(image_dict[label]) < k:
image_dict[label].append(data[idx])
elif label not in image_dict.keys():
image_dict[label] = [data[idx]]
images_to_show = []
labels_to_show = []
for label, image in image_dict.items():
labels_to_show.append(label_names[label])
for i in image:
images_to_show.append(i)
images_tensor = torch.stack([torch.Tensor(i) for i in images_to_show])
imshow(torchvision.utils.make_grid(images_tensor, nrow=k), labels_to_show)
def outlier_analysis(model, outliers_tensor, outlier_label_names, cifar10_label_names):
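    """Predict CIFAR-10 labels for out-of-distribution images and show them next to their predicted labels."""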
model.eval()
predicted_labels = []
with torch.no_grad():
start_time = time.time()
outputs = model(outliers_tensor)
end_time = time.time()
print("Time taken for prediction:", str(end_time - start_time))
_, predicted = torch.max(outputs.data, 1)
for idx, label in enumerate(predicted):
print("Original:", outlier_label_names[idx], "Predicted:", cifar10_label_names[label])
predicted_labels.append(cifar10_label_names[label])
imshow(torchvision.utils.make_grid(outliers_tensor, nrow=1), predicted_labels)
def plot_values(x, y, xlabel, ylabel, title, legend, fig_name):
plt.clf()
for y_i in y:
plt.plot(x, y_i)
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.legend(legend)
plt.savefig("../data/plots/" + fig_name)
plt.show()
|
normal
|
{
"blob_id": "66fe0a3b84773ee1d4f91d8fde60f1fc5b3d7e4c",
"index": 6454,
"step-1": "<mask token>\n\n\ndef load_cifar_data(data_files):\n data = []\n labels = []\n for file in data_files:\n with open(file, 'rb') as fo:\n data_dict = pickle.load(fo, encoding='bytes')\n if len(data) == 0:\n data = data_dict[str.encode('data')]\n labels = data_dict[str.encode('labels')]\n else:\n data = np.vstack((data, data_dict[str.encode('data')]))\n labels.extend(data_dict[str.encode('labels')])\n return data, labels\n\n\n<mask token>\n\n\ndef get_data_from_indices(data, indices_dict, count_per_class, image_shape):\n generated_data = []\n generated_labels = []\n for key, val in indices_dict.items():\n if count_per_class:\n for i in range(count_per_class):\n generated_data.append(np.reshape(data[val[i]], image_shape))\n generated_labels.append(key)\n else:\n for i in val:\n generated_data.append(np.reshape(data[i], image_shape))\n generated_labels.append(key)\n return np.asarray(generated_data), np.reshape(np.asarray(\n generated_labels, dtype=np.int32), (-1, 1))\n\n\ndef create_data_loader(data_x, data_y, batch_size, shuffle):\n tensor_x = torch.stack([torch.Tensor(i) for i in data_x])\n tensor_y = torch.stack([torch.Tensor(i) for i in data_y])\n dataset = torch.utils.data.TensorDataset(tensor_x, tensor_y)\n dataloader = torch.utils.data.DataLoader(dataset=dataset, batch_size=\n batch_size, shuffle=shuffle)\n return dataloader\n\n\ndef train_model(model, train_data_loader, test_data_loader, num_epochs=5,\n learning_rate=0.001, save_epochs=None, model_name='cnn'):\n num_epochs = num_epochs\n learning_rate = learning_rate\n criterion = torch.nn.CrossEntropyLoss()\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n total_step = len(train_data_loader)\n train_times = []\n train_accuracies = []\n train_losses = []\n test_accuracies = []\n for epoch in range(num_epochs):\n start_time = time.time()\n for i, (images, labels) in enumerate(train_data_loader):\n outputs = model(images)\n target = torch.max(labels.long(), 1)[0]\n loss = criterion(outputs, target)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if (i + 1) % 200 == 0:\n print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(\n epoch + 1, num_epochs, i + 1, total_step, loss.item()))\n end_time = time.time()\n if save_epochs and epoch + 1 in save_epochs:\n torch.save(model, '../data/models/' + model_name + '_' + str(\n epoch + 1))\n train_times.append(end_time - start_time)\n train_losses.append(loss.item())\n print('Calculating train accuracy...')\n train_accuracies.append(get_accuracies(train_data_loader, model)[0])\n print('Calculating test accuracy...')\n test_accuracies.append(get_accuracies(test_data_loader, model)[0])\n print('Average training time per epoch:', np.mean(train_times))\n print('Total training time for all epochs:', np.sum(train_times))\n return train_accuracies, test_accuracies, train_losses\n\n\ndef get_accuracies(data_loader, model):\n start_time = time.time()\n model.eval()\n with torch.no_grad():\n correct = 0\n total = 0\n for images, labels in data_loader:\n labels = torch.max(labels.long(), 1)[0]\n outputs = model(images)\n _, predicted = torch.max(outputs.data, 1)\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n accuracy = 100 * correct / total\n end_time = time.time()\n time_taken = end_time - start_time\n print('Accuracy of the model: {} %'.format(accuracy))\n return accuracy, time_taken\n\n\ndef get_model_size(model, model_name):\n model = pickle.dumps(net)\n byte_size = sys.getsizeof(model)\n print('Size of ' + model_name + ' model: 
', byte_size / 1000000)\n\n\ndef imshow(img, label_names, file_name='../data/sample_images'):\n npimg = img.numpy()\n npimg = npimg.astype(np.uint8)\n npimg = np.transpose(npimg, (1, 2, 0))\n plt.clf()\n im = plt.imshow(npimg)\n ylim = im.get_extent()[2]\n plt.yticks(np.arange(0, ylim + 1, ylim / len(label_names)), label_names)\n plt.savefig(file_name)\n plt.show()\n\n\ndef show_classwise_images(data, labels, label_names, k):\n image_dict = {}\n for idx, l in enumerate(labels):\n label = l[0]\n if label in image_dict.keys() and len(image_dict[label]) < k:\n image_dict[label].append(data[idx])\n elif label not in image_dict.keys():\n image_dict[label] = [data[idx]]\n images_to_show = []\n labels_to_show = []\n for label, image in image_dict.items():\n labels_to_show.append(label_names[label])\n for i in image:\n images_to_show.append(i)\n images_tensor = torch.stack([torch.Tensor(i) for i in images_to_show])\n imshow(torchvision.utils.make_grid(images_tensor, nrow=k), labels_to_show)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_cifar_data(data_files):\n data = []\n labels = []\n for file in data_files:\n with open(file, 'rb') as fo:\n data_dict = pickle.load(fo, encoding='bytes')\n if len(data) == 0:\n data = data_dict[str.encode('data')]\n labels = data_dict[str.encode('labels')]\n else:\n data = np.vstack((data, data_dict[str.encode('data')]))\n labels.extend(data_dict[str.encode('labels')])\n return data, labels\n\n\ndef unpickle(file):\n with open(file, 'rb') as fo:\n res = pickle.load(fo, encoding='bytes')\n return res\n\n\n<mask token>\n\n\ndef get_data_from_indices(data, indices_dict, count_per_class, image_shape):\n generated_data = []\n generated_labels = []\n for key, val in indices_dict.items():\n if count_per_class:\n for i in range(count_per_class):\n generated_data.append(np.reshape(data[val[i]], image_shape))\n generated_labels.append(key)\n else:\n for i in val:\n generated_data.append(np.reshape(data[i], image_shape))\n generated_labels.append(key)\n return np.asarray(generated_data), np.reshape(np.asarray(\n generated_labels, dtype=np.int32), (-1, 1))\n\n\ndef create_data_loader(data_x, data_y, batch_size, shuffle):\n tensor_x = torch.stack([torch.Tensor(i) for i in data_x])\n tensor_y = torch.stack([torch.Tensor(i) for i in data_y])\n dataset = torch.utils.data.TensorDataset(tensor_x, tensor_y)\n dataloader = torch.utils.data.DataLoader(dataset=dataset, batch_size=\n batch_size, shuffle=shuffle)\n return dataloader\n\n\ndef train_model(model, train_data_loader, test_data_loader, num_epochs=5,\n learning_rate=0.001, save_epochs=None, model_name='cnn'):\n num_epochs = num_epochs\n learning_rate = learning_rate\n criterion = torch.nn.CrossEntropyLoss()\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n total_step = len(train_data_loader)\n train_times = []\n train_accuracies = []\n train_losses = []\n test_accuracies = []\n for epoch in range(num_epochs):\n start_time = time.time()\n for i, (images, labels) in enumerate(train_data_loader):\n outputs = model(images)\n target = torch.max(labels.long(), 1)[0]\n loss = criterion(outputs, target)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if (i + 1) % 200 == 0:\n print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(\n epoch + 1, num_epochs, i + 1, total_step, loss.item()))\n end_time = time.time()\n if save_epochs and epoch + 1 in save_epochs:\n torch.save(model, '../data/models/' + model_name + '_' + str(\n epoch + 1))\n train_times.append(end_time - start_time)\n train_losses.append(loss.item())\n print('Calculating train accuracy...')\n train_accuracies.append(get_accuracies(train_data_loader, model)[0])\n print('Calculating test accuracy...')\n test_accuracies.append(get_accuracies(test_data_loader, model)[0])\n print('Average training time per epoch:', np.mean(train_times))\n print('Total training time for all epochs:', np.sum(train_times))\n return train_accuracies, test_accuracies, train_losses\n\n\ndef get_accuracies(data_loader, model):\n start_time = time.time()\n model.eval()\n with torch.no_grad():\n correct = 0\n total = 0\n for images, labels in data_loader:\n labels = torch.max(labels.long(), 1)[0]\n outputs = model(images)\n _, predicted = torch.max(outputs.data, 1)\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n accuracy = 100 * correct / total\n end_time = time.time()\n time_taken = end_time - start_time\n print('Accuracy of the model: {} %'.format(accuracy))\n return accuracy, time_taken\n\n\ndef get_model_size(model, 
model_name):\n model = pickle.dumps(net)\n byte_size = sys.getsizeof(model)\n print('Size of ' + model_name + ' model: ', byte_size / 1000000)\n\n\ndef imshow(img, label_names, file_name='../data/sample_images'):\n npimg = img.numpy()\n npimg = npimg.astype(np.uint8)\n npimg = np.transpose(npimg, (1, 2, 0))\n plt.clf()\n im = plt.imshow(npimg)\n ylim = im.get_extent()[2]\n plt.yticks(np.arange(0, ylim + 1, ylim / len(label_names)), label_names)\n plt.savefig(file_name)\n plt.show()\n\n\ndef show_classwise_images(data, labels, label_names, k):\n image_dict = {}\n for idx, l in enumerate(labels):\n label = l[0]\n if label in image_dict.keys() and len(image_dict[label]) < k:\n image_dict[label].append(data[idx])\n elif label not in image_dict.keys():\n image_dict[label] = [data[idx]]\n images_to_show = []\n labels_to_show = []\n for label, image in image_dict.items():\n labels_to_show.append(label_names[label])\n for i in image:\n images_to_show.append(i)\n images_tensor = torch.stack([torch.Tensor(i) for i in images_to_show])\n imshow(torchvision.utils.make_grid(images_tensor, nrow=k), labels_to_show)\n\n\ndef outlier_analysis(model, outliers_tensor, outlier_label_names,\n cifar10_label_names):\n model.eval()\n predicted_labels = []\n with torch.no_grad():\n start_time = time.time()\n outputs = model(outliers_tensor)\n end_time = time.time()\n print('Time taken for prediction:', str(end_time - start_time))\n _, predicted = torch.max(outputs.data, 1)\n for idx, label in enumerate(predicted):\n print('Original:', outlier_label_names[idx], 'Predicted:',\n cifar10_label_names[label])\n predicted_labels.append(cifar10_label_names[label])\n imshow(torchvision.utils.make_grid(outliers_tensor, nrow=1),\n predicted_labels)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_cifar_data(data_files):\n data = []\n labels = []\n for file in data_files:\n with open(file, 'rb') as fo:\n data_dict = pickle.load(fo, encoding='bytes')\n if len(data) == 0:\n data = data_dict[str.encode('data')]\n labels = data_dict[str.encode('labels')]\n else:\n data = np.vstack((data, data_dict[str.encode('data')]))\n labels.extend(data_dict[str.encode('labels')])\n return data, labels\n\n\ndef unpickle(file):\n with open(file, 'rb') as fo:\n res = pickle.load(fo, encoding='bytes')\n return res\n\n\ndef get_classwise_indices(labels):\n label_indices = {}\n for idx, label in enumerate(labels):\n if label not in label_indices.keys():\n label_indices[label] = [idx]\n else:\n label_indices[label].append(idx)\n return label_indices\n\n\ndef get_data_from_indices(data, indices_dict, count_per_class, image_shape):\n generated_data = []\n generated_labels = []\n for key, val in indices_dict.items():\n if count_per_class:\n for i in range(count_per_class):\n generated_data.append(np.reshape(data[val[i]], image_shape))\n generated_labels.append(key)\n else:\n for i in val:\n generated_data.append(np.reshape(data[i], image_shape))\n generated_labels.append(key)\n return np.asarray(generated_data), np.reshape(np.asarray(\n generated_labels, dtype=np.int32), (-1, 1))\n\n\ndef create_data_loader(data_x, data_y, batch_size, shuffle):\n tensor_x = torch.stack([torch.Tensor(i) for i in data_x])\n tensor_y = torch.stack([torch.Tensor(i) for i in data_y])\n dataset = torch.utils.data.TensorDataset(tensor_x, tensor_y)\n dataloader = torch.utils.data.DataLoader(dataset=dataset, batch_size=\n batch_size, shuffle=shuffle)\n return dataloader\n\n\ndef train_model(model, train_data_loader, test_data_loader, num_epochs=5,\n learning_rate=0.001, save_epochs=None, model_name='cnn'):\n num_epochs = num_epochs\n learning_rate = learning_rate\n criterion = torch.nn.CrossEntropyLoss()\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n total_step = len(train_data_loader)\n train_times = []\n train_accuracies = []\n train_losses = []\n test_accuracies = []\n for epoch in range(num_epochs):\n start_time = time.time()\n for i, (images, labels) in enumerate(train_data_loader):\n outputs = model(images)\n target = torch.max(labels.long(), 1)[0]\n loss = criterion(outputs, target)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if (i + 1) % 200 == 0:\n print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(\n epoch + 1, num_epochs, i + 1, total_step, loss.item()))\n end_time = time.time()\n if save_epochs and epoch + 1 in save_epochs:\n torch.save(model, '../data/models/' + model_name + '_' + str(\n epoch + 1))\n train_times.append(end_time - start_time)\n train_losses.append(loss.item())\n print('Calculating train accuracy...')\n train_accuracies.append(get_accuracies(train_data_loader, model)[0])\n print('Calculating test accuracy...')\n test_accuracies.append(get_accuracies(test_data_loader, model)[0])\n print('Average training time per epoch:', np.mean(train_times))\n print('Total training time for all epochs:', np.sum(train_times))\n return train_accuracies, test_accuracies, train_losses\n\n\ndef get_accuracies(data_loader, model):\n start_time = time.time()\n model.eval()\n with torch.no_grad():\n correct = 0\n total = 0\n for images, labels in data_loader:\n labels = torch.max(labels.long(), 1)[0]\n outputs = model(images)\n _, predicted = torch.max(outputs.data, 1)\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n 
accuracy = 100 * correct / total\n end_time = time.time()\n time_taken = end_time - start_time\n print('Accuracy of the model: {} %'.format(accuracy))\n return accuracy, time_taken\n\n\ndef get_model_size(model, model_name):\n model = pickle.dumps(net)\n byte_size = sys.getsizeof(model)\n print('Size of ' + model_name + ' model: ', byte_size / 1000000)\n\n\ndef imshow(img, label_names, file_name='../data/sample_images'):\n npimg = img.numpy()\n npimg = npimg.astype(np.uint8)\n npimg = np.transpose(npimg, (1, 2, 0))\n plt.clf()\n im = plt.imshow(npimg)\n ylim = im.get_extent()[2]\n plt.yticks(np.arange(0, ylim + 1, ylim / len(label_names)), label_names)\n plt.savefig(file_name)\n plt.show()\n\n\ndef show_classwise_images(data, labels, label_names, k):\n image_dict = {}\n for idx, l in enumerate(labels):\n label = l[0]\n if label in image_dict.keys() and len(image_dict[label]) < k:\n image_dict[label].append(data[idx])\n elif label not in image_dict.keys():\n image_dict[label] = [data[idx]]\n images_to_show = []\n labels_to_show = []\n for label, image in image_dict.items():\n labels_to_show.append(label_names[label])\n for i in image:\n images_to_show.append(i)\n images_tensor = torch.stack([torch.Tensor(i) for i in images_to_show])\n imshow(torchvision.utils.make_grid(images_tensor, nrow=k), labels_to_show)\n\n\ndef outlier_analysis(model, outliers_tensor, outlier_label_names,\n cifar10_label_names):\n model.eval()\n predicted_labels = []\n with torch.no_grad():\n start_time = time.time()\n outputs = model(outliers_tensor)\n end_time = time.time()\n print('Time taken for prediction:', str(end_time - start_time))\n _, predicted = torch.max(outputs.data, 1)\n for idx, label in enumerate(predicted):\n print('Original:', outlier_label_names[idx], 'Predicted:',\n cifar10_label_names[label])\n predicted_labels.append(cifar10_label_names[label])\n imshow(torchvision.utils.make_grid(outliers_tensor, nrow=1),\n predicted_labels)\n\n\ndef plot_values(x, y, xlabel, ylabel, title, legend, fig_name):\n plt.clf()\n for y_i in y:\n plt.plot(x, y_i)\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.title(title)\n plt.legend(legend)\n plt.savefig('../data/plots/' + fig_name)\n plt.show()\n",
"step-4": "import pickle\nimport numpy as np\nimport torch\nimport time\nimport torchvision\nimport matplotlib\nimport matplotlib.pyplot as plt\n\n\ndef load_cifar_data(data_files):\n data = []\n labels = []\n for file in data_files:\n with open(file, 'rb') as fo:\n data_dict = pickle.load(fo, encoding='bytes')\n if len(data) == 0:\n data = data_dict[str.encode('data')]\n labels = data_dict[str.encode('labels')]\n else:\n data = np.vstack((data, data_dict[str.encode('data')]))\n labels.extend(data_dict[str.encode('labels')])\n return data, labels\n\n\ndef unpickle(file):\n with open(file, 'rb') as fo:\n res = pickle.load(fo, encoding='bytes')\n return res\n\n\ndef get_classwise_indices(labels):\n label_indices = {}\n for idx, label in enumerate(labels):\n if label not in label_indices.keys():\n label_indices[label] = [idx]\n else:\n label_indices[label].append(idx)\n return label_indices\n\n\ndef get_data_from_indices(data, indices_dict, count_per_class, image_shape):\n generated_data = []\n generated_labels = []\n for key, val in indices_dict.items():\n if count_per_class:\n for i in range(count_per_class):\n generated_data.append(np.reshape(data[val[i]], image_shape))\n generated_labels.append(key)\n else:\n for i in val:\n generated_data.append(np.reshape(data[i], image_shape))\n generated_labels.append(key)\n return np.asarray(generated_data), np.reshape(np.asarray(\n generated_labels, dtype=np.int32), (-1, 1))\n\n\ndef create_data_loader(data_x, data_y, batch_size, shuffle):\n tensor_x = torch.stack([torch.Tensor(i) for i in data_x])\n tensor_y = torch.stack([torch.Tensor(i) for i in data_y])\n dataset = torch.utils.data.TensorDataset(tensor_x, tensor_y)\n dataloader = torch.utils.data.DataLoader(dataset=dataset, batch_size=\n batch_size, shuffle=shuffle)\n return dataloader\n\n\ndef train_model(model, train_data_loader, test_data_loader, num_epochs=5,\n learning_rate=0.001, save_epochs=None, model_name='cnn'):\n num_epochs = num_epochs\n learning_rate = learning_rate\n criterion = torch.nn.CrossEntropyLoss()\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n total_step = len(train_data_loader)\n train_times = []\n train_accuracies = []\n train_losses = []\n test_accuracies = []\n for epoch in range(num_epochs):\n start_time = time.time()\n for i, (images, labels) in enumerate(train_data_loader):\n outputs = model(images)\n target = torch.max(labels.long(), 1)[0]\n loss = criterion(outputs, target)\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n if (i + 1) % 200 == 0:\n print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(\n epoch + 1, num_epochs, i + 1, total_step, loss.item()))\n end_time = time.time()\n if save_epochs and epoch + 1 in save_epochs:\n torch.save(model, '../data/models/' + model_name + '_' + str(\n epoch + 1))\n train_times.append(end_time - start_time)\n train_losses.append(loss.item())\n print('Calculating train accuracy...')\n train_accuracies.append(get_accuracies(train_data_loader, model)[0])\n print('Calculating test accuracy...')\n test_accuracies.append(get_accuracies(test_data_loader, model)[0])\n print('Average training time per epoch:', np.mean(train_times))\n print('Total training time for all epochs:', np.sum(train_times))\n return train_accuracies, test_accuracies, train_losses\n\n\ndef get_accuracies(data_loader, model):\n start_time = time.time()\n model.eval()\n with torch.no_grad():\n correct = 0\n total = 0\n for images, labels in data_loader:\n labels = torch.max(labels.long(), 1)[0]\n outputs = 
model(images)\n _, predicted = torch.max(outputs.data, 1)\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n accuracy = 100 * correct / total\n end_time = time.time()\n time_taken = end_time - start_time\n print('Accuracy of the model: {} %'.format(accuracy))\n return accuracy, time_taken\n\n\ndef get_model_size(model, model_name):\n model = pickle.dumps(net)\n byte_size = sys.getsizeof(model)\n print('Size of ' + model_name + ' model: ', byte_size / 1000000)\n\n\ndef imshow(img, label_names, file_name='../data/sample_images'):\n npimg = img.numpy()\n npimg = npimg.astype(np.uint8)\n npimg = np.transpose(npimg, (1, 2, 0))\n plt.clf()\n im = plt.imshow(npimg)\n ylim = im.get_extent()[2]\n plt.yticks(np.arange(0, ylim + 1, ylim / len(label_names)), label_names)\n plt.savefig(file_name)\n plt.show()\n\n\ndef show_classwise_images(data, labels, label_names, k):\n image_dict = {}\n for idx, l in enumerate(labels):\n label = l[0]\n if label in image_dict.keys() and len(image_dict[label]) < k:\n image_dict[label].append(data[idx])\n elif label not in image_dict.keys():\n image_dict[label] = [data[idx]]\n images_to_show = []\n labels_to_show = []\n for label, image in image_dict.items():\n labels_to_show.append(label_names[label])\n for i in image:\n images_to_show.append(i)\n images_tensor = torch.stack([torch.Tensor(i) for i in images_to_show])\n imshow(torchvision.utils.make_grid(images_tensor, nrow=k), labels_to_show)\n\n\ndef outlier_analysis(model, outliers_tensor, outlier_label_names,\n cifar10_label_names):\n model.eval()\n predicted_labels = []\n with torch.no_grad():\n start_time = time.time()\n outputs = model(outliers_tensor)\n end_time = time.time()\n print('Time taken for prediction:', str(end_time - start_time))\n _, predicted = torch.max(outputs.data, 1)\n for idx, label in enumerate(predicted):\n print('Original:', outlier_label_names[idx], 'Predicted:',\n cifar10_label_names[label])\n predicted_labels.append(cifar10_label_names[label])\n imshow(torchvision.utils.make_grid(outliers_tensor, nrow=1),\n predicted_labels)\n\n\ndef plot_values(x, y, xlabel, ylabel, title, legend, fig_name):\n plt.clf()\n for y_i in y:\n plt.plot(x, y_i)\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.title(title)\n plt.legend(legend)\n plt.savefig('../data/plots/' + fig_name)\n plt.show()\n",
"step-5": "import pickle\nimport numpy as np\nimport torch\nimport time\nimport torchvision\nimport matplotlib\nimport matplotlib.pyplot as plt\n\ndef load_cifar_data(data_files):\n data = []\n labels = []\n for file in data_files:\n with open(file, 'rb') as fo:\n data_dict = pickle.load(fo, encoding='bytes')\n if len(data) == 0:\n data = data_dict[str.encode('data')]\n labels = data_dict[str.encode('labels')]\n else:\n data = np.vstack((data, data_dict[str.encode('data')]))\n labels.extend(data_dict[str.encode('labels')])\n return data, labels\n\ndef unpickle(file):\n with open(file, 'rb') as fo:\n res = pickle.load(fo, encoding='bytes')\n return res\n \ndef get_classwise_indices(labels):\n label_indices = {}\n for idx, label in enumerate(labels):\n if label not in label_indices.keys():\n label_indices[label] = [idx]\n else:\n label_indices[label].append(idx)\n return label_indices\n \ndef get_data_from_indices(data, indices_dict, count_per_class, image_shape):\n generated_data = []\n generated_labels = []\n for key, val in indices_dict.items():\n if count_per_class:\n for i in range(count_per_class):\n generated_data.append(np.reshape(data[val[i]], image_shape))\n generated_labels.append(key)\n else:\n for i in val:\n generated_data.append(np.reshape(data[i], image_shape))\n generated_labels.append(key)\n return np.asarray(generated_data), np.reshape(np.asarray(generated_labels, dtype=np.int32), (-1,1))\n\ndef create_data_loader(data_x, data_y, batch_size, shuffle):\n tensor_x = torch.stack([torch.Tensor(i) for i in data_x]) # transform to torch tensors\n tensor_y = torch.stack([torch.Tensor(i) for i in data_y])\n\n dataset = torch.utils.data.TensorDataset(tensor_x,tensor_y) # create datset\n dataloader = torch.utils.data.DataLoader(dataset=dataset, batch_size=batch_size, shuffle=shuffle) # create dataloader\n return dataloader\n \ndef train_model(model, train_data_loader, test_data_loader, num_epochs=5, learning_rate=0.001, save_epochs=None, model_name=\"cnn\"):\n num_epochs = num_epochs\n learning_rate = learning_rate\n\n # Loss and optimizer\n criterion = torch.nn.CrossEntropyLoss()\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n \n total_step = len(train_data_loader)\n train_times = []\n train_accuracies = []\n train_losses = []\n test_accuracies = []\n \n for epoch in range(num_epochs):\n start_time = time.time()\n for i, (images, labels) in enumerate(train_data_loader):\n # Forward pass\n outputs = model(images)\n target = torch.max(labels.long(), 1)[0]\n loss = criterion(outputs, target)\n\n # Backward and optimize\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n if (i+1) % 200 == 0:\n print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}' \n .format(epoch+1, num_epochs, i+1, total_step, loss.item()))\n end_time = time.time()\n if save_epochs and epoch + 1 in save_epochs:\n torch.save(model, \"../data/models/\" + model_name + \"_\" + str(epoch+1))\n train_times.append(end_time - start_time)\n train_losses.append(loss.item()) \n print(\"Calculating train accuracy...\")\n train_accuracies.append(get_accuracies(train_data_loader, model)[0])\n print(\"Calculating test accuracy...\")\n test_accuracies.append(get_accuracies(test_data_loader, model)[0])\n print(\"Average training time per epoch:\", np.mean(train_times))\n print(\"Total training time for all epochs:\", np.sum(train_times))\n return train_accuracies, test_accuracies, train_losses\n\ndef get_accuracies(data_loader, model):\n start_time = time.time()\n model.eval()\n with torch.no_grad():\n 
correct = 0\n total = 0\n for images, labels in data_loader:\n labels = torch.max(labels.long(), 1)[0]\n outputs = model(images)\n _, predicted = torch.max(outputs.data, 1)\n total += labels.size(0)\n correct += (predicted == labels).sum().item()\n accuracy = 100 * correct / total\n end_time = time.time()\n time_taken = end_time - start_time\n print('Accuracy of the model: {} %'.format(accuracy))\n return accuracy, time_taken\n \ndef get_model_size(model, model_name):\n model = pickle.dumps(net)\n byte_size = sys.getsizeof(model)\n print('Size of ' + model_name + ' model: ', byte_size/1000000)\n \ndef imshow(img, label_names, file_name=\"../data/sample_images\"):\n npimg = img.numpy()\n npimg = npimg.astype(np.uint8)\n npimg = np.transpose(npimg, (1, 2, 0))\n plt.clf()\n im = plt.imshow(npimg)\n ylim = im.get_extent()[2]\n plt.yticks(np.arange(0, ylim + 1, ylim/len(label_names)), label_names)\n plt.savefig(file_name)\n plt.show()\n \ndef show_classwise_images(data, labels, label_names, k):\n image_dict = {}\n for idx, l in enumerate(labels):\n label = l[0]\n if label in image_dict.keys() and len(image_dict[label]) < k:\n image_dict[label].append(data[idx])\n elif label not in image_dict.keys():\n image_dict[label] = [data[idx]]\n \n images_to_show = []\n labels_to_show = []\n for label, image in image_dict.items():\n labels_to_show.append(label_names[label])\n for i in image:\n images_to_show.append(i)\n \n images_tensor = torch.stack([torch.Tensor(i) for i in images_to_show])\n \n imshow(torchvision.utils.make_grid(images_tensor, nrow=k), labels_to_show)\n \ndef outlier_analysis(model, outliers_tensor, outlier_label_names, cifar10_label_names):\n model.eval()\n predicted_labels = []\n with torch.no_grad():\n start_time = time.time()\n outputs = model(outliers_tensor)\n end_time = time.time()\n print(\"Time taken for prediction:\", str(end_time - start_time))\n _, predicted = torch.max(outputs.data, 1)\n for idx, label in enumerate(predicted):\n print(\"Original:\", outlier_label_names[idx], \"Predicted:\", cifar10_label_names[label])\n predicted_labels.append(cifar10_label_names[label])\n imshow(torchvision.utils.make_grid(outliers_tensor, nrow=1), predicted_labels)\n \ndef plot_values(x, y, xlabel, ylabel, title, legend, fig_name):\n plt.clf()\n for y_i in y:\n plt.plot(x, y_i)\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.title(title)\n plt.legend(legend)\n plt.savefig(\"../data/plots/\" + fig_name)\n plt.show()",
"step-ids": [
8,
10,
12,
13,
14
]
}
|
[
8,
10,
12,
13,
14
] |
__author__ = 'matthias'
from tcp import *
from data import *
#SERVER = "131.225.237.31"
#PORT = 33487
data = LaserData()
#server = TCP(SERVER, PORT)
server = TCP()
server.start_server()
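# Receive 100 messages from the connected client and print each one.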
for i in range(100):
data = server.recv_server()
    print(data)
|
normal
|
{
"blob_id": "1e4d18909b72ceef729efdd7b2ab996ace45f1bd",
"index": 6367,
"step-1": "__author__ = 'matthias'\n\nfrom tcp import *\nfrom data import *\n\n#SERVER = \"131.225.237.31\"\n#PORT = 33487\n\ndata = LaserData()\n#server = TCP(SERVER, PORT)\nserver = TCP()\nserver.start_server()\nfor i in range(100):\n data = server.recv_server()\n\n print data\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
lgr = logging.getLogger(__name__)
lgr.log(logging.INFO, "hello")
import database
import csv
import codecs
class Stop(object):
"""docstring for Stop"""
def __init__(self, arg):
self.fields = [
'stop_id',
'stop_name',
'stop_lat',
'stop_lon',
'stop_calle',
'stop_numero',
'stop_entre',
'stop_esquina'
]
self.d = {}
self.parse(arg)
def __repr__(self):
return str(self.d)
def parse(self, dictParams):
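        # Keep only recognised stop fields; stop_id is coerced to int and byte values are decoded to text.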
        for k, v in dictParams.items():
            if str(k) == 'stop_id':
                v = int(v)
            if isinstance(v, bytes):
                v = v.decode('utf-8')
if k in self.fields:
self.d.update({k:v})
def save(self, db):
db.insert('stops', **self.d)
def saveStops(stops):
db = database.dbInterface('../database/cba-1.0.1.sqlite')
for stop_id, stop in stops.items():
stop.save(db)
db.close()
def addFromFile(stops, filename):
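    # Load stops from one CSV file into `stops`; return the entries whose stop_id was already present.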
repeated = {}
with open('../incoming/'+ filename) as csvFile:
reader = csv.DictReader(csvFile)
for r in reader:
stop_id = r['stop_id']
stop = Stop(r)
if stop_id in stops:
if stop.d != stops[stop_id].d:
pass
repeated[stop_id] = stop
print("stop already in collection, skipping")
print(r)
print(stops[stop_id])
else:
stops[stop_id] = stop
return repeated
def show(stops):
for stop_id, stop in stops.items():
print(stop_id, stop)
def main():
stops = {}
repeated = addFromFile(stops, 'asf/stops.csv')
repeated.update(addFromFile(stops, 'ccba/stops.csv'))
repeated.update(addFromFile(stops, 'coniferal/stops.csv'))
repeated.update(addFromFile(stops, 'ersa/stops.csv'))
# show(stops)
show(repeated)
saveStops(stops)
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "39ecbf914b0b2b25ce4290eac4198199b90f95e0",
"index": 5384,
"step-1": "<mask token>\n\n\nclass Stop(object):\n \"\"\"docstring for Stop\"\"\"\n\n def __init__(self, arg):\n self.fields = ['stop_id', 'stop_name', 'stop_lat', 'stop_lon',\n 'stop_calle', 'stop_numero', 'stop_entre', 'stop_esquina']\n self.d = {}\n self.parse(arg)\n\n def __repr__(self):\n return str(self.d)\n\n def parse(self, dictParams):\n for k, v in dictParams.items():\n if str(k) in 'stop_id':\n v = int(v)\n if type(v) is str:\n v = codecs.decode(v, 'utf-8')\n if k in self.fields:\n self.d.update({k: v})\n\n def save(self, db):\n db.insert('stops', **self.d)\n\n\ndef saveStops(stops):\n db = database.dbInterface('../database/cba-1.0.1.sqlite')\n for stop_id, stop in stops.items():\n stop.save(db)\n db.close()\n\n\ndef addFromFile(stops, filename):\n repeated = {}\n with open('../incoming/' + filename) as csvFile:\n reader = csv.DictReader(csvFile)\n for r in reader:\n stop_id = r['stop_id']\n stop = Stop(r)\n if stop_id in stops:\n if stop.d != stops[stop_id].d:\n pass\n repeated[stop_id] = stop\n print('stop already in collection, skipping')\n print(r)\n print(stops[stop_id])\n else:\n stops[stop_id] = stop\n return repeated\n\n\n<mask token>\n\n\ndef main():\n stops = {}\n repeated = addFromFile(stops, 'asf/stops.csv')\n repeated.update(addFromFile(stops, 'ccba/stops.csv'))\n repeated.update(addFromFile(stops, 'coniferal/stops.csv'))\n repeated.update(addFromFile(stops, 'ersa/stops.csv'))\n show(repeated)\n saveStops(stops)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Stop(object):\n \"\"\"docstring for Stop\"\"\"\n\n def __init__(self, arg):\n self.fields = ['stop_id', 'stop_name', 'stop_lat', 'stop_lon',\n 'stop_calle', 'stop_numero', 'stop_entre', 'stop_esquina']\n self.d = {}\n self.parse(arg)\n\n def __repr__(self):\n return str(self.d)\n\n def parse(self, dictParams):\n for k, v in dictParams.items():\n if str(k) in 'stop_id':\n v = int(v)\n if type(v) is str:\n v = codecs.decode(v, 'utf-8')\n if k in self.fields:\n self.d.update({k: v})\n\n def save(self, db):\n db.insert('stops', **self.d)\n\n\ndef saveStops(stops):\n db = database.dbInterface('../database/cba-1.0.1.sqlite')\n for stop_id, stop in stops.items():\n stop.save(db)\n db.close()\n\n\ndef addFromFile(stops, filename):\n repeated = {}\n with open('../incoming/' + filename) as csvFile:\n reader = csv.DictReader(csvFile)\n for r in reader:\n stop_id = r['stop_id']\n stop = Stop(r)\n if stop_id in stops:\n if stop.d != stops[stop_id].d:\n pass\n repeated[stop_id] = stop\n print('stop already in collection, skipping')\n print(r)\n print(stops[stop_id])\n else:\n stops[stop_id] = stop\n return repeated\n\n\ndef show(stops):\n for stop_id, stop in stops.items():\n print(stop_id, stop)\n\n\ndef main():\n stops = {}\n repeated = addFromFile(stops, 'asf/stops.csv')\n repeated.update(addFromFile(stops, 'ccba/stops.csv'))\n repeated.update(addFromFile(stops, 'coniferal/stops.csv'))\n repeated.update(addFromFile(stops, 'ersa/stops.csv'))\n show(repeated)\n saveStops(stops)\n\n\n<mask token>\n",
"step-3": "<mask token>\nlgr = logging.getLogger(__name__)\nlgr.log('hello')\n<mask token>\n\n\nclass Stop(object):\n \"\"\"docstring for Stop\"\"\"\n\n def __init__(self, arg):\n self.fields = ['stop_id', 'stop_name', 'stop_lat', 'stop_lon',\n 'stop_calle', 'stop_numero', 'stop_entre', 'stop_esquina']\n self.d = {}\n self.parse(arg)\n\n def __repr__(self):\n return str(self.d)\n\n def parse(self, dictParams):\n for k, v in dictParams.items():\n if str(k) in 'stop_id':\n v = int(v)\n if type(v) is str:\n v = codecs.decode(v, 'utf-8')\n if k in self.fields:\n self.d.update({k: v})\n\n def save(self, db):\n db.insert('stops', **self.d)\n\n\ndef saveStops(stops):\n db = database.dbInterface('../database/cba-1.0.1.sqlite')\n for stop_id, stop in stops.items():\n stop.save(db)\n db.close()\n\n\ndef addFromFile(stops, filename):\n repeated = {}\n with open('../incoming/' + filename) as csvFile:\n reader = csv.DictReader(csvFile)\n for r in reader:\n stop_id = r['stop_id']\n stop = Stop(r)\n if stop_id in stops:\n if stop.d != stops[stop_id].d:\n pass\n repeated[stop_id] = stop\n print('stop already in collection, skipping')\n print(r)\n print(stops[stop_id])\n else:\n stops[stop_id] = stop\n return repeated\n\n\ndef show(stops):\n for stop_id, stop in stops.items():\n print(stop_id, stop)\n\n\ndef main():\n stops = {}\n repeated = addFromFile(stops, 'asf/stops.csv')\n repeated.update(addFromFile(stops, 'ccba/stops.csv'))\n repeated.update(addFromFile(stops, 'coniferal/stops.csv'))\n repeated.update(addFromFile(stops, 'ersa/stops.csv'))\n show(repeated)\n saveStops(stops)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import logging\nlgr = logging.getLogger(__name__)\nlgr.log('hello')\nimport database\nimport csv\nimport codecs\n\n\nclass Stop(object):\n \"\"\"docstring for Stop\"\"\"\n\n def __init__(self, arg):\n self.fields = ['stop_id', 'stop_name', 'stop_lat', 'stop_lon',\n 'stop_calle', 'stop_numero', 'stop_entre', 'stop_esquina']\n self.d = {}\n self.parse(arg)\n\n def __repr__(self):\n return str(self.d)\n\n def parse(self, dictParams):\n for k, v in dictParams.items():\n if str(k) in 'stop_id':\n v = int(v)\n if type(v) is str:\n v = codecs.decode(v, 'utf-8')\n if k in self.fields:\n self.d.update({k: v})\n\n def save(self, db):\n db.insert('stops', **self.d)\n\n\ndef saveStops(stops):\n db = database.dbInterface('../database/cba-1.0.1.sqlite')\n for stop_id, stop in stops.items():\n stop.save(db)\n db.close()\n\n\ndef addFromFile(stops, filename):\n repeated = {}\n with open('../incoming/' + filename) as csvFile:\n reader = csv.DictReader(csvFile)\n for r in reader:\n stop_id = r['stop_id']\n stop = Stop(r)\n if stop_id in stops:\n if stop.d != stops[stop_id].d:\n pass\n repeated[stop_id] = stop\n print('stop already in collection, skipping')\n print(r)\n print(stops[stop_id])\n else:\n stops[stop_id] = stop\n return repeated\n\n\ndef show(stops):\n for stop_id, stop in stops.items():\n print(stop_id, stop)\n\n\ndef main():\n stops = {}\n repeated = addFromFile(stops, 'asf/stops.csv')\n repeated.update(addFromFile(stops, 'ccba/stops.csv'))\n repeated.update(addFromFile(stops, 'coniferal/stops.csv'))\n repeated.update(addFromFile(stops, 'ersa/stops.csv'))\n show(repeated)\n saveStops(stops)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport logging\nlgr = logging.getLogger(__name__)\nlgr.log(\"hello\")\nimport database\nimport csv\nimport codecs\nclass Stop(object):\n \"\"\"docstring for Stop\"\"\"\n def __init__(self, arg):\n self.fields = [\n 'stop_id',\n 'stop_name',\n 'stop_lat',\n 'stop_lon',\n 'stop_calle',\n 'stop_numero',\n 'stop_entre',\n 'stop_esquina'\n ]\n self.d = {}\n self.parse(arg)\n\n def __repr__(self):\n return str(self.d)\n\n def parse(self, dictParams):\n for k,v in dictParams.items():\n if str(k) in 'stop_id':\n v = int(v)\n if type(v) is str:\n v = codecs.decode(v, 'utf-8')\n if k in self.fields:\n self.d.update({k:v})\n def save(self, db):\n db.insert('stops', **self.d)\n\ndef saveStops(stops):\n db = database.dbInterface('../database/cba-1.0.1.sqlite')\n for stop_id, stop in stops.items():\n stop.save(db)\n db.close()\n\ndef addFromFile(stops, filename):\n repeated = {}\n with open('../incoming/'+ filename) as csvFile:\n reader = csv.DictReader(csvFile)\n for r in reader:\n stop_id = r['stop_id']\n stop = Stop(r)\n if stop_id in stops:\n if stop.d != stops[stop_id].d:\n pass\n repeated[stop_id] = stop\n print(\"stop already in collection, skipping\")\n print(r)\n print(stops[stop_id])\n else:\n stops[stop_id] = stop\n return repeated\n\ndef show(stops):\n for stop_id, stop in stops.items():\n print(stop_id, stop)\n \ndef main():\n stops = {}\n repeated = addFromFile(stops, 'asf/stops.csv')\n repeated.update(addFromFile(stops, 'ccba/stops.csv'))\n repeated.update(addFromFile(stops, 'coniferal/stops.csv'))\n repeated.update(addFromFile(stops, 'ersa/stops.csv'))\n\n \n # show(stops)\n show(repeated)\n\n saveStops(stops)\n\n\nif __name__ == '__main__':\n main()",
"step-ids": [
9,
10,
12,
13,
14
]
}
|
[
9,
10,
12,
13,
14
] |
import pymysql
def main():
    conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', password='383240gyz', db='bycicle', charset='utf8')
print(conn)
try:
        with conn.cursor() as cursor:  # context-manager syntax; otherwise an explicit cursor.close() would be needed
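            # Recreate the demo table, insert one row, and commit only if every statement succeeds.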
cursor.execute('''drop table if exists pymysql''')
cursor.execute(''' create table pymysql (a int,b int)''')
cursor.execute('''insert into pymysql(a,b) values(1,1) ''')
conn.commit()
except pymysql.MySQLError as e:
print(e)
conn.rollback()
finally:
conn.close()
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "3135483c68880eeeaf7ebc085a6cd3c0c7f0550c",
"index": 1859,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n conn = pymysql.connect(host='127.0.0.1', port=3306, user='root',\n password='383240gyz', db='bycicle', charset='utf8')\n print(conn)\n try:\n with conn.cursor() as cursor:\n cursor.execute('drop table if exists pymysql')\n cursor.execute(' create table pymysql (a int,b int)')\n cursor.execute('insert into pymysql(a,b) values(1,1) ')\n conn.commit()\n except pymysql.MySQLError as e:\n print(e)\n conn.rollback()\n finally:\n conn.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n conn = pymysql.connect(host='127.0.0.1', port=3306, user='root',\n password='383240gyz', db='bycicle', charset='utf8')\n print(conn)\n try:\n with conn.cursor() as cursor:\n cursor.execute('drop table if exists pymysql')\n cursor.execute(' create table pymysql (a int,b int)')\n cursor.execute('insert into pymysql(a,b) values(1,1) ')\n conn.commit()\n except pymysql.MySQLError as e:\n print(e)\n conn.rollback()\n finally:\n conn.close()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import pymysql\n\n\ndef main():\n conn = pymysql.connect(host='127.0.0.1', port=3306, user='root',\n password='383240gyz', db='bycicle', charset='utf8')\n print(conn)\n try:\n with conn.cursor() as cursor:\n cursor.execute('drop table if exists pymysql')\n cursor.execute(' create table pymysql (a int,b int)')\n cursor.execute('insert into pymysql(a,b) values(1,1) ')\n conn.commit()\n except pymysql.MySQLError as e:\n print(e)\n conn.rollback()\n finally:\n conn.close()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import pymysql\n\n\ndef main():\n conn = pymysql.connect(host='127.0.0.1', port=3306,user='root',password='383240gyz',db='bycicle',charset='utf8')\n print(conn)\n try:\n with conn.cursor() as cursor: # 上下文语法否则需要 # cursor.close()\n cursor.execute('''drop table if exists pymysql''')\n cursor.execute(''' create table pymysql (a int,b int)''')\n cursor.execute('''insert into pymysql(a,b) values(1,1) ''')\n conn.commit()\n except pymysql.MySQLError as e:\n print(e)\n conn.rollback()\n finally:\n conn.close()\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from prediction_model import PredictionModel
import util.nlp as nlp
import re
class NLPPredictionModel(object):
def getPasswordProbabilities(self, sweetwordList):
        # Sweetwords that contain no letters cannot be scored, so they are given a score of 0.0.
result = []
for s in sweetwordList:
words = re.findall(r"[a-zA-Z']+", s)
if not words:
result.append(0.0)
else:
result.append(sum([nlp.getScore(w) for w in words]) / float(len(words)))
sum_result = sum(result)
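        # Normalise the per-sweetword scores so they sum to 1 (assumes at least one score is non-zero).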
return [r / float(sum_result) for r in result]
|
normal
|
{
"blob_id": "1c01fbf7eafd49ada71cb018a62ead5988dcf251",
"index": 2968,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass NLPPredictionModel(object):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass NLPPredictionModel(object):\n\n def getPasswordProbabilities(self, sweetwordList):\n result = []\n for s in sweetwordList:\n words = re.findall(\"[a-zA-Z']+\", s)\n if not words:\n result.append(0.0)\n else:\n result.append(sum([nlp.getScore(w) for w in words]) / float\n (len(words)))\n sum_result = sum(result)\n return [(r / float(sum_result)) for r in result]\n",
"step-4": "from prediction_model import PredictionModel\nimport util.nlp as nlp\nimport re\n\n\nclass NLPPredictionModel(object):\n\n def getPasswordProbabilities(self, sweetwordList):\n result = []\n for s in sweetwordList:\n words = re.findall(\"[a-zA-Z']+\", s)\n if not words:\n result.append(0.0)\n else:\n result.append(sum([nlp.getScore(w) for w in words]) / float\n (len(words)))\n sum_result = sum(result)\n return [(r / float(sum_result)) for r in result]\n",
"step-5": "from prediction_model import PredictionModel\nimport util.nlp as nlp\nimport re\n\n\nclass NLPPredictionModel(object):\n\n def getPasswordProbabilities(self, sweetwordList):\n # can not deal with sweetword that contains no letters\n\n result = []\n for s in sweetwordList:\n words = re.findall(r\"[a-zA-Z']+\", s)\n if not words:\n result.append(0.0)\n else:\n result.append(sum([nlp.getScore(w) for w in words]) / float(len(words)))\n sum_result = sum(result)\n return [r / float(sum_result) for r in result]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.contrib.auth import authenticate, login, logout
from django.template import loader
from django.http import (HttpResponse, JsonResponse,
HttpResponseForbidden, HttpResponseBadRequest)
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
import json
from aimodel.AnalyticSession import AnalyticSession
from data.DatasetConfigManager import DatasetConfigManager
def index(request, err_msg=None):
"""
Renders the index page.
"""
template = loader.get_template("aimodel/index.html")
context = {}
context["err_msg"] = err_msg
return HttpResponse(template.render(context, request))
@require_POST
def log_in(request):
"""
Handles login.
"""
# Get the username and password
username = request.POST.get("username")
password = request.POST.get("password")
if not username or not password:
return index(request, "Invalid credentials!")
# Authenticate and log in
user = authenticate(username=username, password=password)
if user:
login(request, user)
return redirect("/main")
else:
return index(request, "Invalid credentials!")
def main(request):
"""
Renders the main page behind login.
"""
if not request.user.is_authenticated:
return redirect("/")
template = loader.get_template("aimodel/main.html")
context = dict()
context["datasets"] = DatasetConfigManager.loaded_datasets_list()
return HttpResponse(template.render(context, request))
@require_POST
def analytics_session(request):
"""
Starts a new analytic session.
"""
if not request.user.is_authenticated:
return redirect("/")
try:
dataset = request.POST["dataset"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
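    # Replace any existing analytic session with a fresh one for the chosen dataset.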
if "analytics" in request.session:
del request.session["analytics"]
request.session["analytics"] = AnalyticSession(dataset)
bucket_info = request.session["analytics"].bucket_info()
template = loader.get_template("ui/analytics.html")
context = dict()
context["init_buckets"] = json.dumps(bucket_info["buckets"])
context["init_bucket_ordering"] =\
json.dumps(bucket_info["bucket_ordering"])
return HttpResponse(template.render(context, request))
def log_out(request):
"""
Logs the user out.
"""
if request.user.is_authenticated:
logout(request)
return redirect("/")
def _check_session_valid(request):
"""
A helper function checking whether the user is logged in and the session
data is present.
"""
if not request.user.is_authenticated:
return HttpResponseForbidden(reason="Access denied!")
if "analytics" not in request.session:
err = "Could not fetch analytic session data."
return HttpResponseBadRequest(reason=err)
return None
def bucket_info(request):
"""
Fetches information about current buckets.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
return JsonResponse(request.session["analytics"].bucket_info())
def create_bucket(request):
"""
Creates a bucket.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
try:
request.session["analytics"].create_bucket()
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse({})
@require_POST
def delete_bucket(request):
"""
Deletes a bucket.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
bucket_id = request_data["bucket_id"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
request.session["analytics"].delete_bucket(bucket_id)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse({})
@require_POST
def rename_bucket(request):
"""
Renames a bucket.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
bucket_id = request_data["bucket_id"]
new_bucket_name = request_data["new_bucket_name"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
request.session["analytics"].rename_bucket(bucket_id, new_bucket_name)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse({})
@require_POST
def swap_buckets(request):
"""
Swaps the position of two buckets.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
bucket1_id = request_data["bucket1_id"]
bucket2_id = request_data["bucket2_id"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
request.session["analytics"].swap_buckets(bucket1_id, bucket2_id)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse({})
@require_POST
def toggle_bucket(request):
"""
Toggles (activates/deactivates) a bucket.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
bucket_id = request_data["bucket_id"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
request.session["analytics"].toggle_bucket(bucket_id)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse({})
@require_POST
def interaction_round(request):
"""
Performs an interaction round, providing new image suggestions.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
user_feedback = json.loads(request.body)
try:
suggs = request.session["analytics"].interaction_round(user_feedback)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse(suggs, safe=False)
@require_POST
def bucket_view_data(request):
"""
Obtains bucket view data, i.e., the images in the bucket with bucket
confidences.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
bucket_id = request_data["bucket_id"]
sort_by = request_data["sort_by"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
bucket_view_data =\
request.session["analytics"].bucket_view_data(bucket_id, sort_by)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse(bucket_view_data, safe=False)
def toggle_mode(request):
"""
Toggles between Tetris/grid.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request.session["analytics"].toggle_mode()
return JsonResponse({})
@require_POST
def grid_set_size(request):
"""
Resizes the grid.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
dim = request_data["dim"]
new_size = request_data["new_size"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
new_grid_data = request.session["analytics"].grid_set_size(dim,
new_size)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse(new_grid_data, safe=False)
@require_POST
def transfer_images(request):
"""
Transfers (moves/copies) images between buckets.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
images = request_data["images"]
bucket_src = request_data["bucket_src"]
bucket_dst = request_data["bucket_dst"]
mode = request_data["mode"]
sort_by = request_data["sort_by"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
request.session["analytics"].transfer_images(images,
bucket_src, bucket_dst,
mode)
bucket_view_data =\
request.session["analytics"].bucket_view_data(bucket_src, sort_by)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse(bucket_view_data, safe=False)
@require_POST
def fast_forward(request):
"""
Fast-forwards a bucket.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
try:
bucket = request_data["bucket"]
n_ff = request_data["n_ff"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
request.session["analytics"].fast_forward(bucket, n_ff)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse({})
@require_POST
def ff_commit(request):
"""
Commits a fast-forward.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
request_data = json.loads(request.body)
print(request_data)
try:
bucket = request_data["bucket"]
except KeyError:
err = "Invalid request params!"
return HttpResponseBadRequest(reason=err)
try:
request.session["analytics"].ff_commit(bucket)
except ValueError as e:
return HttpResponseBadRequest(reason=str(e))
return JsonResponse({})
def end_session(request):
"""
Ends an analytic session.
"""
session_check = _check_session_valid(request)
if session_check:
return session_check
del request.session["analytics"]
response = {
"redirect_url": "/main"
}
return JsonResponse(response)
|
normal
|
{
"blob_id": "41ca762fe6865613ae4ef2f657f86b516353676f",
"index": 9784,
"step-1": "<mask token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<mask token>\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<mask token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return 
HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<mask token>\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<mask token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"step-2": "<mask token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n<mask token>\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<mask token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n 
if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<mask token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n 
err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"step-3": "<mask token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n username = request.POST.get('username')\n password = request.POST.get('password')\n if not username or not password:\n return index(request, 'Invalid credentials!')\n user = authenticate(username=username, password=password)\n if user:\n login(request, user)\n return redirect('/main')\n else:\n return index(request, 'Invalid credentials!')\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\n<mask token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 
'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return 
HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n<mask token>\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"step-4": "<mask token>\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template('aimodel/index.html')\n context = {}\n context['err_msg'] = err_msg\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n username = request.POST.get('username')\n password = request.POST.get('password')\n if not username or not password:\n return index(request, 'Invalid credentials!')\n user = authenticate(username=username, password=password)\n if user:\n login(request, user)\n return redirect('/main')\n else:\n return index(request, 'Invalid credentials!')\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n template = loader.get_template('aimodel/main.html')\n context = dict()\n context['datasets'] = DatasetConfigManager.loaded_datasets_list()\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n if not request.user.is_authenticated:\n return redirect('/')\n try:\n dataset = request.POST['dataset']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n if 'analytics' in request.session:\n del request.session['analytics']\n request.session['analytics'] = AnalyticSession(dataset)\n bucket_info = request.session['analytics'].bucket_info()\n template = loader.get_template('ui/analytics.html')\n context = dict()\n context['init_buckets'] = json.dumps(bucket_info['buckets'])\n context['init_bucket_ordering'] = json.dumps(bucket_info['bucket_ordering']\n )\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n if request.user.is_authenticated:\n logout(request)\n return redirect('/')\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason='Access denied!')\n if 'analytics' not in request.session:\n err = 'Could not fetch analytic session data.'\n return HttpResponseBadRequest(reason=err)\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n return JsonResponse(request.session['analytics'].bucket_info())\n\n\ndef create_bucket(request):\n \"\"\"\n Creates a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n try:\n request.session['analytics'].create_bucket()\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n<mask token>\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n new_bucket_name = request_data['new_bucket_name']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef 
swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket1_id = request_data['bucket1_id']\n bucket2_id = request_data['bucket2_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n user_feedback = json.loads(request.body)\n try:\n suggs = request.session['analytics'].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket_id = request_data['bucket_id']\n sort_by = request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request.session['analytics'].toggle_mode()\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n dim = request_data['dim']\n new_size = request_data['new_size']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n new_grid_data = request.session['analytics'].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n images = request_data['images']\n bucket_src = request_data['bucket_src']\n bucket_dst = request_data['bucket_dst']\n mode = request_data['mode']\n sort_by = 
request_data['sort_by']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].transfer_images(images, bucket_src,\n bucket_dst, mode)\n bucket_view_data = request.session['analytics'].bucket_view_data(\n bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse(bucket_view_data, safe=False)\n\n\n@require_POST\ndef fast_forward(request):\n \"\"\"\n Fast-forwards a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n try:\n bucket = request_data['bucket']\n n_ff = request_data['n_ff']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].fast_forward(bucket, n_ff)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n request_data = json.loads(request.body)\n print(request_data)\n try:\n bucket = request_data['bucket']\n except KeyError:\n err = 'Invalid request params!'\n return HttpResponseBadRequest(reason=err)\n try:\n request.session['analytics'].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n session_check = _check_session_valid(request)\n if session_check:\n return session_check\n del request.session['analytics']\n response = {'redirect_url': '/main'}\n return JsonResponse(response)\n",
"step-5": "from django.contrib.auth import authenticate, login, logout\nfrom django.template import loader\nfrom django.http import (HttpResponse, JsonResponse,\n HttpResponseForbidden, HttpResponseBadRequest)\nfrom django.shortcuts import redirect\nfrom django.views.decorators.http import require_POST\n\nimport json\n\nfrom aimodel.AnalyticSession import AnalyticSession\nfrom data.DatasetConfigManager import DatasetConfigManager\n\n\ndef index(request, err_msg=None):\n \"\"\"\n Renders the index page.\n \"\"\"\n template = loader.get_template(\"aimodel/index.html\")\n context = {}\n\n context[\"err_msg\"] = err_msg\n\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef log_in(request):\n \"\"\"\n Handles login.\n \"\"\"\n\n # Get the username and password\n username = request.POST.get(\"username\")\n password = request.POST.get(\"password\")\n\n if not username or not password:\n return index(request, \"Invalid credentials!\")\n\n # Authenticate and log in\n user = authenticate(username=username, password=password)\n\n if user:\n login(request, user)\n return redirect(\"/main\")\n else:\n return index(request, \"Invalid credentials!\")\n\n\ndef main(request):\n \"\"\"\n Renders the main page behind login.\n \"\"\"\n\n if not request.user.is_authenticated:\n return redirect(\"/\")\n\n template = loader.get_template(\"aimodel/main.html\")\n context = dict()\n context[\"datasets\"] = DatasetConfigManager.loaded_datasets_list()\n\n return HttpResponse(template.render(context, request))\n\n\n@require_POST\ndef analytics_session(request):\n \"\"\"\n Starts a new analytic session.\n \"\"\"\n\n if not request.user.is_authenticated:\n return redirect(\"/\")\n\n try:\n dataset = request.POST[\"dataset\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n if \"analytics\" in request.session:\n del request.session[\"analytics\"]\n\n request.session[\"analytics\"] = AnalyticSession(dataset)\n\n bucket_info = request.session[\"analytics\"].bucket_info()\n\n template = loader.get_template(\"ui/analytics.html\")\n\n context = dict()\n context[\"init_buckets\"] = json.dumps(bucket_info[\"buckets\"])\n context[\"init_bucket_ordering\"] =\\\n json.dumps(bucket_info[\"bucket_ordering\"])\n\n return HttpResponse(template.render(context, request))\n\n\ndef log_out(request):\n \"\"\"\n Logs the user out.\n \"\"\"\n\n if request.user.is_authenticated:\n logout(request)\n\n return redirect(\"/\")\n\n\ndef _check_session_valid(request):\n \"\"\"\n A helper function checking whether the user is logged in and the session\n data is present.\n \"\"\"\n if not request.user.is_authenticated:\n return HttpResponseForbidden(reason=\"Access denied!\")\n\n if \"analytics\" not in request.session:\n err = \"Could not fetch analytic session data.\"\n return HttpResponseBadRequest(reason=err)\n\n return None\n\n\ndef bucket_info(request):\n \"\"\"\n Fetches information about current buckets.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n return JsonResponse(request.session[\"analytics\"].bucket_info())\n\n\ndef create_bucket(request):\n \"\"\"\n Creates a bucket.\n \"\"\"\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n try:\n request.session[\"analytics\"].create_bucket()\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef delete_bucket(request):\n \"\"\"\n Deletes a 
bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].delete_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef rename_bucket(request):\n \"\"\"\n Renames a bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n new_bucket_name = request_data[\"new_bucket_name\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].rename_bucket(bucket_id, new_bucket_name)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef swap_buckets(request):\n \"\"\"\n Swaps the position of two buckets.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket1_id = request_data[\"bucket1_id\"]\n bucket2_id = request_data[\"bucket2_id\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].swap_buckets(bucket1_id, bucket2_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef toggle_bucket(request):\n \"\"\"\n Toggles (activates/deactivates) a bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].toggle_bucket(bucket_id)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef interaction_round(request):\n \"\"\"\n Performs an interaction round, providing new image suggestions.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n user_feedback = json.loads(request.body)\n\n try:\n suggs = request.session[\"analytics\"].interaction_round(user_feedback)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(suggs, safe=False)\n\n\n@require_POST\ndef bucket_view_data(request):\n \"\"\"\n Obtains bucket view data, i.e., the images in the bucket with bucket\n confidences.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket_id = request_data[\"bucket_id\"]\n sort_by = request_data[\"sort_by\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n bucket_view_data =\\\n request.session[\"analytics\"].bucket_view_data(bucket_id, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(bucket_view_data, safe=False)\n\n\ndef toggle_mode(request):\n \"\"\"\n Toggles between Tetris/grid.\n \"\"\"\n\n 
session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request.session[\"analytics\"].toggle_mode()\n\n return JsonResponse({})\n\n\n@require_POST\ndef grid_set_size(request):\n \"\"\"\n Resizes the grid.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n dim = request_data[\"dim\"]\n new_size = request_data[\"new_size\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n new_grid_data = request.session[\"analytics\"].grid_set_size(dim,\n new_size)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(new_grid_data, safe=False)\n\n\n@require_POST\ndef transfer_images(request):\n \"\"\"\n Transfers (moves/copies) images between buckets.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n images = request_data[\"images\"]\n bucket_src = request_data[\"bucket_src\"]\n bucket_dst = request_data[\"bucket_dst\"]\n mode = request_data[\"mode\"]\n sort_by = request_data[\"sort_by\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].transfer_images(images,\n bucket_src, bucket_dst,\n mode)\n bucket_view_data =\\\n request.session[\"analytics\"].bucket_view_data(bucket_src, sort_by)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse(bucket_view_data, safe=False)\n\n\n@require_POST\ndef fast_forward(request):\n \"\"\"\n Fast-forwards a bucket.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n try:\n bucket = request_data[\"bucket\"]\n n_ff = request_data[\"n_ff\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].fast_forward(bucket, n_ff)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\n@require_POST\ndef ff_commit(request):\n \"\"\"\n Commits a fast-forward.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n request_data = json.loads(request.body)\n\n print(request_data)\n\n try:\n bucket = request_data[\"bucket\"]\n except KeyError:\n err = \"Invalid request params!\"\n return HttpResponseBadRequest(reason=err)\n\n try:\n request.session[\"analytics\"].ff_commit(bucket)\n except ValueError as e:\n return HttpResponseBadRequest(reason=str(e))\n\n return JsonResponse({})\n\n\ndef end_session(request):\n \"\"\"\n Ends an analytic session.\n \"\"\"\n\n session_check = _check_session_valid(request)\n\n if session_check:\n return session_check\n\n del request.session[\"analytics\"]\n\n response = {\n \"redirect_url\": \"/main\"\n }\n\n return JsonResponse(response)\n",
"step-ids": [
13,
16,
17,
19,
22
]
}
|
[
13,
16,
17,
19,
22
] |
import Environment.spot_environment_model
"""This is basically the control center. All actions here are being condensed and brought in from
spot_market_model...... the BRAIN of the simulator"""
class SpotEnvironmentController():
    def __init__(self, debug=False):  # debug enables verbose tracing of controller calls
self.debug = debug
if self.debug == True:
print("... In Controller -> __init__")
self.sem = Environment.spot_environment_model.SpotEnvironmentModel(self.debug)
def load_file(self, path):
self.sem.load_file(path) # loads file by pulling from file path
def load_file_json(self, path):
self.sem.load_file_json(path) # loads file by pulling from file path
def save_project(self, path):
self.sem.save_file(path)
def save_project_json(self, path):
self.sem.save_file_json(path)
    def reset_market(self):
        self.sem.reset_market()
def make_market(self, make_d):
if self.debug == True:
print("... In Controller -> make_market")
self.sem.make_market(make_d)
def set_market_parms(self, parms):
if self.debug == True:
print("... In Controller -> set_market_params")
self.sem.set_market_parms(parms)
def add_buyer(self, bn, values):
if self.debug == True:
print("... In Controller -> add_buyer")
print ("... Buyer {}, values {}".format(bn, values))
self.sem.add_buyer(bn, values)
def add_seller(self, sn, costs):
if self.debug == True:
print("... In Controller -> add_seller")
print ("... Seller {}, costs {}".format(sn, costs))
self.sem.add_seller(sn, costs)
def get_num_buyers(self):
return self.sem.get_num_buyers()
def get_num_sellers(self):
return self.sem.get_num_sellers()
def get_num_units(self):
return self.sem.get_num_units()
def get_seller_costs(self, seller):
return self.sem.get_seller_costs(seller)
def get_buyer_values(self, buyer):
return self.sem.get_buyer_values(buyer)
def make_demand(self):
self.sem.make_demand()
def make_supply(self):
self.sem.make_supply()
def show_env_buyers(self):
self.sem.show_env_buyers()
def show_environment(self):
self.sem.show_environment()
def get_supply_demand_plot_info(self):
return self.sem.get_supply_demand_plot_info()
def get_supply_demand_list(self):
return self.sem.get_supply_demand_list()
def get_equilibrium(self):
return self.sem.get_equilibrium()
def show(self):
self.sem.show()
def plot(self):
self.sem.make_demand()
self.sem.make_supply()
self.sem.plot_supply_demand()
def plot_gui(self, name):
self.sem.make_demand()
self.sem.make_supply()
self.sem.plot_supply_demand_gui(name)
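
# Editorial note: a minimal usage sketch of the controller above. The buyer
# values and seller costs are hypothetical illustration data; the formats the
# model actually accepts are defined by SpotEnvironmentModel in
# Environment.spot_environment_model, which is not shown in this file.
if __name__ == "__main__":
    sec = SpotEnvironmentController(debug=True)
    sec.add_buyer("buyer_1", [200, 150, 100])   # hypothetical reservation values
    sec.add_seller("seller_1", [50, 90, 130])   # hypothetical unit costs
    sec.make_demand()
    sec.make_supply()
    sec.show_environment()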
|
normal
|
{
"blob_id": "7c19b9521dc874a1ff4bed87dae0452cc329224a",
"index": 6890,
"step-1": "<mask token>\n\n\nclass SpotEnvironmentController:\n\n def __init__(self, debug=False):\n self.debug = debug\n if self.debug == True:\n print('... In Controller -> __init__')\n self.sem = Environment.spot_environment_model.SpotEnvironmentModel(self\n .debug)\n\n def load_file(self, path):\n self.sem.load_file(path)\n\n def load_file_json(self, path):\n self.sem.load_file_json(path)\n\n def save_project(self, path):\n self.sem.save_file(path)\n <mask token>\n\n def reset_market(self):\n pass\n self.sem.reset_market()\n\n def make_market(self, make_d):\n if self.debug == True:\n print('... In Controller -> make_market')\n self.sem.make_market(make_d)\n\n def set_market_parms(self, parms):\n if self.debug == True:\n print('... In Controller -> set_market_params')\n self.sem.set_market_parms(parms)\n <mask token>\n <mask token>\n\n def get_num_buyers(self):\n return self.sem.get_num_buyers()\n\n def get_num_sellers(self):\n return self.sem.get_num_sellers()\n <mask token>\n <mask token>\n <mask token>\n\n def make_demand(self):\n self.sem.make_demand()\n <mask token>\n\n def show_env_buyers(self):\n self.sem.show_env_buyers()\n <mask token>\n\n def get_supply_demand_plot_info(self):\n return self.sem.get_supply_demand_plot_info()\n\n def get_supply_demand_list(self):\n return self.sem.get_supply_demand_list()\n <mask token>\n\n def show(self):\n self.sem.show()\n\n def plot(self):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand()\n\n def plot_gui(self, name):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand_gui(name)\n",
"step-2": "<mask token>\n\n\nclass SpotEnvironmentController:\n\n def __init__(self, debug=False):\n self.debug = debug\n if self.debug == True:\n print('... In Controller -> __init__')\n self.sem = Environment.spot_environment_model.SpotEnvironmentModel(self\n .debug)\n\n def load_file(self, path):\n self.sem.load_file(path)\n\n def load_file_json(self, path):\n self.sem.load_file_json(path)\n\n def save_project(self, path):\n self.sem.save_file(path)\n <mask token>\n\n def reset_market(self):\n pass\n self.sem.reset_market()\n\n def make_market(self, make_d):\n if self.debug == True:\n print('... In Controller -> make_market')\n self.sem.make_market(make_d)\n\n def set_market_parms(self, parms):\n if self.debug == True:\n print('... In Controller -> set_market_params')\n self.sem.set_market_parms(parms)\n <mask token>\n <mask token>\n\n def get_num_buyers(self):\n return self.sem.get_num_buyers()\n\n def get_num_sellers(self):\n return self.sem.get_num_sellers()\n\n def get_num_units(self):\n return self.sem.get_num_units()\n <mask token>\n <mask token>\n\n def make_demand(self):\n self.sem.make_demand()\n <mask token>\n\n def show_env_buyers(self):\n self.sem.show_env_buyers()\n <mask token>\n\n def get_supply_demand_plot_info(self):\n return self.sem.get_supply_demand_plot_info()\n\n def get_supply_demand_list(self):\n return self.sem.get_supply_demand_list()\n <mask token>\n\n def show(self):\n self.sem.show()\n\n def plot(self):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand()\n\n def plot_gui(self, name):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand_gui(name)\n",
"step-3": "<mask token>\n\n\nclass SpotEnvironmentController:\n\n def __init__(self, debug=False):\n self.debug = debug\n if self.debug == True:\n print('... In Controller -> __init__')\n self.sem = Environment.spot_environment_model.SpotEnvironmentModel(self\n .debug)\n\n def load_file(self, path):\n self.sem.load_file(path)\n\n def load_file_json(self, path):\n self.sem.load_file_json(path)\n\n def save_project(self, path):\n self.sem.save_file(path)\n\n def save_project_json(self, path):\n self.sem.save_file_json(path)\n\n def reset_market(self):\n pass\n self.sem.reset_market()\n\n def make_market(self, make_d):\n if self.debug == True:\n print('... In Controller -> make_market')\n self.sem.make_market(make_d)\n\n def set_market_parms(self, parms):\n if self.debug == True:\n print('... In Controller -> set_market_params')\n self.sem.set_market_parms(parms)\n <mask token>\n\n def add_seller(self, sn, costs):\n if self.debug == True:\n print('... In Controller -> add_seller')\n print('... Seller {}, costs {}'.format(sn, costs))\n self.sem.add_seller(sn, costs)\n\n def get_num_buyers(self):\n return self.sem.get_num_buyers()\n\n def get_num_sellers(self):\n return self.sem.get_num_sellers()\n\n def get_num_units(self):\n return self.sem.get_num_units()\n\n def get_seller_costs(self, seller):\n return self.sem.get_seller_costs(seller)\n\n def get_buyer_values(self, buyer):\n return self.sem.get_buyer_values(buyer)\n\n def make_demand(self):\n self.sem.make_demand()\n <mask token>\n\n def show_env_buyers(self):\n self.sem.show_env_buyers()\n\n def show_environment(self):\n self.sem.show_environment()\n\n def get_supply_demand_plot_info(self):\n return self.sem.get_supply_demand_plot_info()\n\n def get_supply_demand_list(self):\n return self.sem.get_supply_demand_list()\n\n def get_equilibrium(self):\n return self.sem.get_equilibrium()\n\n def show(self):\n self.sem.show()\n\n def plot(self):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand()\n\n def plot_gui(self, name):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand_gui(name)\n",
"step-4": "import Environment.spot_environment_model\n<mask token>\n\n\nclass SpotEnvironmentController:\n\n def __init__(self, debug=False):\n self.debug = debug\n if self.debug == True:\n print('... In Controller -> __init__')\n self.sem = Environment.spot_environment_model.SpotEnvironmentModel(self\n .debug)\n\n def load_file(self, path):\n self.sem.load_file(path)\n\n def load_file_json(self, path):\n self.sem.load_file_json(path)\n\n def save_project(self, path):\n self.sem.save_file(path)\n\n def save_project_json(self, path):\n self.sem.save_file_json(path)\n\n def reset_market(self):\n pass\n self.sem.reset_market()\n\n def make_market(self, make_d):\n if self.debug == True:\n print('... In Controller -> make_market')\n self.sem.make_market(make_d)\n\n def set_market_parms(self, parms):\n if self.debug == True:\n print('... In Controller -> set_market_params')\n self.sem.set_market_parms(parms)\n\n def add_buyer(self, bn, values):\n if self.debug == True:\n print('... In Controller -> add_buyer')\n print('... Buyer {}, values {}'.format(bn, values))\n self.sem.add_buyer(bn, values)\n\n def add_seller(self, sn, costs):\n if self.debug == True:\n print('... In Controller -> add_seller')\n print('... Seller {}, costs {}'.format(sn, costs))\n self.sem.add_seller(sn, costs)\n\n def get_num_buyers(self):\n return self.sem.get_num_buyers()\n\n def get_num_sellers(self):\n return self.sem.get_num_sellers()\n\n def get_num_units(self):\n return self.sem.get_num_units()\n\n def get_seller_costs(self, seller):\n return self.sem.get_seller_costs(seller)\n\n def get_buyer_values(self, buyer):\n return self.sem.get_buyer_values(buyer)\n\n def make_demand(self):\n self.sem.make_demand()\n\n def make_supply(self):\n self.sem.make_supply()\n\n def show_env_buyers(self):\n self.sem.show_env_buyers()\n\n def show_environment(self):\n self.sem.show_environment()\n\n def get_supply_demand_plot_info(self):\n return self.sem.get_supply_demand_plot_info()\n\n def get_supply_demand_list(self):\n return self.sem.get_supply_demand_list()\n\n def get_equilibrium(self):\n return self.sem.get_equilibrium()\n\n def show(self):\n self.sem.show()\n\n def plot(self):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand()\n\n def plot_gui(self, name):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand_gui(name)\n",
"step-5": "import Environment.spot_environment_model\n\n\"\"\"This is basically the control center. All actions here are being condensed and brought in from\n spot_market_model...... the BRAIN of the simulator\"\"\"\n\nclass SpotEnvironmentController():\n def __init__(self, debug=False): # debug builds an error trap\n self.debug = debug\n if self.debug == True:\n print(\"... In Controller -> __init__\")\n self.sem = Environment.spot_environment_model.SpotEnvironmentModel(self.debug)\n\n def load_file(self, path):\n self.sem.load_file(path) # loads file by pulling from file path\n\n def load_file_json(self, path):\n self.sem.load_file_json(path) # loads file by pulling from file path\n\n def save_project(self, path):\n self.sem.save_file(path)\n\n def save_project_json(self, path):\n self.sem.save_file_json(path)\n\n def reset_market(self):\n pass\n self.sem.reset_market()\n\n def make_market(self, make_d):\n if self.debug == True:\n print(\"... In Controller -> make_market\")\n self.sem.make_market(make_d)\n\n def set_market_parms(self, parms):\n if self.debug == True:\n print(\"... In Controller -> set_market_params\")\n self.sem.set_market_parms(parms)\n\n def add_buyer(self, bn, values):\n if self.debug == True:\n print(\"... In Controller -> add_buyer\")\n print (\"... Buyer {}, values {}\".format(bn, values))\n\n self.sem.add_buyer(bn, values)\n\n def add_seller(self, sn, costs):\n if self.debug == True:\n print(\"... In Controller -> add_seller\")\n print (\"... Seller {}, costs {}\".format(sn, costs))\n self.sem.add_seller(sn, costs)\n\n def get_num_buyers(self):\n return self.sem.get_num_buyers()\n\n def get_num_sellers(self):\n return self.sem.get_num_sellers()\n\n def get_num_units(self):\n return self.sem.get_num_units()\n\n def get_seller_costs(self, seller):\n return self.sem.get_seller_costs(seller)\n\n def get_buyer_values(self, buyer):\n return self.sem.get_buyer_values(buyer)\n\n def make_demand(self):\n self.sem.make_demand()\n\n def make_supply(self):\n self.sem.make_supply()\n\n def show_env_buyers(self):\n self.sem.show_env_buyers()\n\n def show_environment(self):\n self.sem.show_environment()\n\n def get_supply_demand_plot_info(self):\n return self.sem.get_supply_demand_plot_info()\n\n def get_supply_demand_list(self):\n return self.sem.get_supply_demand_list()\n\n def get_equilibrium(self):\n return self.sem.get_equilibrium()\n\n def show(self):\n self.sem.show()\n\n def plot(self):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand()\n\n def plot_gui(self, name):\n self.sem.make_demand()\n self.sem.make_supply()\n self.sem.plot_supply_demand_gui(name)",
"step-ids": [
17,
18,
24,
27,
28
]
}
|
[
17,
18,
24,
27,
28
] |
import sys
from collections import namedtuple
from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, \
QHBoxLayout, QStackedWidget, QListWidget, QListWidgetItem
from PyQt5.QtCore import Qt, QSize
from runWidget import RunWidget
from recordWidget import RecordWidget
def QListWidget_qss():
return '''
QListWidget{
outline: 0px;
}
QListWidget {
min-width: 30px;
max-width: 50px;
color: Black;
background: #CCCCCC;
}
QListWidget::Item:selected {
background: #888888;
border-left: 5px solid red;
}
HistoryPanel:hover {
background: rgb(52, 52, 52);
}
'''
class MainCentralWidget(QWidget):
def __init__(self):
super().__init__()
tab_bar = self.getTabBar(('录制', '运行'))
tab_page = self.getTabPage()
tab_bar.currentRowChanged.connect(tab_page.setCurrentIndex)
hbox = QHBoxLayout(spacing=0)
hbox.setContentsMargins(0, 0, 0, 0)
hbox.addWidget(tab_bar)
hbox.addWidget(tab_page)
self.setLayout(hbox)
def getTabBar(self, names):
tab_bar = QListWidget()
tab_bar.setStyleSheet(QListWidget_qss())
tab_bar.setFrameShape(QListWidget.NoFrame)
tab_bar.setItemAlignment(Qt.AlignCenter)
tab_bar.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
for name in names:
item = QListWidgetItem(name)
item.setTextAlignment(Qt.AlignCenter)
item.setSizeHint(QSize(50, 50))
tab_bar.addItem(item)
tab_bar.setCurrentRow(0)
return tab_bar
def getTabPage(self):
tab_page = QStackedWidget()
tab_page.addWidget(RecordWidget())
tab_page.addWidget(RunWidget())
return tab_page
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.setGeometry(50, 50, 900, 300)
self.setWindowTitle('AutoMouse')
self.setCentralWidget(MainCentralWidget())
self.show()
if __name__ == '__main__':
app = QApplication(sys.argv)
main_window = MainWindow()
sys.exit(app.exec_())
|
normal
|
{
"blob_id": "252a6b97f108b7fdc165ccb2a7f61ce31f129d3d",
"index": 8693,
"step-1": "<mask token>\n\n\nclass MainCentralWidget(QWidget):\n\n def __init__(self):\n super().__init__()\n tab_bar = self.getTabBar(('录制', '运行'))\n tab_page = self.getTabPage()\n tab_bar.currentRowChanged.connect(tab_page.setCurrentIndex)\n hbox = QHBoxLayout(spacing=0)\n hbox.setContentsMargins(0, 0, 0, 0)\n hbox.addWidget(tab_bar)\n hbox.addWidget(tab_page)\n self.setLayout(hbox)\n <mask token>\n\n def getTabPage(self):\n tab_page = QStackedWidget()\n tab_page.addWidget(RecordWidget())\n tab_page.addWidget(RunWidget())\n return tab_page\n\n\nclass MainWindow(QMainWindow):\n\n def __init__(self):\n super().__init__()\n self.setGeometry(50, 50, 900, 300)\n self.setWindowTitle('AutoMouse')\n self.setCentralWidget(MainCentralWidget())\n self.show()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MainCentralWidget(QWidget):\n\n def __init__(self):\n super().__init__()\n tab_bar = self.getTabBar(('录制', '运行'))\n tab_page = self.getTabPage()\n tab_bar.currentRowChanged.connect(tab_page.setCurrentIndex)\n hbox = QHBoxLayout(spacing=0)\n hbox.setContentsMargins(0, 0, 0, 0)\n hbox.addWidget(tab_bar)\n hbox.addWidget(tab_page)\n self.setLayout(hbox)\n\n def getTabBar(self, names):\n tab_bar = QListWidget()\n tab_bar.setStyleSheet(QListWidget_qss())\n tab_bar.setFrameShape(QListWidget.NoFrame)\n tab_bar.setItemAlignment(Qt.AlignCenter)\n tab_bar.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)\n for name in names:\n item = QListWidgetItem(name)\n item.setTextAlignment(Qt.AlignCenter)\n item.setSizeHint(QSize(50, 50))\n tab_bar.addItem(item)\n tab_bar.setCurrentRow(0)\n return tab_bar\n\n def getTabPage(self):\n tab_page = QStackedWidget()\n tab_page.addWidget(RecordWidget())\n tab_page.addWidget(RunWidget())\n return tab_page\n\n\nclass MainWindow(QMainWindow):\n\n def __init__(self):\n super().__init__()\n self.setGeometry(50, 50, 900, 300)\n self.setWindowTitle('AutoMouse')\n self.setCentralWidget(MainCentralWidget())\n self.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef QListWidget_qss():\n return \"\"\"\n QListWidget{\n outline: 0px;\n }\n\n QListWidget {\n min-width: 30px;\n max-width: 50px;\n color: Black;\n background: #CCCCCC;\n }\n\n QListWidget::Item:selected {\n background: #888888;\n border-left: 5px solid red;\n }\n HistoryPanel:hover {\n background: rgb(52, 52, 52);\n }\n \"\"\"\n\n\nclass MainCentralWidget(QWidget):\n\n def __init__(self):\n super().__init__()\n tab_bar = self.getTabBar(('录制', '运行'))\n tab_page = self.getTabPage()\n tab_bar.currentRowChanged.connect(tab_page.setCurrentIndex)\n hbox = QHBoxLayout(spacing=0)\n hbox.setContentsMargins(0, 0, 0, 0)\n hbox.addWidget(tab_bar)\n hbox.addWidget(tab_page)\n self.setLayout(hbox)\n\n def getTabBar(self, names):\n tab_bar = QListWidget()\n tab_bar.setStyleSheet(QListWidget_qss())\n tab_bar.setFrameShape(QListWidget.NoFrame)\n tab_bar.setItemAlignment(Qt.AlignCenter)\n tab_bar.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)\n for name in names:\n item = QListWidgetItem(name)\n item.setTextAlignment(Qt.AlignCenter)\n item.setSizeHint(QSize(50, 50))\n tab_bar.addItem(item)\n tab_bar.setCurrentRow(0)\n return tab_bar\n\n def getTabPage(self):\n tab_page = QStackedWidget()\n tab_page.addWidget(RecordWidget())\n tab_page.addWidget(RunWidget())\n return tab_page\n\n\nclass MainWindow(QMainWindow):\n\n def __init__(self):\n super().__init__()\n self.setGeometry(50, 50, 900, 300)\n self.setWindowTitle('AutoMouse')\n self.setCentralWidget(MainCentralWidget())\n self.show()\n\n\n<mask token>\n",
"step-4": "import sys\nfrom collections import namedtuple\nfrom PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, QHBoxLayout, QStackedWidget, QListWidget, QListWidgetItem\nfrom PyQt5.QtCore import Qt, QSize\nfrom runWidget import RunWidget\nfrom recordWidget import RecordWidget\n\n\ndef QListWidget_qss():\n return \"\"\"\n QListWidget{\n outline: 0px;\n }\n\n QListWidget {\n min-width: 30px;\n max-width: 50px;\n color: Black;\n background: #CCCCCC;\n }\n\n QListWidget::Item:selected {\n background: #888888;\n border-left: 5px solid red;\n }\n HistoryPanel:hover {\n background: rgb(52, 52, 52);\n }\n \"\"\"\n\n\nclass MainCentralWidget(QWidget):\n\n def __init__(self):\n super().__init__()\n tab_bar = self.getTabBar(('录制', '运行'))\n tab_page = self.getTabPage()\n tab_bar.currentRowChanged.connect(tab_page.setCurrentIndex)\n hbox = QHBoxLayout(spacing=0)\n hbox.setContentsMargins(0, 0, 0, 0)\n hbox.addWidget(tab_bar)\n hbox.addWidget(tab_page)\n self.setLayout(hbox)\n\n def getTabBar(self, names):\n tab_bar = QListWidget()\n tab_bar.setStyleSheet(QListWidget_qss())\n tab_bar.setFrameShape(QListWidget.NoFrame)\n tab_bar.setItemAlignment(Qt.AlignCenter)\n tab_bar.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)\n for name in names:\n item = QListWidgetItem(name)\n item.setTextAlignment(Qt.AlignCenter)\n item.setSizeHint(QSize(50, 50))\n tab_bar.addItem(item)\n tab_bar.setCurrentRow(0)\n return tab_bar\n\n def getTabPage(self):\n tab_page = QStackedWidget()\n tab_page.addWidget(RecordWidget())\n tab_page.addWidget(RunWidget())\n return tab_page\n\n\nclass MainWindow(QMainWindow):\n\n def __init__(self):\n super().__init__()\n self.setGeometry(50, 50, 900, 300)\n self.setWindowTitle('AutoMouse')\n self.setCentralWidget(MainCentralWidget())\n self.show()\n\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n main_window = MainWindow()\n sys.exit(app.exec_())\n",
"step-5": "import sys\nfrom collections import namedtuple\nfrom PyQt5.QtWidgets import QApplication, QWidget, QMainWindow, \\\n QHBoxLayout, QStackedWidget, QListWidget, QListWidgetItem\nfrom PyQt5.QtCore import Qt, QSize\n\nfrom runWidget import RunWidget\nfrom recordWidget import RecordWidget\n\n\ndef QListWidget_qss():\n return '''\n QListWidget{\n outline: 0px;\n }\n\n QListWidget {\n min-width: 30px;\n max-width: 50px;\n color: Black;\n background: #CCCCCC;\n }\n\n QListWidget::Item:selected {\n background: #888888;\n border-left: 5px solid red;\n }\n HistoryPanel:hover {\n background: rgb(52, 52, 52);\n }\n '''\n\n\nclass MainCentralWidget(QWidget):\n def __init__(self):\n super().__init__()\n tab_bar = self.getTabBar(('录制', '运行'))\n tab_page = self.getTabPage()\n tab_bar.currentRowChanged.connect(tab_page.setCurrentIndex)\n hbox = QHBoxLayout(spacing=0)\n hbox.setContentsMargins(0, 0, 0, 0)\n hbox.addWidget(tab_bar)\n hbox.addWidget(tab_page)\n self.setLayout(hbox)\n \n def getTabBar(self, names):\n tab_bar = QListWidget()\n tab_bar.setStyleSheet(QListWidget_qss())\n tab_bar.setFrameShape(QListWidget.NoFrame)\n tab_bar.setItemAlignment(Qt.AlignCenter)\n tab_bar.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)\n for name in names:\n item = QListWidgetItem(name)\n item.setTextAlignment(Qt.AlignCenter)\n item.setSizeHint(QSize(50, 50))\n tab_bar.addItem(item)\n tab_bar.setCurrentRow(0)\n return tab_bar\n\n def getTabPage(self):\n tab_page = QStackedWidget()\n tab_page.addWidget(RecordWidget())\n tab_page.addWidget(RunWidget())\n return tab_page\n\n\nclass MainWindow(QMainWindow):\n def __init__(self):\n super().__init__()\n self.setGeometry(50, 50, 900, 300)\n self.setWindowTitle('AutoMouse')\n self.setCentralWidget(MainCentralWidget())\n self.show()\n\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n main_window = MainWindow()\n sys.exit(app.exec_())",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
from quiz.schema.base import Schema
from quiz.schema.schemas import UserSchemas
class RegisterSchema(Schema):
"""
注册
"""
_schema = UserSchemas.REG_SCHEMA.value
class LoginSchema(Schema):
"""
登录
"""
_schema = UserSchemas.LOGIN_SCHEMA.value
|
normal
|
{
"blob_id": "e0d7fb8a9799c91dca0ca0827a5149804c9efabb",
"index": 7082,
"step-1": "<mask token>\n\n\nclass RegisterSchema(Schema):\n <mask token>\n <mask token>\n\n\nclass LoginSchema(Schema):\n \"\"\"\n 登录\n \"\"\"\n _schema = UserSchemas.LOGIN_SCHEMA.value\n",
"step-2": "<mask token>\n\n\nclass RegisterSchema(Schema):\n <mask token>\n _schema = UserSchemas.REG_SCHEMA.value\n\n\nclass LoginSchema(Schema):\n \"\"\"\n 登录\n \"\"\"\n _schema = UserSchemas.LOGIN_SCHEMA.value\n",
"step-3": "<mask token>\n\n\nclass RegisterSchema(Schema):\n \"\"\"\n 注册\n \"\"\"\n _schema = UserSchemas.REG_SCHEMA.value\n\n\nclass LoginSchema(Schema):\n \"\"\"\n 登录\n \"\"\"\n _schema = UserSchemas.LOGIN_SCHEMA.value\n",
"step-4": "from quiz.schema.base import Schema\nfrom quiz.schema.schemas import UserSchemas\n\n\nclass RegisterSchema(Schema):\n \"\"\"\n 注册\n \"\"\"\n _schema = UserSchemas.REG_SCHEMA.value\n\n\nclass LoginSchema(Schema):\n \"\"\"\n 登录\n \"\"\"\n _schema = UserSchemas.LOGIN_SCHEMA.value\n",
"step-5": null,
"step-ids": [
4,
5,
6,
7
]
}
|
[
4,
5,
6,
7
] |
'''
8-6. 도시 이름
도시와 국가 이름을 받는 city_country() 함수를 만드세요. 이 함수는 다음과 같은 문자열을 반환해야 합니다.
'Santiago, Chile'
- 최소한 세 개의 도시-국가 쌍으로 함수를 호출하고 반환값을 출력하세요.
Output:
santiago, chile
ushuaia, argentina
longyearbyen, svalbard
'''
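
# Editorial note: the Korean docstring above describes textbook exercise 8-6,
# "City Names": write a city_country(city, country) function that returns a
# string like 'Santiago, Chile', then call it for at least three city-country
# pairs and print the results. A minimal sketch of one possible solution that
# reproduces the sample output listed above:
def city_country(city, country):
    """Return a formatted 'city, country' string."""
    return f"{city}, {country}"


print(city_country('santiago', 'chile'))
print(city_country('ushuaia', 'argentina'))
print(city_country('longyearbyen', 'svalbard'))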
|
normal
|
{
"blob_id": "2d5abcd75dcbeb1baa3f387035bdcc3b7adbfe3f",
"index": 7856,
"step-1": "<mask token>\n",
"step-2": "'''\n8-6. 도시 이름\n도시와 국가 이름을 받는 city_country() 함수를 만드세요. 이 함수는 다음과 같은 문자열을 반환해야 합니다.\n'Santiago, Chile'\n- 최소한 세 개의 도시-국가 쌍으로 함수를 호출하고 반환값을 출력하세요.\n\nOutput:\nsantiago, chile\nushuaia, argentina\nlongyearbyen, svalbard\n'''\n\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from sqlalchemy import literal, Column, String, Integer, ForeignKey
from sqlalchemy.orm import relationship
from common.db import Base
class Airplane(Base):
__tablename__ = 'airplanes'
id = Column(Integer, primary_key=True)
icao_code = Column(String(6), unique=True, nullable=False) # ICAO 24-bit identifier
airline_id = Column(Integer, ForeignKey('airlines.id'))
airline = relationship('Airline', backref='airplanes')
manufacturer = Column(String)
model = Column(String)
def __init__(self, icao_code, airline, manufacturer=None, model=None):
self.icao_code = icao_code
self.airline = airline
self.manufacturer = manufacturer
self.model = model
def __repr__(self):
return 'Airplane({icao_code}, {airline})'.format(
icao_code=self.icao_code,
airline=self.airline)
@staticmethod
def exists_airplane(session, icao_code):
q = session.query(Airplane).filter(Airplane.icao_code == icao_code)
return session.query(literal(True)).filter(q.exists()).scalar()
@staticmethod
def airplane_from_icao_code(session, icao_code):
return session.query(Airplane).filter(Airplane.icao_code == icao_code).first()
|
normal
|
{
"blob_id": "98dac1ea372f16ecdb818fbe3287ab7e51a0d67c",
"index": 7916,
"step-1": "<mask token>\n\n\nclass Airplane(Base):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, icao_code, airline, manufacturer=None, model=None):\n self.icao_code = icao_code\n self.airline = airline\n self.manufacturer = manufacturer\n self.model = model\n <mask token>\n\n @staticmethod\n def exists_airplane(session, icao_code):\n q = session.query(Airplane).filter(Airplane.icao_code == icao_code)\n return session.query(literal(True)).filter(q.exists()).scalar()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Airplane(Base):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, icao_code, airline, manufacturer=None, model=None):\n self.icao_code = icao_code\n self.airline = airline\n self.manufacturer = manufacturer\n self.model = model\n <mask token>\n\n @staticmethod\n def exists_airplane(session, icao_code):\n q = session.query(Airplane).filter(Airplane.icao_code == icao_code)\n return session.query(literal(True)).filter(q.exists()).scalar()\n\n @staticmethod\n def airplane_from_icao_code(session, icao_code):\n return session.query(Airplane).filter(Airplane.icao_code == icao_code\n ).first()\n",
"step-3": "<mask token>\n\n\nclass Airplane(Base):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, icao_code, airline, manufacturer=None, model=None):\n self.icao_code = icao_code\n self.airline = airline\n self.manufacturer = manufacturer\n self.model = model\n\n def __repr__(self):\n return 'Airplane({icao_code}, {airline})'.format(icao_code=self.\n icao_code, airline=self.airline)\n\n @staticmethod\n def exists_airplane(session, icao_code):\n q = session.query(Airplane).filter(Airplane.icao_code == icao_code)\n return session.query(literal(True)).filter(q.exists()).scalar()\n\n @staticmethod\n def airplane_from_icao_code(session, icao_code):\n return session.query(Airplane).filter(Airplane.icao_code == icao_code\n ).first()\n",
"step-4": "from sqlalchemy import literal, Column, String, Integer, ForeignKey\nfrom sqlalchemy.orm import relationship\nfrom common.db import Base\n\n\nclass Airplane(Base):\n __tablename__ = 'airplanes'\n id = Column(Integer, primary_key=True)\n icao_code = Column(String(6), unique=True, nullable=False)\n airline_id = Column(Integer, ForeignKey('airlines.id'))\n airline = relationship('Airline', backref='airplanes')\n manufacturer = Column(String)\n model = Column(String)\n\n def __init__(self, icao_code, airline, manufacturer=None, model=None):\n self.icao_code = icao_code\n self.airline = airline\n self.manufacturer = manufacturer\n self.model = model\n\n def __repr__(self):\n return 'Airplane({icao_code}, {airline})'.format(icao_code=self.\n icao_code, airline=self.airline)\n\n @staticmethod\n def exists_airplane(session, icao_code):\n q = session.query(Airplane).filter(Airplane.icao_code == icao_code)\n return session.query(literal(True)).filter(q.exists()).scalar()\n\n @staticmethod\n def airplane_from_icao_code(session, icao_code):\n return session.query(Airplane).filter(Airplane.icao_code == icao_code\n ).first()\n",
"step-5": "from sqlalchemy import literal, Column, String, Integer, ForeignKey\nfrom sqlalchemy.orm import relationship\nfrom common.db import Base\n\n\nclass Airplane(Base):\n __tablename__ = 'airplanes'\n\n id = Column(Integer, primary_key=True)\n icao_code = Column(String(6), unique=True, nullable=False) # ICAO 24-bit identifier\n airline_id = Column(Integer, ForeignKey('airlines.id'))\n airline = relationship('Airline', backref='airplanes')\n manufacturer = Column(String)\n model = Column(String)\n\n def __init__(self, icao_code, airline, manufacturer=None, model=None):\n self.icao_code = icao_code\n self.airline = airline\n self.manufacturer = manufacturer\n self.model = model\n\n def __repr__(self):\n return 'Airplane({icao_code}, {airline})'.format(\n icao_code=self.icao_code,\n airline=self.airline)\n\n @staticmethod\n def exists_airplane(session, icao_code):\n q = session.query(Airplane).filter(Airplane.icao_code == icao_code)\n return session.query(literal(True)).filter(q.exists()).scalar()\n\n @staticmethod\n def airplane_from_icao_code(session, icao_code):\n return session.query(Airplane).filter(Airplane.icao_code == icao_code).first()\n \n ",
"step-ids": [
3,
4,
5,
7,
8
]
}
|
[
3,
4,
5,
7,
8
] |
# Create your views here.
from django.shortcuts import render_to_response, Http404, render
from django.template import RequestContext
from books.models import Book
from django.http import HttpResponse, HttpResponseRedirect
import urllib, urllib2
import json
def incr_reads(request, book_id):
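    '''Increment the read counter of the given book on POST; returns the new count, or FAILED.'''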
if request.POST:
try:
readers = Book.objects.get(id=book_id).incr_reads()
return HttpResponse(readers)
except Book.DoesNotExist:
pass
return HttpResponse('FAILED')
def index(request):
'''
No processing, should use direct to template.
'''
return render_to_response('index.html', {}, context_instance=RequestContext(request))
def search(request):
    b = []
    if request.GET and 'q' in request.GET:
        b = Book.search.query(request.GET['q'])
    return render_to_response('books/book_list.html', {'object_list':b}, context_instance=RequestContext(request))
def suggest_image(request, book_id):
'''
So this is a helper view for staff to update the picture.
'''
b = Book.objects.get(id=book_id)
_img = b.get_image_suggestions(first=False)
return render_to_response('books/image_suggestor.html', {'images':_img, 'book':b}, context_instance=RequestContext(request))
|
normal
|
{
"blob_id": "bcbcb4ea3a3b8b5c11e9b107103418ae79a3921c",
"index": 3628,
"step-1": "<mask token>\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\n<mask token>\n\n\ndef suggest_image(request, book_id):\n \"\"\"\n So this is a helper view for staff to update the picture.\n \"\"\"\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images': _img,\n 'book': b}, context_instance=RequestContext(request))\n",
"step-3": "<mask token>\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\ndef search(request):\n if request.GET and 'q' in request.GET:\n b = Book.search.query(request.GET['q'])\n return render_to_response('books/book_list.html', {'object_list': b},\n context_instance=RequestContext(request))\n\n\ndef suggest_image(request, book_id):\n \"\"\"\n So this is a helper view for staff to update the picture.\n \"\"\"\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images': _img,\n 'book': b}, context_instance=RequestContext(request))\n",
"step-4": "from django.shortcuts import render_to_response, Http404, render\nfrom django.template import RequestContext\nfrom books.models import Book\nfrom django.http import HttpResponse, HttpResponseRedirect\nimport urllib, urllib2\nimport json\n\n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\n\ndef index(request):\n \"\"\"\n No processing, should use direct to template.\n \"\"\"\n return render_to_response('index.html', {}, context_instance=\n RequestContext(request))\n\n\ndef search(request):\n if request.GET and 'q' in request.GET:\n b = Book.search.query(request.GET['q'])\n return render_to_response('books/book_list.html', {'object_list': b},\n context_instance=RequestContext(request))\n\n\ndef suggest_image(request, book_id):\n \"\"\"\n So this is a helper view for staff to update the picture.\n \"\"\"\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images': _img,\n 'book': b}, context_instance=RequestContext(request))\n",
"step-5": "# Create your views here.\nfrom django.shortcuts import render_to_response, Http404, render\nfrom django.template import RequestContext\nfrom books.models import Book\nfrom django.http import HttpResponse, HttpResponseRedirect\nimport urllib, urllib2\nimport json \n\ndef incr_reads(request, book_id):\n if request.POST:\n try:\n readers = Book.objects.get(id=book_id).incr_reads()\n return HttpResponse(readers)\n except Book.DoesNotExist:\n pass\n return HttpResponse('FAILED')\n\ndef index(request):\n '''\n No processing, should use direct to template.\n '''\n return render_to_response('index.html', {}, context_instance=RequestContext(request))\n\ndef search(request):\n if request.GET and 'q' in request.GET:\n b = Book.search.query(request.GET['q'])\n return render_to_response('books/book_list.html', {'object_list':b}, context_instance=RequestContext(request))\n\ndef suggest_image(request, book_id):\n '''\n So this is a helper view for staff to update the picture.\n '''\n b = Book.objects.get(id=book_id)\n _img = b.get_image_suggestions(first=False)\n return render_to_response('books/image_suggestor.html', {'images':_img, 'book':b}, context_instance=RequestContext(request))\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from sikuli import *
import logging
import myTools
from datetime import date
import reports_Compare
#---------------------------------------------------#
def fSet_BillDate(pMonth):
#---------------------------------------------------#
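    # drive the Timeslips revise-date dialog to the 27th of pMonth in the configured data year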
if pMonth == 13:
pMonth = 12
logging.debug('- change bill date: ' + str(pMonth) + "/27/" + Settings.dataYear)
time.sleep(1)
# make sure timeslips has focus
myTools.getFocus()
# open revise date
type("b",KeyModifier.ALT)
type("d")
time.sleep(2)
# go to today
type("t")
#get to 01/01 of current year
type(Key.HOME,KeyModifier.CTRL)
# get to 01/01 of the data year
thisYear = date.today().year
for prevYear in range(int(Settings.dataYear),thisYear):
type(Key.PAGE_UP,KeyModifier.CTRL)
time.sleep(1)
# get to 01/27 of the data year
myTools.pressDOWN(4)
myTools.pressLEFT(2)
for nextMonth in range(pMonth-1):
type(Key.PAGE_DOWN)
time.sleep(1)
type(Key.ENTER)
time.sleep(1)
#---------------------------------------------------#
def fRemove_Sort():
#---------------------------------------------------#
time.sleep(1)
logging.debug('- remove sort')
type(Key.F6)
time.sleep(1)
click(Pattern("remove_sort-1.png").similar(0.80))
time.sleep(1)
type(Key.F6)
time.sleep(1)
#---------------------------------------------------#
def fPrint_BillRun(pMonth):
#---------------------------------------------------#
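    # generate the month's bills to a text file, approve them, then diff against the baseline report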
reportName = "Bill-" + myTools.padZero(pMonth) + "-" + Settings.tsVersion + ".txt"
logging.debug('fPrint_BillRun: ' + reportName)
type("b",KeyModifier.CTRL)
time.sleep(1)
fRemove_Sort()
myTools.enterSlipFilter(pMonth,"n")
# print bills to text
logging.debug('-- print')
type(Key.ENTER)
time.sleep(1)
# fill in path and name; press ENTER
type(Settings.repFolder + "\\" + reportName)
time.sleep(1)
type(Key.ENTER)
time.sleep(1)
if exists("replace_msg.png"):
type("y")
# approve bills
logging.debug('-- approve')
wait(Pattern("approve_bills-1.png").targetOffset(-100,-8),FOREVER)
click(Pattern("approve_bills-1.png").targetOffset(-100,-8))
type(Key.ENTER)
time.sleep(3)
if int(Settings.tsVersion) > 2015:
wait("approving_bills.png",FOREVER)
while exists("approving_bills.png"):
logging.debug('--- msg exists')
time.sleep(2)
else:
waitVanish("approving_statusbar.png",FOREVER)
time.sleep(1)
# compare the report with baseline
reports_Compare.Compare_OneReport(reportName)
# close report entry / don't save
logging.debug('-- close report window')
click("report_generate_bills.png")
type(Key.F4,KeyModifier.CTRL)
time.sleep(2)
type("n")
time.sleep(1)
#---------------------------------------------------#
def fPrint_Bills(pMonth):
#---------------------------------------------------#
myTools.sectionStartTimeStamp("bills" + str(pMonth))
logging.debug('Print_Bills: ' + str(pMonth))
fSet_BillDate(pMonth)
fPrint_BillRun(pMonth)
myTools.sectionEndTimeStamp()
|
normal
|
{
"blob_id": "69721dca0f5d8396e330696cde52bfabad33c895",
"index": 3242,
"step-1": "<mask token>\n\n\ndef fSet_BillDate(pMonth):\n if pMonth == 13:\n pMonth = 12\n logging.debug('- change bill date: ' + str(pMonth) + '/27/' + Settings.\n dataYear)\n time.sleep(1)\n myTools.getFocus()\n type('b', KeyModifier.ALT)\n type('d')\n time.sleep(2)\n type('t')\n type(Key.HOME, KeyModifier.CTRL)\n thisYear = date.today().year\n for prevYear in range(int(Settings.dataYear), thisYear):\n type(Key.PAGE_UP, KeyModifier.CTRL)\n time.sleep(1)\n myTools.pressDOWN(4)\n myTools.pressLEFT(2)\n for nextMonth in range(pMonth - 1):\n type(Key.PAGE_DOWN)\n time.sleep(1)\n type(Key.ENTER)\n time.sleep(1)\n\n\ndef fRemove_Sort():\n time.sleep(1)\n logging.debug('- remove sort')\n type(Key.F6)\n time.sleep(1)\n click(Pattern('remove_sort-1.png').similar(0.8))\n time.sleep(1)\n type(Key.F6)\n time.sleep(1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fSet_BillDate(pMonth):\n if pMonth == 13:\n pMonth = 12\n logging.debug('- change bill date: ' + str(pMonth) + '/27/' + Settings.\n dataYear)\n time.sleep(1)\n myTools.getFocus()\n type('b', KeyModifier.ALT)\n type('d')\n time.sleep(2)\n type('t')\n type(Key.HOME, KeyModifier.CTRL)\n thisYear = date.today().year\n for prevYear in range(int(Settings.dataYear), thisYear):\n type(Key.PAGE_UP, KeyModifier.CTRL)\n time.sleep(1)\n myTools.pressDOWN(4)\n myTools.pressLEFT(2)\n for nextMonth in range(pMonth - 1):\n type(Key.PAGE_DOWN)\n time.sleep(1)\n type(Key.ENTER)\n time.sleep(1)\n\n\ndef fRemove_Sort():\n time.sleep(1)\n logging.debug('- remove sort')\n type(Key.F6)\n time.sleep(1)\n click(Pattern('remove_sort-1.png').similar(0.8))\n time.sleep(1)\n type(Key.F6)\n time.sleep(1)\n\n\n<mask token>\n\n\ndef fPrint_Bills(pMonth):\n myTools.sectionStartTimeStamp('bills' + str(pMonth))\n logging.debug('Print_Bills: ' + str(pMonth))\n fSet_BillDate(pMonth)\n fPrint_BillRun(pMonth)\n myTools.sectionEndTimeStamp()\n",
"step-3": "<mask token>\n\n\ndef fSet_BillDate(pMonth):\n if pMonth == 13:\n pMonth = 12\n logging.debug('- change bill date: ' + str(pMonth) + '/27/' + Settings.\n dataYear)\n time.sleep(1)\n myTools.getFocus()\n type('b', KeyModifier.ALT)\n type('d')\n time.sleep(2)\n type('t')\n type(Key.HOME, KeyModifier.CTRL)\n thisYear = date.today().year\n for prevYear in range(int(Settings.dataYear), thisYear):\n type(Key.PAGE_UP, KeyModifier.CTRL)\n time.sleep(1)\n myTools.pressDOWN(4)\n myTools.pressLEFT(2)\n for nextMonth in range(pMonth - 1):\n type(Key.PAGE_DOWN)\n time.sleep(1)\n type(Key.ENTER)\n time.sleep(1)\n\n\ndef fRemove_Sort():\n time.sleep(1)\n logging.debug('- remove sort')\n type(Key.F6)\n time.sleep(1)\n click(Pattern('remove_sort-1.png').similar(0.8))\n time.sleep(1)\n type(Key.F6)\n time.sleep(1)\n\n\ndef fPrint_BillRun(pMonth):\n reportName = 'Bill-' + myTools.padZero(pMonth\n ) + '-' + Settings.tsVersion + '.txt'\n logging.debug('fPrint_BillRun: ' + reportName)\n type('b', KeyModifier.CTRL)\n time.sleep(1)\n fRemove_Sort()\n myTools.enterSlipFilter(pMonth, 'n')\n logging.debug('-- print')\n type(Key.ENTER)\n time.sleep(1)\n type(Settings.repFolder + '\\\\' + reportName)\n time.sleep(1)\n type(Key.ENTER)\n time.sleep(1)\n if exists('replace_msg.png'):\n type('y')\n logging.debug('-- approve')\n wait(Pattern('approve_bills-1.png').targetOffset(-100, -8), FOREVER)\n click(Pattern('approve_bills-1.png').targetOffset(-100, -8))\n type(Key.ENTER)\n time.sleep(3)\n if int(Settings.tsVersion) > 2015:\n wait('approving_bills.png', FOREVER)\n while exists('approving_bills.png'):\n logging.debug('--- msg exists')\n time.sleep(2)\n else:\n waitVanish('approving_statusbar.png', FOREVER)\n time.sleep(1)\n reports_Compare.Compare_OneReport(reportName)\n logging.debug('-- close report window')\n click('report_generate_bills.png')\n type(Key.F4, KeyModifier.CTRL)\n time.sleep(2)\n type('n')\n time.sleep(1)\n\n\ndef fPrint_Bills(pMonth):\n myTools.sectionStartTimeStamp('bills' + str(pMonth))\n logging.debug('Print_Bills: ' + str(pMonth))\n fSet_BillDate(pMonth)\n fPrint_BillRun(pMonth)\n myTools.sectionEndTimeStamp()\n",
"step-4": "from sikuli import *\nimport logging\nimport myTools\nfrom datetime import date\nimport reports_Compare\n\n\ndef fSet_BillDate(pMonth):\n if pMonth == 13:\n pMonth = 12\n logging.debug('- change bill date: ' + str(pMonth) + '/27/' + Settings.\n dataYear)\n time.sleep(1)\n myTools.getFocus()\n type('b', KeyModifier.ALT)\n type('d')\n time.sleep(2)\n type('t')\n type(Key.HOME, KeyModifier.CTRL)\n thisYear = date.today().year\n for prevYear in range(int(Settings.dataYear), thisYear):\n type(Key.PAGE_UP, KeyModifier.CTRL)\n time.sleep(1)\n myTools.pressDOWN(4)\n myTools.pressLEFT(2)\n for nextMonth in range(pMonth - 1):\n type(Key.PAGE_DOWN)\n time.sleep(1)\n type(Key.ENTER)\n time.sleep(1)\n\n\ndef fRemove_Sort():\n time.sleep(1)\n logging.debug('- remove sort')\n type(Key.F6)\n time.sleep(1)\n click(Pattern('remove_sort-1.png').similar(0.8))\n time.sleep(1)\n type(Key.F6)\n time.sleep(1)\n\n\ndef fPrint_BillRun(pMonth):\n reportName = 'Bill-' + myTools.padZero(pMonth\n ) + '-' + Settings.tsVersion + '.txt'\n logging.debug('fPrint_BillRun: ' + reportName)\n type('b', KeyModifier.CTRL)\n time.sleep(1)\n fRemove_Sort()\n myTools.enterSlipFilter(pMonth, 'n')\n logging.debug('-- print')\n type(Key.ENTER)\n time.sleep(1)\n type(Settings.repFolder + '\\\\' + reportName)\n time.sleep(1)\n type(Key.ENTER)\n time.sleep(1)\n if exists('replace_msg.png'):\n type('y')\n logging.debug('-- approve')\n wait(Pattern('approve_bills-1.png').targetOffset(-100, -8), FOREVER)\n click(Pattern('approve_bills-1.png').targetOffset(-100, -8))\n type(Key.ENTER)\n time.sleep(3)\n if int(Settings.tsVersion) > 2015:\n wait('approving_bills.png', FOREVER)\n while exists('approving_bills.png'):\n logging.debug('--- msg exists')\n time.sleep(2)\n else:\n waitVanish('approving_statusbar.png', FOREVER)\n time.sleep(1)\n reports_Compare.Compare_OneReport(reportName)\n logging.debug('-- close report window')\n click('report_generate_bills.png')\n type(Key.F4, KeyModifier.CTRL)\n time.sleep(2)\n type('n')\n time.sleep(1)\n\n\ndef fPrint_Bills(pMonth):\n myTools.sectionStartTimeStamp('bills' + str(pMonth))\n logging.debug('Print_Bills: ' + str(pMonth))\n fSet_BillDate(pMonth)\n fPrint_BillRun(pMonth)\n myTools.sectionEndTimeStamp()\n",
"step-5": "from sikuli import *\nimport logging\nimport myTools\nfrom datetime import date\nimport reports_Compare\n\n#---------------------------------------------------#\ndef fSet_BillDate(pMonth):\n#---------------------------------------------------#\n\n if pMonth == 13:\n pMonth = 12 \n\n logging.debug('- change bill date: ' + str(pMonth) + \"/27/\" + Settings.dataYear)\n time.sleep(1)\n\n # make sure timeslips has focus\n myTools.getFocus()\n\n # open revise date\n type(\"b\",KeyModifier.ALT)\n type(\"d\") \n time.sleep(2)\n\n # go to today\n type(\"t\")\n\n #get to 01/01 of current year\n type(Key.HOME,KeyModifier.CTRL) \n\n # get to 01/01 of the data year\n thisYear = date.today().year\n for prevYear in range(int(Settings.dataYear),thisYear):\n type(Key.PAGE_UP,KeyModifier.CTRL) \n time.sleep(1)\n\n # get to 01/27 of the data year\n myTools.pressDOWN(4)\n myTools.pressLEFT(2) \n\n for nextMonth in range(pMonth-1):\n type(Key.PAGE_DOWN) \n time.sleep(1)\n \n type(Key.ENTER)\n time.sleep(1) \n\n#---------------------------------------------------#\ndef fRemove_Sort():\n#---------------------------------------------------#\n\n time.sleep(1)\n logging.debug('- remove sort')\n \n type(Key.F6)\n time.sleep(1)\n\n click(Pattern(\"remove_sort-1.png\").similar(0.80))\n time.sleep(1)\n \n type(Key.F6)\n time.sleep(1)\n\n#---------------------------------------------------#\ndef fPrint_BillRun(pMonth):\n#---------------------------------------------------#\n \n reportName = \"Bill-\" + myTools.padZero(pMonth) + \"-\" + Settings.tsVersion + \".txt\" \n logging.debug('fPrint_BillRun: ' + reportName)\n\n type(\"b\",KeyModifier.CTRL)\n time.sleep(1)\n\n fRemove_Sort()\n myTools.enterSlipFilter(pMonth,\"n\")\n\n # print bills to text\n logging.debug('-- print') \n type(Key.ENTER) \n time.sleep(1)\n\n # fill in path and name; press ENTER\n type(Settings.repFolder + \"\\\\\" + reportName)\n time.sleep(1)\n type(Key.ENTER) \n time.sleep(1)\n\n if exists(\"replace_msg.png\"):\n type(\"y\")\n\n # approve bills\n logging.debug('-- approve') \n wait(Pattern(\"approve_bills-1.png\").targetOffset(-100,-8),FOREVER)\n click(Pattern(\"approve_bills-1.png\").targetOffset(-100,-8))\n type(Key.ENTER)\n time.sleep(3)\n\n if int(Settings.tsVersion) > 2015:\n wait(\"approving_bills.png\",FOREVER) \n while exists(\"approving_bills.png\"):\n logging.debug('--- msg exists')\n time.sleep(2)\n else:\n waitVanish(\"approving_statusbar.png\",FOREVER) \n time.sleep(1)\n\n # compare the report with baseline\n reports_Compare.Compare_OneReport(reportName)\n\n # close report entry / don't save\n logging.debug('-- close report window')\n click(\"report_generate_bills.png\")\n type(Key.F4,KeyModifier.CTRL)\n time.sleep(2)\n type(\"n\") \n time.sleep(1)\n\n#---------------------------------------------------#\ndef fPrint_Bills(pMonth):\n#---------------------------------------------------#\n\n myTools.sectionStartTimeStamp(\"bills\" + str(pMonth))\n logging.debug('Print_Bills: ' + str(pMonth))\n \n fSet_BillDate(pMonth)\n fPrint_BillRun(pMonth)\n myTools.sectionEndTimeStamp()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from decimal import Decimal
from django.conf import settings
from blood.models import Bank, Blood
class Cart(object):
def __init__(self, request):
self.session = request.session
cart = self.session.get(settings.CART_SESSION_ID)
if not cart:
cart = self.session[settings.CART_SESSION_ID] = {}
self.cart = cart
def add(self, blood, quantity=1, update_quantity=False):
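        # add units of this blood type to the cart, never letting the quantity exceed available stock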
blood_id = str(blood.id)
max_quantity = Blood.objects.get(id=blood.id).stock
if blood_id not in self.cart:
self.cart[blood_id] = {
'quantity': 0, 'price': str(blood.price)}
if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:
self.cart[blood_id]['quantity'] = quantity
elif int(self.cart[blood_id]['quantity']+quantity) <= max_quantity:
self.cart[blood_id]['quantity'] += quantity
self.save()
def save(self):
self.session[settings.CART_SESSION_ID] = self.cart
self.session.modified = True
def remove(self, blood):
blood_id = str(blood.id)
if blood_id in self.cart:
del self.cart[blood_id]
self.save()
def __iter__(self):
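        # attach each Blood instance to its cart entry and compute per-item totals while iterating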
blood_ids = self.cart.keys()
bloods = Blood.objects.filter(id__in=blood_ids)
for blood in bloods:
self.cart[str(blood.id)]['blood'] = blood
for item in self.cart.values():
item['price'] = Decimal(item['price'])
item['total_price'] = item['price'] * item['quantity']
yield item
def __len__(self):
return sum(item['quantity'] for item in self.cart.values())
def get_total_price(self):
return sum(Decimal(item['price']) * item['quantity'] for item in self.cart.values())
def clear(self):
del self.session[settings.CART_SESSION_ID]
self.session.modified = True
|
normal
|
{
"blob_id": "a638504737d0069d4fa40b0fc5026203904563e8",
"index": 5537,
"step-1": "<mask token>\n\n\nclass Cart(object):\n <mask token>\n <mask token>\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n <mask token>\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Cart(object):\n <mask token>\n <mask token>\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n <mask token>\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-3": "<mask token>\n\n\nclass Cart(object):\n <mask token>\n\n def add(self, blood, quantity=1, update_quantity=False):\n blood_id = str(blood.id)\n max_quantity = Blood.objects.get(id=blood.id).stock\n if blood_id not in self.cart:\n self.cart[blood_id] = {'quantity': 0, 'price': str(blood.price)}\n if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:\n self.cart[blood_id]['quantity'] = quantity\n elif int(self.cart[blood_id]['quantity'] + quantity) <= max_quantity:\n self.cart[blood_id]['quantity'] += quantity\n self.save()\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n\n def remove(self, blood):\n blood_id = str(blood.id)\n if blood_id in self.cart:\n del self.cart[blood_id]\n self.save()\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-4": "<mask token>\n\n\nclass Cart(object):\n\n def __init__(self, request):\n self.session = request.session\n cart = self.session.get(settings.CART_SESSION_ID)\n if not cart:\n cart = self.session[settings.CART_SESSION_ID] = {}\n self.cart = cart\n\n def add(self, blood, quantity=1, update_quantity=False):\n blood_id = str(blood.id)\n max_quantity = Blood.objects.get(id=blood.id).stock\n if blood_id not in self.cart:\n self.cart[blood_id] = {'quantity': 0, 'price': str(blood.price)}\n if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:\n self.cart[blood_id]['quantity'] = quantity\n elif int(self.cart[blood_id]['quantity'] + quantity) <= max_quantity:\n self.cart[blood_id]['quantity'] += quantity\n self.save()\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n\n def remove(self, blood):\n blood_id = str(blood.id)\n if blood_id in self.cart:\n del self.cart[blood_id]\n self.save()\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in\n self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-5": "from decimal import Decimal\nfrom django.conf import settings\nfrom blood.models import Bank, Blood\n\n\nclass Cart(object):\n def __init__(self, request):\n self.session = request.session\n cart = self.session.get(settings.CART_SESSION_ID)\n if not cart:\n cart = self.session[settings.CART_SESSION_ID] = {}\n self.cart = cart\n\n def add(self, blood, quantity=1, update_quantity=False):\n blood_id = str(blood.id)\n max_quantity = Blood.objects.get(id=blood.id).stock\n if blood_id not in self.cart:\n self.cart[blood_id] = {\n 'quantity': 0, 'price': str(blood.price)}\n if update_quantity and self.cart[blood_id]['quantity'] <= max_quantity:\n self.cart[blood_id]['quantity'] = quantity\n elif int(self.cart[blood_id]['quantity']+quantity) <= max_quantity:\n self.cart[blood_id]['quantity'] += quantity\n self.save()\n\n def save(self):\n self.session[settings.CART_SESSION_ID] = self.cart\n self.session.modified = True\n\n def remove(self, blood):\n blood_id = str(blood.id)\n if blood_id in self.cart:\n del self.cart[blood_id]\n self.save()\n\n def __iter__(self):\n blood_ids = self.cart.keys()\n bloods = Blood.objects.filter(id__in=blood_ids)\n for blood in bloods:\n self.cart[str(blood.id)]['blood'] = blood\n\n for item in self.cart.values():\n item['price'] = Decimal(item['price'])\n item['total_price'] = item['price'] * item['quantity']\n yield item\n\n def __len__(self):\n return sum(item['quantity'] for item in self.cart.values())\n\n def get_total_price(self):\n return sum(Decimal(item['price']) * item['quantity'] for item in self.cart.values())\n\n def clear(self):\n del self.session[settings.CART_SESSION_ID]\n self.session.modified = True\n",
"step-ids": [
5,
6,
8,
9,
11
]
}
|
[
5,
6,
8,
9,
11
] |
from django.db import models
from accounts.models import User
from cmdb.models.base import IDC
from cmdb.models.asset import Server, NetDevice
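# Accessory (spare-part) models: one spec table per part type, plus stock, usage and inventory records.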
class CPU(models.Model):
# Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz
version = models.CharField('型号版本', max_length=100, unique=True)
speed = models.PositiveSmallIntegerField('频率MHz')
process = models.PositiveSmallIntegerField('线程数')
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_cpu'
verbose_name = u'配件CPU表'
verbose_name_plural = u'配件CPU表'
class Memory(models.Model):
ram_type = models.CharField('内存类型', max_length=4, choices=(('ddr3', 'DDR3'), ('ddr4', 'DDR4'), ('ddr5', 'DDR5')))
ram_size = models.PositiveSmallIntegerField('内存容量(G)')
speed = models.PositiveSmallIntegerField('速率(MT/s)')
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_memory'
unique_together = ('ram_type', 'ram_size', 'speed')
verbose_name = u'配件内存表'
verbose_name_plural = u'配件内存表'
class Disk(models.Model):
device_type = models.CharField('硬盘类型', max_length=4, choices=(('sata', 'SATA'), ('sas', 'SAS'), ('ssd', 'SSD')))
capacity = models.PositiveSmallIntegerField('容量(G)')
rpm = models.PositiveSmallIntegerField('转率')
dimensions = models.CharField('尺寸(英寸)', max_length=3, choices=(('2.5', '2.5寸'), ('3.5', '3.5寸')))
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_disk'
unique_together = ('device_type', 'capacity', 'rpm', 'dimensions')
verbose_name = u'配件硬盘表'
verbose_name_plural = u'配件硬盘表'
class Caddy(models.Model):
caddy_dimensions = {
'2.5s': '2.5寸 R740', '2.5': '2.5寸', '3.5': '3.5寸'
}
dimensions = models.CharField('尺寸(英寸)', max_length=4, choices=caddy_dimensions.items(), unique=True)
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_caddy'
verbose_name = u'配件硬盘托架表'
verbose_name_plural = u'配件硬盘托架表'
class NetworkAdapter(models.Model):
speed = models.CharField('网卡速率', max_length=6, choices=(('100MbE', '百兆'), ('GbE', '千兆'), ('10GbE', '万兆')), unique=True)
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_network_adapter'
verbose_name = u'配件网卡表'
verbose_name_plural = u'配件网卡表'
class NetworkCable(models.Model):
cat = models.CharField('网线类型', max_length=2, choices=(('5', '5类线'), ('5e', '超5类线'), ('6', '6类线'), ('6e', '超6类线')))
length = models.PositiveSmallIntegerField('长度(米)')
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_network_cable'
unique_together = ('cat', 'length')
verbose_name = u'配件网线表'
verbose_name_plural = u'配件网线表'
class OpticalTransceiver(models.Model):
# Small form-factor pluggable transceiver 小型可插拔光模块
"""
Mfg. Compatibility: Cisco
Part Number: SFP-10G-LR-10pk
Form Factor: SFP+
TX Wavelength: 1310nm
Reach: 10km
Cable Type: SMF
Rate Category: 10GBase
Interface Type: LR
DDM: Yes
Connector Type: Dual-LC
"""
information = models.CharField('综述介绍', max_length=20, blank=True, null=True)
mode = models.CharField('模式', max_length=6, choices=(('single', '单模'), ('multi', '多模')))
reach = models.FloatField('最大传输距离(km)')
rate = models.CharField('传输速率', max_length=6, choices=(('100MbE', '百兆'), ('GbE', '千兆'), ('10GbE', '万兆')))
image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d', null=True, blank=True)
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_optical_transceiver'
unique_together = ('mode', 'reach', 'rate')
verbose_name = u'配件光模块表'
verbose_name_plural = u'配件光模块表'
class JumpWire(models.Model):
information = models.CharField('综述介绍', max_length=20, blank=True, null=True)
mode = models.CharField('模式', max_length=6, choices=(('single', '单模'), ('multi', '多模')))
interface = models.CharField('光纤接口', max_length=6, choices=(('lc', '小方头'), ('sc', '大方头'), ('fc', '圆头')))
length = models.PositiveSmallIntegerField('长度(米)')
image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d', null=True, blank=True)
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_jump_wire'
unique_together = ('mode', 'interface', 'length')
verbose_name = u'配件跳线表'
verbose_name_plural = u'配件跳线表'
accessory_item = {
'cpu': 'CPU', 'memory': '内存', 'disk': '硬盘', 'caddy': '硬盘托架', 'network_adapter': '网卡', 'network_cable': '网线',
'transceiver': '光模块', 'jump_wire': '跳线'
}
class Accessory(models.Model):
storehouse = models.ForeignKey(IDC, on_delete=models.CASCADE, help_text='仓库')
mode = models.CharField('配件类型', max_length=20, choices=accessory_item.items())
mode_id = models.IntegerField('配件型号表主键ID')
manufacturer = models.CharField('硬件制造商', max_length=20, blank=True, null=True)
sn = models.CharField('Serial Number', max_length=50, blank=True, null=True)
vendor = models.CharField('采购渠道(供应商)', max_length=20)
trade_date = models.DateField('采购时间', blank=True, null=True)
expired_date = models.DateField('过保时间', blank=True, null=True)
comment = models.CharField('备注', max_length=50, blank=True, null=True)
is_active = models.BooleanField('是否可用', default=True)
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_accessory'
verbose_name = u'配件详细表'
verbose_name_plural = u'配件详细表'
class UseRecord(models.Model):
"""
CPU、内存、硬盘、网卡、光模块 配件,需要知道被哪个资产使用
"""
accessory = models.ForeignKey(Accessory, on_delete=models.CASCADE, help_text='配件')
server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text='服务器', blank=True, null=True)
net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE, help_text='网络设备', blank=True, null=True)
operate = models.CharField('操作', max_length=7, choices=(('install', '安装'), ('remove', '取下')), default='install')
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_use_record'
verbose_name = u'配件使用记录表'
verbose_name_plural = u'配件使用记录表'
class InventoryRecord(models.Model):
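    """
    配件进货及消费记录: logs purchase, receive and revert operations per accessory type,
    tied to the operating user and, where relevant, the target server or network device.
    """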
accessory = models.CharField('配件', max_length=20, choices=accessory_item.items())
operate = models.CharField('操作', max_length=8, choices=(('purchase', '采购'), ('receive', '领用'), ('revert', '归还')))
server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text='服务器', blank=True, null=True)
net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE, help_text='网络设备', blank=True, null=True)
content = models.CharField('内容', max_length=250, blank=True, null=True)
user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='操作员')
created_date = models.DateTimeField('创建时间', auto_now_add=True)
class Meta:
db_table = 'cmdb_acc_inventory_record'
verbose_name = u'配件进货及消费记录表'
verbose_name_plural = u'配件进货及消费记录表'
|
normal
|
{
"blob_id": "6bd423223e1ec2bb3a213158ac6da3a6483b531f",
"index": 4914,
"step-1": "<mask token>\n\n\nclass NetworkAdapter(models.Model):\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'cmdb_acc_network_adapter'\n verbose_name = u'配件网卡表'\n verbose_name_plural = u'配件网卡表'\n\n\nclass NetworkCable(models.Model):\n cat = models.CharField('网线类型', max_length=2, choices=(('5', '5类线'), (\n '5e', '超5类线'), ('6', '6类线'), ('6e', '超6类线')))\n length = models.PositiveSmallIntegerField('长度(米)')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_network_cable'\n unique_together = 'cat', 'length'\n verbose_name = u'配件网线表'\n verbose_name_plural = u'配件网线表'\n\n\nclass OpticalTransceiver(models.Model):\n \"\"\"\n Mfg. Compatibility: Cisco\n Part Number: SFP-10G-LR-10pk\n Form Factor: SFP+\n TX Wavelength: 1310nm\n Reach: 10km\n Cable Type: SMF\n Rate Category: 10GBase\n Interface Type: LR\n DDM: Yes\n Connector Type: Dual-LC\n \"\"\"\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n reach = models.FloatField('最大传输距离(km)')\n rate = models.CharField('传输速率', max_length=6, choices=(('100MbE', '百兆'),\n ('GbE', '千兆'), ('10GbE', '万兆')))\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_optical_transceiver'\n unique_together = 'mode', 'reach', 'rate'\n verbose_name = u'配件光模块表'\n verbose_name_plural = u'配件光模块表'\n\n\nclass JumpWire(models.Model):\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n interface = models.CharField('光纤接口', max_length=6, choices=(('lc',\n '小方头'), ('sc', '大方头'), ('fc', '圆头')))\n length = models.PositiveSmallIntegerField('长度(米)')\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_jump_wire'\n unique_together = 'mode', 'interface', 'length'\n verbose_name = u'配件跳线表'\n verbose_name_plural = u'配件跳线表'\n\n\n<mask token>\n\n\nclass Accessory(models.Model):\n storehouse = models.ForeignKey(IDC, on_delete=models.CASCADE, help_text\n ='仓库')\n mode = models.CharField('配件类型', max_length=20, choices=accessory_item.\n items())\n mode_id = models.IntegerField('配件型号表主键ID')\n manufacturer = models.CharField('硬件制造商', max_length=20, blank=True,\n null=True)\n sn = models.CharField('Serial Number', max_length=50, blank=True, null=True\n )\n vendor = models.CharField('采购渠道(供应商)', max_length=20)\n trade_date = models.DateField('采购时间', blank=True, null=True)\n expired_date = models.DateField('过保时间', blank=True, null=True)\n comment = models.CharField('备注', max_length=50, blank=True, null=True)\n is_active = models.BooleanField('是否可用', default=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_accessory'\n verbose_name = u'配件详细表'\n verbose_name_plural = u'配件详细表'\n\n\nclass UseRecord(models.Model):\n \"\"\"\n CPU、内存、硬盘、网卡、光模块 配件,需要知道被哪个资产使用\n \"\"\"\n accessory = models.ForeignKey(Accessory, on_delete=models.CASCADE,\n help_text='配件')\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n 
help_text='网络设备', blank=True, null=True)\n operate = models.CharField('操作', max_length=7, choices=(('install',\n '安装'), ('remove', '取下')), default='install')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_use_record'\n verbose_name = u'配件使用记录表'\n verbose_name_plural = u'配件使用记录表'\n\n\nclass InventoryRecord(models.Model):\n accessory = models.CharField('配件', max_length=20, choices=\n accessory_item.items())\n operate = models.CharField('操作', max_length=8, choices=(('purchase',\n '采购'), ('receive', '领用'), ('revert', '归还')))\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n help_text='网络设备', blank=True, null=True)\n content = models.CharField('内容', max_length=250, blank=True, null=True)\n user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='操作员')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_inventory_record'\n verbose_name = u'配件进货及消费记录表'\n verbose_name_plural = u'配件进货及消费记录表'\n",
"step-2": "<mask token>\n\n\nclass Caddy(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'cmdb_acc_caddy'\n verbose_name = u'配件硬盘托架表'\n verbose_name_plural = u'配件硬盘托架表'\n\n\nclass NetworkAdapter(models.Model):\n speed = models.CharField('网卡速率', max_length=6, choices=(('100MbE', '百兆'\n ), ('GbE', '千兆'), ('10GbE', '万兆')), unique=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_network_adapter'\n verbose_name = u'配件网卡表'\n verbose_name_plural = u'配件网卡表'\n\n\nclass NetworkCable(models.Model):\n cat = models.CharField('网线类型', max_length=2, choices=(('5', '5类线'), (\n '5e', '超5类线'), ('6', '6类线'), ('6e', '超6类线')))\n length = models.PositiveSmallIntegerField('长度(米)')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_network_cable'\n unique_together = 'cat', 'length'\n verbose_name = u'配件网线表'\n verbose_name_plural = u'配件网线表'\n\n\nclass OpticalTransceiver(models.Model):\n \"\"\"\n Mfg. Compatibility: Cisco\n Part Number: SFP-10G-LR-10pk\n Form Factor: SFP+\n TX Wavelength: 1310nm\n Reach: 10km\n Cable Type: SMF\n Rate Category: 10GBase\n Interface Type: LR\n DDM: Yes\n Connector Type: Dual-LC\n \"\"\"\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n reach = models.FloatField('最大传输距离(km)')\n rate = models.CharField('传输速率', max_length=6, choices=(('100MbE', '百兆'),\n ('GbE', '千兆'), ('10GbE', '万兆')))\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_optical_transceiver'\n unique_together = 'mode', 'reach', 'rate'\n verbose_name = u'配件光模块表'\n verbose_name_plural = u'配件光模块表'\n\n\nclass JumpWire(models.Model):\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n interface = models.CharField('光纤接口', max_length=6, choices=(('lc',\n '小方头'), ('sc', '大方头'), ('fc', '圆头')))\n length = models.PositiveSmallIntegerField('长度(米)')\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_jump_wire'\n unique_together = 'mode', 'interface', 'length'\n verbose_name = u'配件跳线表'\n verbose_name_plural = u'配件跳线表'\n\n\n<mask token>\n\n\nclass Accessory(models.Model):\n storehouse = models.ForeignKey(IDC, on_delete=models.CASCADE, help_text\n ='仓库')\n mode = models.CharField('配件类型', max_length=20, choices=accessory_item.\n items())\n mode_id = models.IntegerField('配件型号表主键ID')\n manufacturer = models.CharField('硬件制造商', max_length=20, blank=True,\n null=True)\n sn = models.CharField('Serial Number', max_length=50, blank=True, null=True\n )\n vendor = models.CharField('采购渠道(供应商)', max_length=20)\n trade_date = models.DateField('采购时间', blank=True, null=True)\n expired_date = models.DateField('过保时间', blank=True, null=True)\n comment = models.CharField('备注', max_length=50, blank=True, null=True)\n is_active = models.BooleanField('是否可用', default=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_accessory'\n verbose_name = u'配件详细表'\n verbose_name_plural = u'配件详细表'\n\n\nclass 
UseRecord(models.Model):\n \"\"\"\n CPU、内存、硬盘、网卡、光模块 配件,需要知道被哪个资产使用\n \"\"\"\n accessory = models.ForeignKey(Accessory, on_delete=models.CASCADE,\n help_text='配件')\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n help_text='网络设备', blank=True, null=True)\n operate = models.CharField('操作', max_length=7, choices=(('install',\n '安装'), ('remove', '取下')), default='install')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_use_record'\n verbose_name = u'配件使用记录表'\n verbose_name_plural = u'配件使用记录表'\n\n\nclass InventoryRecord(models.Model):\n accessory = models.CharField('配件', max_length=20, choices=\n accessory_item.items())\n operate = models.CharField('操作', max_length=8, choices=(('purchase',\n '采购'), ('receive', '领用'), ('revert', '归还')))\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n help_text='网络设备', blank=True, null=True)\n content = models.CharField('内容', max_length=250, blank=True, null=True)\n user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='操作员')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_inventory_record'\n verbose_name = u'配件进货及消费记录表'\n verbose_name_plural = u'配件进货及消费记录表'\n",
"step-3": "<mask token>\n\n\nclass Caddy(models.Model):\n caddy_dimensions = {'2.5s': '2.5寸 R740', '2.5': '2.5寸', '3.5': '3.5寸'}\n dimensions = models.CharField('尺寸(英寸)', max_length=4, choices=\n caddy_dimensions.items(), unique=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_caddy'\n verbose_name = u'配件硬盘托架表'\n verbose_name_plural = u'配件硬盘托架表'\n\n\nclass NetworkAdapter(models.Model):\n speed = models.CharField('网卡速率', max_length=6, choices=(('100MbE', '百兆'\n ), ('GbE', '千兆'), ('10GbE', '万兆')), unique=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_network_adapter'\n verbose_name = u'配件网卡表'\n verbose_name_plural = u'配件网卡表'\n\n\nclass NetworkCable(models.Model):\n cat = models.CharField('网线类型', max_length=2, choices=(('5', '5类线'), (\n '5e', '超5类线'), ('6', '6类线'), ('6e', '超6类线')))\n length = models.PositiveSmallIntegerField('长度(米)')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_network_cable'\n unique_together = 'cat', 'length'\n verbose_name = u'配件网线表'\n verbose_name_plural = u'配件网线表'\n\n\nclass OpticalTransceiver(models.Model):\n \"\"\"\n Mfg. Compatibility: Cisco\n Part Number: SFP-10G-LR-10pk\n Form Factor: SFP+\n TX Wavelength: 1310nm\n Reach: 10km\n Cable Type: SMF\n Rate Category: 10GBase\n Interface Type: LR\n DDM: Yes\n Connector Type: Dual-LC\n \"\"\"\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n reach = models.FloatField('最大传输距离(km)')\n rate = models.CharField('传输速率', max_length=6, choices=(('100MbE', '百兆'),\n ('GbE', '千兆'), ('10GbE', '万兆')))\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_optical_transceiver'\n unique_together = 'mode', 'reach', 'rate'\n verbose_name = u'配件光模块表'\n verbose_name_plural = u'配件光模块表'\n\n\nclass JumpWire(models.Model):\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n interface = models.CharField('光纤接口', max_length=6, choices=(('lc',\n '小方头'), ('sc', '大方头'), ('fc', '圆头')))\n length = models.PositiveSmallIntegerField('长度(米)')\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_jump_wire'\n unique_together = 'mode', 'interface', 'length'\n verbose_name = u'配件跳线表'\n verbose_name_plural = u'配件跳线表'\n\n\n<mask token>\n\n\nclass Accessory(models.Model):\n storehouse = models.ForeignKey(IDC, on_delete=models.CASCADE, help_text\n ='仓库')\n mode = models.CharField('配件类型', max_length=20, choices=accessory_item.\n items())\n mode_id = models.IntegerField('配件型号表主键ID')\n manufacturer = models.CharField('硬件制造商', max_length=20, blank=True,\n null=True)\n sn = models.CharField('Serial Number', max_length=50, blank=True, null=True\n )\n vendor = models.CharField('采购渠道(供应商)', max_length=20)\n trade_date = models.DateField('采购时间', blank=True, null=True)\n expired_date = models.DateField('过保时间', blank=True, null=True)\n comment = models.CharField('备注', max_length=50, blank=True, null=True)\n is_active = models.BooleanField('是否可用', 
default=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_accessory'\n verbose_name = u'配件详细表'\n verbose_name_plural = u'配件详细表'\n\n\nclass UseRecord(models.Model):\n \"\"\"\n CPU、内存、硬盘、网卡、光模块 配件,需要知道被哪个资产使用\n \"\"\"\n accessory = models.ForeignKey(Accessory, on_delete=models.CASCADE,\n help_text='配件')\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n help_text='网络设备', blank=True, null=True)\n operate = models.CharField('操作', max_length=7, choices=(('install',\n '安装'), ('remove', '取下')), default='install')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_use_record'\n verbose_name = u'配件使用记录表'\n verbose_name_plural = u'配件使用记录表'\n\n\nclass InventoryRecord(models.Model):\n accessory = models.CharField('配件', max_length=20, choices=\n accessory_item.items())\n operate = models.CharField('操作', max_length=8, choices=(('purchase',\n '采购'), ('receive', '领用'), ('revert', '归还')))\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n help_text='网络设备', blank=True, null=True)\n content = models.CharField('内容', max_length=250, blank=True, null=True)\n user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='操作员')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_inventory_record'\n verbose_name = u'配件进货及消费记录表'\n verbose_name_plural = u'配件进货及消费记录表'\n",
"step-4": "<mask token>\n\n\nclass Disk(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n db_table = 'cmdb_acc_disk'\n unique_together = 'device_type', 'capacity', 'rpm', 'dimensions'\n verbose_name = u'配件硬盘表'\n verbose_name_plural = u'配件硬盘表'\n\n\nclass Caddy(models.Model):\n caddy_dimensions = {'2.5s': '2.5寸 R740', '2.5': '2.5寸', '3.5': '3.5寸'}\n dimensions = models.CharField('尺寸(英寸)', max_length=4, choices=\n caddy_dimensions.items(), unique=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_caddy'\n verbose_name = u'配件硬盘托架表'\n verbose_name_plural = u'配件硬盘托架表'\n\n\nclass NetworkAdapter(models.Model):\n speed = models.CharField('网卡速率', max_length=6, choices=(('100MbE', '百兆'\n ), ('GbE', '千兆'), ('10GbE', '万兆')), unique=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_network_adapter'\n verbose_name = u'配件网卡表'\n verbose_name_plural = u'配件网卡表'\n\n\nclass NetworkCable(models.Model):\n cat = models.CharField('网线类型', max_length=2, choices=(('5', '5类线'), (\n '5e', '超5类线'), ('6', '6类线'), ('6e', '超6类线')))\n length = models.PositiveSmallIntegerField('长度(米)')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_network_cable'\n unique_together = 'cat', 'length'\n verbose_name = u'配件网线表'\n verbose_name_plural = u'配件网线表'\n\n\nclass OpticalTransceiver(models.Model):\n \"\"\"\n Mfg. Compatibility: Cisco\n Part Number: SFP-10G-LR-10pk\n Form Factor: SFP+\n TX Wavelength: 1310nm\n Reach: 10km\n Cable Type: SMF\n Rate Category: 10GBase\n Interface Type: LR\n DDM: Yes\n Connector Type: Dual-LC\n \"\"\"\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n reach = models.FloatField('最大传输距离(km)')\n rate = models.CharField('传输速率', max_length=6, choices=(('100MbE', '百兆'),\n ('GbE', '千兆'), ('10GbE', '万兆')))\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_optical_transceiver'\n unique_together = 'mode', 'reach', 'rate'\n verbose_name = u'配件光模块表'\n verbose_name_plural = u'配件光模块表'\n\n\nclass JumpWire(models.Model):\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True\n )\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'),\n ('multi', '多模')))\n interface = models.CharField('光纤接口', max_length=6, choices=(('lc',\n '小方头'), ('sc', '大方头'), ('fc', '圆头')))\n length = models.PositiveSmallIntegerField('长度(米)')\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d',\n null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_jump_wire'\n unique_together = 'mode', 'interface', 'length'\n verbose_name = u'配件跳线表'\n verbose_name_plural = u'配件跳线表'\n\n\n<mask token>\n\n\nclass Accessory(models.Model):\n storehouse = models.ForeignKey(IDC, on_delete=models.CASCADE, help_text\n ='仓库')\n mode = models.CharField('配件类型', max_length=20, choices=accessory_item.\n items())\n mode_id = models.IntegerField('配件型号表主键ID')\n manufacturer = models.CharField('硬件制造商', max_length=20, blank=True,\n null=True)\n sn = models.CharField('Serial Number', max_length=50, blank=True, null=True\n )\n vendor = 
models.CharField('采购渠道(供应商)', max_length=20)\n trade_date = models.DateField('采购时间', blank=True, null=True)\n expired_date = models.DateField('过保时间', blank=True, null=True)\n comment = models.CharField('备注', max_length=50, blank=True, null=True)\n is_active = models.BooleanField('是否可用', default=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_accessory'\n verbose_name = u'配件详细表'\n verbose_name_plural = u'配件详细表'\n\n\nclass UseRecord(models.Model):\n \"\"\"\n CPU、内存、硬盘、网卡、光模块 配件,需要知道被哪个资产使用\n \"\"\"\n accessory = models.ForeignKey(Accessory, on_delete=models.CASCADE,\n help_text='配件')\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n help_text='网络设备', blank=True, null=True)\n operate = models.CharField('操作', max_length=7, choices=(('install',\n '安装'), ('remove', '取下')), default='install')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_use_record'\n verbose_name = u'配件使用记录表'\n verbose_name_plural = u'配件使用记录表'\n\n\nclass InventoryRecord(models.Model):\n accessory = models.CharField('配件', max_length=20, choices=\n accessory_item.items())\n operate = models.CharField('操作', max_length=8, choices=(('purchase',\n '采购'), ('receive', '领用'), ('revert', '归还')))\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text=\n '服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE,\n help_text='网络设备', blank=True, null=True)\n content = models.CharField('内容', max_length=250, blank=True, null=True)\n user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='操作员')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n\n class Meta:\n db_table = 'cmdb_acc_inventory_record'\n verbose_name = u'配件进货及消费记录表'\n verbose_name_plural = u'配件进货及消费记录表'\n",
"step-5": "from django.db import models\nfrom accounts.models import User\nfrom cmdb.models.base import IDC\nfrom cmdb.models.asset import Server, NetDevice\n\n\nclass CPU(models.Model):\n # Intel(R) Xeon(R) Gold 5118 CPU @ 2.30GHz\n version = models.CharField('型号版本', max_length=100, unique=True)\n speed = models.PositiveSmallIntegerField('频率MHz')\n process = models.PositiveSmallIntegerField('线程数')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_cpu'\n verbose_name = u'配件CPU表'\n verbose_name_plural = u'配件CPU表'\n\n\nclass Memory(models.Model):\n ram_type = models.CharField('内存类型', max_length=4, choices=(('ddr3', 'DDR3'), ('ddr4', 'DDR4'), ('ddr5', 'DDR5')))\n ram_size = models.PositiveSmallIntegerField('内存容量(G)')\n speed = models.PositiveSmallIntegerField('速率(MT/s)')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_memory'\n unique_together = ('ram_type', 'ram_size', 'speed')\n verbose_name = u'配件内存表'\n verbose_name_plural = u'配件内存表'\n\n\nclass Disk(models.Model):\n device_type = models.CharField('硬盘类型', max_length=4, choices=(('sata', 'SATA'), ('sas', 'SAS'), ('ssd', 'SSD')))\n capacity = models.PositiveSmallIntegerField('容量(G)')\n rpm = models.PositiveSmallIntegerField('转率')\n dimensions = models.CharField('尺寸(英寸)', max_length=3, choices=(('2.5', '2.5寸'), ('3.5', '3.5寸')))\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_disk'\n unique_together = ('device_type', 'capacity', 'rpm', 'dimensions')\n verbose_name = u'配件硬盘表'\n verbose_name_plural = u'配件硬盘表'\n\n\nclass Caddy(models.Model):\n caddy_dimensions = {\n '2.5s': '2.5寸 R740', '2.5': '2.5寸', '3.5': '3.5寸'\n }\n dimensions = models.CharField('尺寸(英寸)', max_length=4, choices=caddy_dimensions.items(), unique=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_caddy'\n verbose_name = u'配件硬盘托架表'\n verbose_name_plural = u'配件硬盘托架表'\n\n\nclass NetworkAdapter(models.Model):\n speed = models.CharField('网卡速率', max_length=6, choices=(('100MbE', '百兆'), ('GbE', '千兆'), ('10GbE', '万兆')), unique=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_network_adapter'\n verbose_name = u'配件网卡表'\n verbose_name_plural = u'配件网卡表'\n\n\nclass NetworkCable(models.Model):\n cat = models.CharField('网线类型', max_length=2, choices=(('5', '5类线'), ('5e', '超5类线'), ('6', '6类线'), ('6e', '超6类线')))\n length = models.PositiveSmallIntegerField('长度(米)')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_network_cable'\n unique_together = ('cat', 'length')\n verbose_name = u'配件网线表'\n verbose_name_plural = u'配件网线表'\n\n\nclass OpticalTransceiver(models.Model):\n # Small form-factor pluggable transceiver 小型可插拔光模块\n \"\"\"\n Mfg. 
Compatibility: Cisco\n Part Number: SFP-10G-LR-10pk\n Form Factor: SFP+\n TX Wavelength: 1310nm\n Reach: 10km\n Cable Type: SMF\n Rate Category: 10GBase\n Interface Type: LR\n DDM: Yes\n Connector Type: Dual-LC\n \"\"\"\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True)\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'), ('multi', '多模')))\n reach = models.FloatField('最大传输距离(km)')\n rate = models.CharField('传输速率', max_length=6, choices=(('100MbE', '百兆'), ('GbE', '千兆'), ('10GbE', '万兆')))\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d', null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_optical_transceiver'\n unique_together = ('mode', 'reach', 'rate')\n verbose_name = u'配件光模块表'\n verbose_name_plural = u'配件光模块表'\n\n\nclass JumpWire(models.Model):\n information = models.CharField('综述介绍', max_length=20, blank=True, null=True)\n mode = models.CharField('模式', max_length=6, choices=(('single', '单模'), ('multi', '多模')))\n interface = models.CharField('光纤接口', max_length=6, choices=(('lc', '小方头'), ('sc', '大方头'), ('fc', '圆头')))\n length = models.PositiveSmallIntegerField('长度(米)')\n image = models.ImageField(u'图片', upload_to='images/accessory/%Y%m%d', null=True, blank=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_jump_wire'\n unique_together = ('mode', 'interface', 'length')\n verbose_name = u'配件跳线表'\n verbose_name_plural = u'配件跳线表'\n\n\naccessory_item = {\n 'cpu': 'CPU', 'memory': '内存', 'disk': '硬盘', 'caddy': '硬盘托架', 'network_adapter': '网卡', 'network_cable': '网线',\n 'transceiver': '光模块', 'jump_wire': '跳线'\n}\n\n\nclass Accessory(models.Model):\n storehouse = models.ForeignKey(IDC, on_delete=models.CASCADE, help_text='仓库')\n mode = models.CharField('配件类型', max_length=20, choices=accessory_item.items())\n mode_id = models.IntegerField('配件型号表主键ID')\n manufacturer = models.CharField('硬件制造商', max_length=20, blank=True, null=True)\n sn = models.CharField('Serial Number', max_length=50, blank=True, null=True)\n vendor = models.CharField('采购渠道(供应商)', max_length=20)\n trade_date = models.DateField('采购时间', blank=True, null=True)\n expired_date = models.DateField('过保时间', blank=True, null=True)\n comment = models.CharField('备注', max_length=50, blank=True, null=True)\n is_active = models.BooleanField('是否可用', default=True)\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_accessory'\n verbose_name = u'配件详细表'\n verbose_name_plural = u'配件详细表'\n\n\nclass UseRecord(models.Model):\n \"\"\"\n CPU、内存、硬盘、网卡、光模块 配件,需要知道被哪个资产使用\n \"\"\"\n accessory = models.ForeignKey(Accessory, on_delete=models.CASCADE, help_text='配件')\n server = models.ForeignKey(Server, on_delete=models.CASCADE, help_text='服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE, help_text='网络设备', blank=True, null=True)\n operate = models.CharField('操作', max_length=7, choices=(('install', '安装'), ('remove', '取下')), default='install')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_use_record'\n verbose_name = u'配件使用记录表'\n verbose_name_plural = u'配件使用记录表'\n\n\nclass InventoryRecord(models.Model):\n accessory = models.CharField('配件', max_length=20, choices=accessory_item.items())\n operate = models.CharField('操作', max_length=8, choices=(('purchase', '采购'), ('receive', '领用'), ('revert', '归还')))\n server 
= models.ForeignKey(Server, on_delete=models.CASCADE, help_text='服务器', blank=True, null=True)\n net_device = models.ForeignKey(NetDevice, on_delete=models.CASCADE, help_text='网络设备', blank=True, null=True)\n content = models.CharField('内容', max_length=250, blank=True, null=True)\n user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='操作员')\n created_date = models.DateTimeField('创建时间', auto_now_add=True)\n\n class Meta:\n db_table = 'cmdb_acc_inventory_record'\n verbose_name = u'配件进货及消费记录表'\n verbose_name_plural = u'配件进货及消费记录表'\n",
"step-ids": [
15,
17,
18,
19,
27
]
}
|
[
15,
17,
18,
19,
27
] |
from microbit import *
import speech
while True:
speech.say("I am a DALEK - EXTERMINATE", speed=120, pitch=100, throat=100, mouth=200) #kokeile muuttaa parametrejä
|
normal
|
{
"blob_id": "dad78d7948fb1038f9cf66732f39c18a18f2a3c8",
"index": 5233,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n speech.say('I am a DALEK - EXTERMINATE', speed=120, pitch=100, throat=\n 100, mouth=200)\n",
"step-3": "from microbit import *\nimport speech\nwhile True:\n speech.say('I am a DALEK - EXTERMINATE', speed=120, pitch=100, throat=\n 100, mouth=200)\n",
"step-4": "from microbit import *\n\nimport speech\n\n\nwhile True:\n speech.say(\"I am a DALEK - EXTERMINATE\", speed=120, pitch=100, throat=100, mouth=200) #kokeile muuttaa parametrejä\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.conf.urls import url
from ..views import (buildings_upload, keytype_upload, key_upload, keystatus_upload, keyissue_upload)
from django.contrib.auth.decorators import login_required
urlpatterns = [
url(r'^buildings_csv/$', # NOQA
buildings_upload,
name="buildings_upload"),
url(r'^keytype_csv/$', # NOQA
keytype_upload,
name="keytype_upload"),
url(r'^key_csv/$', # NOQA
key_upload,
name="key_upload"),
url(r'^keystatus_csv/$', # NOQA
keystatus_upload,
name="keystatus_upload"),
url(r'^keyissue_csv/$', # NOQA
keyissue_upload,
name="keyissue_upload"),
]
|
normal
|
{
"blob_id": "4a0d8e6b6205fa57b8614857e1462203a2a7d2c5",
"index": 3002,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^buildings_csv/$', buildings_upload, name=\n 'buildings_upload'), url('^keytype_csv/$', keytype_upload, name=\n 'keytype_upload'), url('^key_csv/$', key_upload, name='key_upload'),\n url('^keystatus_csv/$', keystatus_upload, name='keystatus_upload'), url\n ('^keyissue_csv/$', keyissue_upload, name='keyissue_upload')]\n",
"step-3": "from django.conf.urls import url\nfrom ..views import buildings_upload, keytype_upload, key_upload, keystatus_upload, keyissue_upload\nfrom django.contrib.auth.decorators import login_required\nurlpatterns = [url('^buildings_csv/$', buildings_upload, name=\n 'buildings_upload'), url('^keytype_csv/$', keytype_upload, name=\n 'keytype_upload'), url('^key_csv/$', key_upload, name='key_upload'),\n url('^keystatus_csv/$', keystatus_upload, name='keystatus_upload'), url\n ('^keyissue_csv/$', keyissue_upload, name='keyissue_upload')]\n",
"step-4": "from django.conf.urls import url\nfrom ..views import (buildings_upload, keytype_upload, key_upload, keystatus_upload, keyissue_upload)\nfrom django.contrib.auth.decorators import login_required\n\nurlpatterns = [\n url(r'^buildings_csv/$', # NOQA\n buildings_upload,\n name=\"buildings_upload\"),\n url(r'^keytype_csv/$', # NOQA\n keytype_upload,\n name=\"keytype_upload\"),\n url(r'^key_csv/$', # NOQA\n key_upload,\n name=\"key_upload\"),\n url(r'^keystatus_csv/$', # NOQA\n keystatus_upload,\n name=\"keystatus_upload\"),\n url(r'^keyissue_csv/$', # NOQA\n keyissue_upload,\n name=\"keyissue_upload\"),\n]\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
OK = 200
CREATED = 201
NOT_MODIFIED = 304
UNAUTHORIZED = 401
FORBIDDEN = 403
BAD_REQUEST = 400
NOT_FOUND = 404
CONFLICT = 409
UNPROCESSABLE = 422
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
SERVICE_UNAVAILABLE = 503
ADMIN = 'admin'
ELITE = 'elite'
NOOB = 'noob'
WITHDRAW = 'withdraw'
FUND = 'fund'
|
normal
|
{
"blob_id": "d90942f22cbbd9cfc3a431b7857cd909a7690966",
"index": 92,
"step-1": "<mask token>\n",
"step-2": "OK = 200\nCREATED = 201\nNOT_MODIFIED = 304\nUNAUTHORIZED = 401\nFORBIDDEN = 403\nBAD_REQUEST = 400\nNOT_FOUND = 404\nCONFLICT = 409\nUNPROCESSABLE = 422\nINTERNAL_SERVER_ERROR = 500\nNOT_IMPLEMENTED = 501\nSERVICE_UNAVAILABLE = 503\nADMIN = 'admin'\nELITE = 'elite'\nNOOB = 'noob'\nWITHDRAW = 'withdraw'\nFUND = 'fund'\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from rest_framework.views import APIView
from .serializers import UserSerializer
from rest_framework import permissions
from .models import users
from rest_framework.response import Response
from django.http import JsonResponse
from rest_framework import viewsets
from profiles.models import profile
from profiles.serializers import ProfileSerializer
from follows.models import Follow
class GetDefaultUsers(APIView):
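    """Return up to five users the requester does not already follow, along with the requester's own data."""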
permission_classes =[
permissions.IsAuthenticated
]
def post(self,request, *args, **kwargs):
user = self.request.user
userers = users.objects.all()[:5]
users_to_pass = []
for user_now in userers:
user_id = user.id
check_if_already_followed = Follow.objects.filter(user_id = user_now.id).filter(follower_id = user.id)
if len(check_if_already_followed) == 0:
users_to_pass.append(user_now)
serilizer_class_many = UserSerializer(users_to_pass, many=True)
serilizer_class = UserSerializer(user)
return Response({
'users':serilizer_class_many.data,
"user":serilizer_class.data
})
class GetSpecificUser(APIView):
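    """Return a single user (looked up by id) together with their profile."""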
permission_classes =[
permissions.IsAuthenticated
]
def post(self, request,id=None, *args, **kwargs):
try:
queryset = users.objects.get(id=id)
        except users.DoesNotExist:
            return JsonResponse({'error': "user does not exist"}, status=400)
try:
profile_queryset = profile.objects.get(user = queryset)
except profile.DoesNotExist:
return JsonResponse({'error':"user does not have a profile"}, status = 400)
serializer_class = UserSerializer(queryset)
serializer_class_profile = ProfileSerializer(profile_queryset)
return Response(
{'user':serializer_class.data,
'profile':serializer_class_profile.data
},
status=200)
|
normal
|
{
"blob_id": "c5a7f269f579bd1960afa4f700b5c3436ac6d91a",
"index": 2733,
"step-1": "<mask token>\n\n\nclass GetDefaultUsers(APIView):\n <mask token>\n <mask token>\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-2": "<mask token>\n\n\nclass GetDefaultUsers(APIView):\n <mask token>\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id=\n user_now.id).filter(follower_id=user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({'users': serilizer_class_many.data, 'user':\n serilizer_class.data})\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-3": "<mask token>\n\n\nclass GetDefaultUsers(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id=\n user_now.id).filter(follower_id=user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({'users': serilizer_class_many.data, 'user':\n serilizer_class.data})\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-4": "from rest_framework.views import APIView\nfrom .serializers import UserSerializer\nfrom rest_framework import permissions\nfrom .models import users\nfrom rest_framework.response import Response\nfrom django.http import JsonResponse\nfrom rest_framework import viewsets\nfrom profiles.models import profile\nfrom profiles.serializers import ProfileSerializer\nfrom follows.models import Follow\n\n\nclass GetDefaultUsers(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id=\n user_now.id).filter(follower_id=user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({'users': serilizer_class_many.data, 'user':\n serilizer_class.data})\n\n\nclass GetSpecificUser(APIView):\n permission_classes = [permissions.IsAuthenticated]\n\n def post(self, request, id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error': 'user does not exits'}, status=400)\n try:\n profile_queryset = profile.objects.get(user=queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error': 'user does not have a profile'},\n status=400)\n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n return Response({'user': serializer_class.data, 'profile':\n serializer_class_profile.data}, status=200)\n",
"step-5": "from rest_framework.views import APIView\nfrom .serializers import UserSerializer\nfrom rest_framework import permissions\nfrom .models import users\nfrom rest_framework.response import Response\nfrom django.http import JsonResponse\nfrom rest_framework import viewsets\nfrom profiles.models import profile\nfrom profiles.serializers import ProfileSerializer\nfrom follows.models import Follow\n\n\nclass GetDefaultUsers(APIView):\n permission_classes =[\n permissions.IsAuthenticated\n ]\n \n def post(self,request, *args, **kwargs):\n user = self.request.user\n userers = users.objects.all()[:5]\n users_to_pass = []\n for user_now in userers:\n user_id = user.id\n check_if_already_followed = Follow.objects.filter(user_id = user_now.id).filter(follower_id = user.id)\n if len(check_if_already_followed) == 0:\n users_to_pass.append(user_now)\n \n serilizer_class_many = UserSerializer(users_to_pass, many=True)\n serilizer_class = UserSerializer(user)\n return Response({\n 'users':serilizer_class_many.data,\n \"user\":serilizer_class.data\n })\n \nclass GetSpecificUser(APIView):\n permission_classes =[\n permissions.IsAuthenticated\n ]\n def post(self, request,id=None, *args, **kwargs):\n try:\n queryset = users.objects.get(id=id)\n except user.DoesNotExist:\n return JsonResponse({'error':\"user does not exits\"}, status = 400)\n try:\n profile_queryset = profile.objects.get(user = queryset)\n except profile.DoesNotExist:\n return JsonResponse({'error':\"user does not have a profile\"}, status = 400)\n \n serializer_class = UserSerializer(queryset)\n serializer_class_profile = ProfileSerializer(profile_queryset)\n \n return Response(\n {'user':serializer_class.data,\n 'profile':serializer_class_profile.data \n },\n status=200)\n \n ",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# Write a program that computes the area of a rectangle.
# Area of a rectangle = height * base
height = int(input('Enter height: '))
base = int(input('Enter base: '))
area = height * base
print('Height:', height, 'base:', base, '-> rectangle area:', area)
|
normal
|
{
"blob_id": "f9b48c1b6489d8981e192838cf1c734e2296ab15",
"index": 9833,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('높이는', height, ' 밑변은', base, '사각형의 면적은', area, '입니다.')\n",
"step-3": "height = int(input('높이 입력: '))\nbase = int(input('밑변 입력: '))\narea = height * base\nprint('높이는', height, ' 밑변은', base, '사각형의 면적은', area, '입니다.')\n",
"step-4": "#사각형의 면적을 구하는 프로그램을 작성하시오,\r\n#사각형의 면적 = 높이*밑변\r\n\r\nheight=int(input('높이 입력: '))\r\nbase=int(input('밑변 입력: '))\r\n\r\narea=height*base\r\n\r\nprint('높이는',height,' 밑변은',base,'사각형의 면적은',area,'입니다.')\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
from django.db import models
# from applications.models import ApplicationReview
# from profiles.models import Restaurant, Program, Courier
# Enum for Admin
BASIC_ADMIN = 'ADMIN'
SUPER_ADMIN = 'SUPER'
MANAGER = 'MNGR'
DEVELOPER = 'DEV'
STAFF = 'STAFF'
ADMIN_ROLE_OPTIONS = [
(BASIC_ADMIN, 'basic admin'),
(SUPER_ADMIN, 'super admin'),
(MANAGER, 'manager'),
(DEVELOPER, 'developer'),
    (STAFF, 'staff'),
]
PROGRAM = "PR"
RESTAURANT = "RE"
USER_TYPE_OPTIONS = [
(PROGRAM, 'Program'),
(RESTAURANT, 'Restaurant'),
]
PHONE = "PH"
EMAIL = "EM"
PREFERRED_CONTACT = [
(PHONE, 'Phone'),
(EMAIL, 'Email'),
]
ADMIN = "ADM"
BASIC_USER = "BSC"
USER_TYPES = [
(ADMIN, 'Admin'),
(BASIC_USER, 'Basic User'),
]
class UserClassManager(BaseUserManager):
"""Manager for User class"""
    # method for creating admins, but not super admins
def create_staffuser(self, last_name, first_name, email, password, role, phone_number=''):
new_account = self.create_user(phone_number=phone_number, last_name=last_name, first_name=first_name,
email=email, password=password)
new_account.staff = True
admin_object = AdminUser.objects.create(role=role)
new_account.admin_object = admin_object
new_account.user_type = ADMIN
admin_object.save(using=self._db)
new_account.save(using=self._db)
return new_account
def create_basic_user(self, type, last_name, first_name, email, password, phone_number=''):
new_account = self.create_user(phone_number=phone_number, last_name=last_name, first_name=first_name,
email=email, password=password)
user_object = BasicUser.objects.create(type=type)
new_account.user_object = user_object
new_account.user_type = BASIC_USER
user_object.save(using=self._db)
new_account.save(using=self._db)
return new_account
# method for creating restaurants, schools, etc.
def create_user(self, last_name, first_name, email, password, phone_number=''):
new_account = self.model(email=self.normalize_email(email),)
new_account.set_password(password)
new_account.last_name = last_name
new_account.first_name = first_name
new_account.phone_number = phone_number
new_account.save(using=self._db)
return new_account
# method for creating superadmins
def create_superuser(self, last_name, first_name, email, password, phone_number=''):
new_account = self.create_user(phone_number=phone_number, last_name=last_name, first_name=first_name,
email=email, password=password)
new_account.staff = True
new_account.admin = True
admin_object = AdminUser.objects.create(role=SUPER_ADMIN)
new_account.admin_object = admin_object
new_account.user_type = ADMIN
admin_object.save(using=self._db)
new_account.save(using=self._db)
return new_account
    # add any required fields here other than email and password (UserClass below also defines these)
REQUIRED_FIELDS = []
USERNAME_FIELD = 'email'
class UserClass(AbstractBaseUser):
"""Class for general user - can be basic user or admin"""
phone_number = models.CharField(verbose_name='phone number', max_length=255, unique=False, default='')
active = models.BooleanField(default=True)
is_active = models.BooleanField(default=True)
email = models.EmailField(verbose_name='email', max_length=255, unique=True, )
last_name = models.CharField(verbose_name='last name', max_length=255, unique=False, )
first_name = models.CharField(verbose_name='first name', max_length=255, unique=False, )
objects = UserClassManager()
staff = models.BooleanField(default=False)
admin = models.BooleanField(default=False)
image = models.CharField(verbose_name='user image', max_length=255, unique=False, default='defaultIcon.png')
USERNAME_FIELD = "email"
REQUIRED_FIELDS = ['first_name', 'last_name']
user_type = models.CharField(
max_length=20,
choices=USER_TYPES,
default=BASIC_USER,
)
user_object = models.ForeignKey('profiles.BasicUser', on_delete=models.DO_NOTHING, null=True, related_name='basic_user_parent')
admin_object = models.ForeignKey('profiles.AdminUser', on_delete=models.DO_NOTHING, null=True, related_name='admin_user_parent')
def has_module_perms(self, app_label):
return True
@property
def is_admin(self):
return self.admin
def get_full_name(self):
return self.first_name + ' ' + self.last_name
def get_short_name(self):
return self.first_name
@property
def is_staff(self):
return self.staff
def __str__(self):
return self.email
class AdminUser(models.Model):
"""Model for admin user data"""
role = models.CharField(
max_length=20,
choices=ADMIN_ROLE_OPTIONS,
default=STAFF,
)
class BasicUser(models.Model):
"""Model for basic user data"""
type = models.CharField(
max_length=20,
choices=USER_TYPE_OPTIONS,
default=RESTAURANT,
)
preferred_contact = models.CharField(
max_length=20,
choices=PREFERRED_CONTACT,
default=EMAIL,
)
position = models.CharField(verbose_name='position/title', max_length=255, unique=False, null=True)
restaurant = models.ForeignKey('profiles.Restaurant', on_delete=models.CASCADE, null=True)
program = models.ForeignKey('profiles.Program', on_delete=models.CASCADE, null=True)
courier = models.ForeignKey('profiles.Courier', on_delete=models.CASCADE, null=True)
class Schedule(models.Model):
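    """Weekly opening hours: an optional start and end time for each day."""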
monday_start = models.TimeField(auto_now=False, null=True, blank=True)
monday_end = models.TimeField(auto_now=False, null=True, blank=True)
tuesday_start = models.TimeField(auto_now=False, null=True, blank=True)
tuesday_end = models.TimeField(auto_now=False, null=True, blank=True)
wednesday_start = models.TimeField(auto_now=False, null=True, blank=True)
wednesday_end = models.TimeField(auto_now=False, null=True, blank=True)
thursday_start = models.TimeField(auto_now=False, null=True, blank=True)
thursday_end = models.TimeField(auto_now=False, null=True, blank=True)
friday_start = models.TimeField(auto_now=False, null=True, blank=True)
friday_end = models.TimeField(auto_now=False, null=True, blank=True)
saturday_start = models.TimeField(auto_now=False, null=True, blank=True)
saturday_end = models.TimeField(auto_now=False, null=True, blank=True)
sunday_start = models.TimeField(auto_now=False, null=True, blank=True)
sunday_end = models.TimeField(auto_now=False, null=True, blank=True)
    def getSchedule(self):
        # Build a dict of formatted times for every day; empty string when a time is not set.
        schedule = {}
        fields = [
            'monday_start', 'monday_end', 'tuesday_start', 'tuesday_end',
            'wednesday_start', 'wednesday_end', 'thursday_start', 'thursday_end',
            'friday_start', 'friday_end', 'saturday_start', 'saturday_end',
            'sunday_start', 'sunday_end',
        ]
        for field in fields:
            value = getattr(self, field)
            schedule[field] = value.strftime("%-I:%M %p") if value else ''
        return schedule
class Restaurant(models.Model):
created_at = models.DateTimeField(auto_now=True)
company_name = models.CharField(verbose_name='company name', max_length=255, unique=False, )
main_contact = models.ForeignKey('profiles.UserClass', on_delete=models.DO_NOTHING, related_name="restaurant_object", null=True)
phone_number = models.CharField(verbose_name='phone number', max_length=255, unique=False, )
schedule = models.ForeignKey('profiles.Schedule', on_delete=models.DO_NOTHING, null=True)
meals = models.IntegerField()
uber_eats = models.BooleanField(default=False)
delivery_capacity = models.BooleanField(default=False)
packaging = models.BooleanField(default=False)
health_certificate = models.CharField(verbose_name='health certificate', max_length=255, unique=False, )
address = models.CharField(verbose_name='address', max_length=255, unique=False, )
coordinates = models.CharField(verbose_name='coordinates', max_length=255, unique=False, null=True)
latitude = models.CharField(verbose_name='latitude', max_length=255, unique=False, null=True)
longitude = models.CharField(verbose_name='longitude', max_length=255, unique=False, null=True)
review = models.ForeignKey('applications.ApplicationReview', related_name='restaurants',
on_delete=models.DO_NOTHING, null=True)
class Program(models.Model):
created_at = models.DateTimeField(auto_now=True)
program_name = models.CharField(verbose_name='program name', max_length=255, unique=False, )
main_contact = models.ForeignKey('profiles.UserClass', on_delete=models.DO_NOTHING, related_name="program_object", null=True)
phone_number = models.CharField(verbose_name='phone number', max_length=255, unique=False, )
schedule = models.ForeignKey('profiles.Schedule', on_delete=models.DO_NOTHING, null=True)
meals = models.IntegerField(default=0, null=True)
address = models.CharField(verbose_name='address', max_length=255, unique=False, )
    coordinates = models.CharField(verbose_name='coordinates', max_length=255, unique=False, null=True)
latitude = models.CharField(verbose_name='latitude', max_length=255, unique=False, null=True)
longitude = models.CharField(verbose_name='longitude', max_length=255, unique=False, null=True)
review = models.ForeignKey('applications.ApplicationReview', related_name="programs",
on_delete=models.DO_NOTHING, null=True)
class Courier(models.Model):
created_at = models.DateTimeField(auto_now=True)
class Profile(models.Model):
user = models.OneToOneField(BasicUser, on_delete=models.CASCADE)
avatar = models.ImageField(upload_to='avatars', blank=True)
    def __str__(self):
        # BasicUser defines no username field, so fall back to its primary key for display
        return f'Profile for BasicUser {self.user_id}'
|
normal
|
{
"blob_id": "8a1f024be00200218782c919b21161bf48fc817e",
"index": 7805,
"step-1": "<mask token>\n\n\nclass UserClass(AbstractBaseUser):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_full_name(self):\n return self.first_name + ' ' + self.last_name\n <mask token>\n <mask token>\n <mask token>\n\n\nclass AdminUser(models.Model):\n \"\"\"Model for admin user data\"\"\"\n role = models.CharField(max_length=20, choices=ADMIN_ROLE_OPTIONS,\n default=STAFF)\n\n\nclass BasicUser(models.Model):\n \"\"\"Model for basic user data\"\"\"\n type = models.CharField(max_length=20, choices=USER_TYPE_OPTIONS,\n default=RESTAURANT)\n preferred_contact = models.CharField(max_length=20, choices=\n PREFERRED_CONTACT, default=EMAIL)\n position = models.CharField(verbose_name='position/title', max_length=\n 255, unique=False, null=True)\n restaurant = models.ForeignKey('profiles.Restaurant', on_delete=models.\n CASCADE, null=True)\n program = models.ForeignKey('profiles.Program', on_delete=models.\n CASCADE, null=True)\n courier = models.ForeignKey('profiles.Courier', on_delete=models.\n CASCADE, null=True)\n\n\nclass Schedule(models.Model):\n monday_start = models.TimeField(auto_now=False, null=True, blank=True)\n monday_end = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_start = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_end = models.TimeField(auto_now=False, null=True, blank=True)\n friday_start = models.TimeField(auto_now=False, null=True, blank=True)\n friday_end = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_start = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_end = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_start = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_end = models.TimeField(auto_now=False, null=True, blank=True)\n\n def getSchedule(self):\n schedule = {}\n if self.monday_start:\n schedule['monday_start'] = self.monday_start.strftime('%-I:%M %p')\n else:\n schedule['monday_start'] = ''\n if self.monday_end:\n schedule['monday_end'] = self.monday_end.strftime('%-I:%M %p')\n else:\n schedule['monday_end'] = ''\n if self.tuesday_start:\n schedule['tuesday_start'] = self.tuesday_start.strftime('%-I:%M %p'\n )\n else:\n schedule['tuesday_start'] = ''\n if self.tuesday_end:\n schedule['tuesday_end'] = self.tuesday_end.strftime('%-I:%M %p')\n else:\n schedule['tuesday_end'] = ''\n if self.wednesday_start:\n schedule['wednesday_start'] = self.wednesday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['wednesday_start'] = ''\n if self.wednesday_end:\n schedule['wednesday_end'] = self.wednesday_end.strftime('%-I:%M %p'\n )\n else:\n schedule['wednesday_end'] = ''\n if self.thursday_start:\n schedule['thursday_start'] = self.thursday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['thursday_start'] = ''\n if self.thursday_end:\n schedule['thursday_end'] = self.thursday_end.strftime('%-I:%M %p')\n else:\n schedule['thursday_end'] = ''\n if self.friday_start:\n schedule['friday_start'] = self.friday_start.strftime('%-I:%M %p')\n 
else:\n schedule['friday_start'] = ''\n if self.friday_end:\n schedule['friday_end'] = self.friday_end.strftime('%-I:%M %p')\n else:\n schedule['friday_end'] = ''\n if self.saturday_start:\n schedule['saturday_start'] = self.saturday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['saturday_start'] = ''\n if self.saturday_end:\n schedule['saturday_end'] = self.saturday_end.strftime('%-I:%M %p')\n else:\n schedule['saturday_end'] = ''\n if self.sunday_start:\n schedule['sunday_start'] = self.sunday_start.strftime('%-I:%M %p')\n else:\n schedule['sunday_start'] = ''\n if self.sunday_end:\n schedule['sunday_end'] = self.sunday_end.strftime('%-I:%M %p')\n else:\n schedule['sunday_end'] = ''\n return schedule\n\n\nclass Restaurant(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n company_name = models.CharField(verbose_name='company name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n .DO_NOTHING, related_name='restaurant_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField()\n uber_eats = models.BooleanField(default=False)\n delivery_capacity = models.BooleanField(default=False)\n packaging = models.BooleanField(default=False)\n health_certificate = models.CharField(verbose_name='health certificate',\n max_length=255, unique=False)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='coordinates', max_length=\n 255, unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='restaurants', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Program(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n program_name = models.CharField(verbose_name='program name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n .DO_NOTHING, related_name='program_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField(default=0, null=True)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='address', max_length=255,\n unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='programs', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Courier(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(BasicUser, on_delete=models.CASCADE)\n avatar = models.ImageField(upload_to='avatars', blank=True)\n\n def __str__(self):\n return self.user.username\n",
"step-2": "<mask token>\n\n\nclass UserClassManager(BaseUserManager):\n <mask token>\n <mask token>\n\n def create_basic_user(self, type, last_name, first_name, email,\n password, phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n user_object = BasicUser.objects.create(type=type)\n new_account.user_object = user_object\n new_account.user_type = BASIC_USER\n user_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n\n def create_user(self, last_name, first_name, email, password,\n phone_number=''):\n new_account = self.model(email=self.normalize_email(email))\n new_account.set_password(password)\n new_account.last_name = last_name\n new_account.first_name = first_name\n new_account.phone_number = phone_number\n new_account.save(using=self._db)\n return new_account\n\n def create_superuser(self, last_name, first_name, email, password,\n phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n new_account.staff = True\n new_account.admin = True\n admin_object = AdminUser.objects.create(role=SUPER_ADMIN)\n new_account.admin_object = admin_object\n new_account.user_type = ADMIN\n admin_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n <mask token>\n <mask token>\n\n\nclass UserClass(AbstractBaseUser):\n \"\"\"Class for general user - can be basic user or admin\"\"\"\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False, default='')\n active = models.BooleanField(default=True)\n is_active = models.BooleanField(default=True)\n email = models.EmailField(verbose_name='email', max_length=255, unique=True\n )\n last_name = models.CharField(verbose_name='last name', max_length=255,\n unique=False)\n first_name = models.CharField(verbose_name='first name', max_length=255,\n unique=False)\n objects = UserClassManager()\n staff = models.BooleanField(default=False)\n admin = models.BooleanField(default=False)\n image = models.CharField(verbose_name='user image', max_length=255,\n unique=False, default='defaultIcon.png')\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = ['first_name', 'last_name']\n user_type = models.CharField(max_length=20, choices=USER_TYPES, default\n =BASIC_USER)\n user_object = models.ForeignKey('profiles.BasicUser', on_delete=models.\n DO_NOTHING, null=True, related_name='basic_user_parent')\n admin_object = models.ForeignKey('profiles.AdminUser', on_delete=models\n .DO_NOTHING, null=True, related_name='admin_user_parent')\n\n def has_module_perms(self, app_label):\n return True\n\n @property\n def is_admin(self):\n return self.admin\n\n def get_full_name(self):\n return self.first_name + ' ' + self.last_name\n\n def get_short_name(self):\n return self.first_name\n\n @property\n def is_staff(self):\n return self.staff\n\n def __str__(self):\n return self.email\n\n\nclass AdminUser(models.Model):\n \"\"\"Model for admin user data\"\"\"\n role = models.CharField(max_length=20, choices=ADMIN_ROLE_OPTIONS,\n default=STAFF)\n\n\nclass BasicUser(models.Model):\n \"\"\"Model for basic user data\"\"\"\n type = models.CharField(max_length=20, choices=USER_TYPE_OPTIONS,\n default=RESTAURANT)\n preferred_contact = models.CharField(max_length=20, choices=\n PREFERRED_CONTACT, default=EMAIL)\n position = models.CharField(verbose_name='position/title', max_length=\n 255, unique=False, 
null=True)\n restaurant = models.ForeignKey('profiles.Restaurant', on_delete=models.\n CASCADE, null=True)\n program = models.ForeignKey('profiles.Program', on_delete=models.\n CASCADE, null=True)\n courier = models.ForeignKey('profiles.Courier', on_delete=models.\n CASCADE, null=True)\n\n\nclass Schedule(models.Model):\n monday_start = models.TimeField(auto_now=False, null=True, blank=True)\n monday_end = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_start = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_end = models.TimeField(auto_now=False, null=True, blank=True)\n friday_start = models.TimeField(auto_now=False, null=True, blank=True)\n friday_end = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_start = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_end = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_start = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_end = models.TimeField(auto_now=False, null=True, blank=True)\n\n def getSchedule(self):\n schedule = {}\n if self.monday_start:\n schedule['monday_start'] = self.monday_start.strftime('%-I:%M %p')\n else:\n schedule['monday_start'] = ''\n if self.monday_end:\n schedule['monday_end'] = self.monday_end.strftime('%-I:%M %p')\n else:\n schedule['monday_end'] = ''\n if self.tuesday_start:\n schedule['tuesday_start'] = self.tuesday_start.strftime('%-I:%M %p'\n )\n else:\n schedule['tuesday_start'] = ''\n if self.tuesday_end:\n schedule['tuesday_end'] = self.tuesday_end.strftime('%-I:%M %p')\n else:\n schedule['tuesday_end'] = ''\n if self.wednesday_start:\n schedule['wednesday_start'] = self.wednesday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['wednesday_start'] = ''\n if self.wednesday_end:\n schedule['wednesday_end'] = self.wednesday_end.strftime('%-I:%M %p'\n )\n else:\n schedule['wednesday_end'] = ''\n if self.thursday_start:\n schedule['thursday_start'] = self.thursday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['thursday_start'] = ''\n if self.thursday_end:\n schedule['thursday_end'] = self.thursday_end.strftime('%-I:%M %p')\n else:\n schedule['thursday_end'] = ''\n if self.friday_start:\n schedule['friday_start'] = self.friday_start.strftime('%-I:%M %p')\n else:\n schedule['friday_start'] = ''\n if self.friday_end:\n schedule['friday_end'] = self.friday_end.strftime('%-I:%M %p')\n else:\n schedule['friday_end'] = ''\n if self.saturday_start:\n schedule['saturday_start'] = self.saturday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['saturday_start'] = ''\n if self.saturday_end:\n schedule['saturday_end'] = self.saturday_end.strftime('%-I:%M %p')\n else:\n schedule['saturday_end'] = ''\n if self.sunday_start:\n schedule['sunday_start'] = self.sunday_start.strftime('%-I:%M %p')\n else:\n schedule['sunday_start'] = ''\n if self.sunday_end:\n schedule['sunday_end'] = self.sunday_end.strftime('%-I:%M %p')\n else:\n schedule['sunday_end'] = ''\n return schedule\n\n\nclass Restaurant(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n company_name = models.CharField(verbose_name='company name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n 
.DO_NOTHING, related_name='restaurant_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField()\n uber_eats = models.BooleanField(default=False)\n delivery_capacity = models.BooleanField(default=False)\n packaging = models.BooleanField(default=False)\n health_certificate = models.CharField(verbose_name='health certificate',\n max_length=255, unique=False)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='coordinates', max_length=\n 255, unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='restaurants', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Program(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n program_name = models.CharField(verbose_name='program name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n .DO_NOTHING, related_name='program_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField(default=0, null=True)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='address', max_length=255,\n unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='programs', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Courier(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(BasicUser, on_delete=models.CASCADE)\n avatar = models.ImageField(upload_to='avatars', blank=True)\n\n def __str__(self):\n return self.user.username\n",
"step-3": "<mask token>\n\n\nclass UserClassManager(BaseUserManager):\n <mask token>\n\n def create_staffuser(self, last_name, first_name, email, password, role,\n phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n new_account.staff = True\n admin_object = AdminUser.objects.create(role=role)\n new_account.admin_object = admin_object\n new_account.user_type = ADMIN\n admin_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n\n def create_basic_user(self, type, last_name, first_name, email,\n password, phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n user_object = BasicUser.objects.create(type=type)\n new_account.user_object = user_object\n new_account.user_type = BASIC_USER\n user_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n\n def create_user(self, last_name, first_name, email, password,\n phone_number=''):\n new_account = self.model(email=self.normalize_email(email))\n new_account.set_password(password)\n new_account.last_name = last_name\n new_account.first_name = first_name\n new_account.phone_number = phone_number\n new_account.save(using=self._db)\n return new_account\n\n def create_superuser(self, last_name, first_name, email, password,\n phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n new_account.staff = True\n new_account.admin = True\n admin_object = AdminUser.objects.create(role=SUPER_ADMIN)\n new_account.admin_object = admin_object\n new_account.user_type = ADMIN\n admin_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n <mask token>\n <mask token>\n\n\nclass UserClass(AbstractBaseUser):\n \"\"\"Class for general user - can be basic user or admin\"\"\"\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False, default='')\n active = models.BooleanField(default=True)\n is_active = models.BooleanField(default=True)\n email = models.EmailField(verbose_name='email', max_length=255, unique=True\n )\n last_name = models.CharField(verbose_name='last name', max_length=255,\n unique=False)\n first_name = models.CharField(verbose_name='first name', max_length=255,\n unique=False)\n objects = UserClassManager()\n staff = models.BooleanField(default=False)\n admin = models.BooleanField(default=False)\n image = models.CharField(verbose_name='user image', max_length=255,\n unique=False, default='defaultIcon.png')\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = ['first_name', 'last_name']\n user_type = models.CharField(max_length=20, choices=USER_TYPES, default\n =BASIC_USER)\n user_object = models.ForeignKey('profiles.BasicUser', on_delete=models.\n DO_NOTHING, null=True, related_name='basic_user_parent')\n admin_object = models.ForeignKey('profiles.AdminUser', on_delete=models\n .DO_NOTHING, null=True, related_name='admin_user_parent')\n\n def has_module_perms(self, app_label):\n return True\n\n @property\n def is_admin(self):\n return self.admin\n\n def get_full_name(self):\n return self.first_name + ' ' + self.last_name\n\n def get_short_name(self):\n return self.first_name\n\n @property\n def is_staff(self):\n return self.staff\n\n def __str__(self):\n return self.email\n\n\nclass AdminUser(models.Model):\n \"\"\"Model for 
admin user data\"\"\"\n role = models.CharField(max_length=20, choices=ADMIN_ROLE_OPTIONS,\n default=STAFF)\n\n\nclass BasicUser(models.Model):\n \"\"\"Model for basic user data\"\"\"\n type = models.CharField(max_length=20, choices=USER_TYPE_OPTIONS,\n default=RESTAURANT)\n preferred_contact = models.CharField(max_length=20, choices=\n PREFERRED_CONTACT, default=EMAIL)\n position = models.CharField(verbose_name='position/title', max_length=\n 255, unique=False, null=True)\n restaurant = models.ForeignKey('profiles.Restaurant', on_delete=models.\n CASCADE, null=True)\n program = models.ForeignKey('profiles.Program', on_delete=models.\n CASCADE, null=True)\n courier = models.ForeignKey('profiles.Courier', on_delete=models.\n CASCADE, null=True)\n\n\nclass Schedule(models.Model):\n monday_start = models.TimeField(auto_now=False, null=True, blank=True)\n monday_end = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_start = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_end = models.TimeField(auto_now=False, null=True, blank=True)\n friday_start = models.TimeField(auto_now=False, null=True, blank=True)\n friday_end = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_start = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_end = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_start = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_end = models.TimeField(auto_now=False, null=True, blank=True)\n\n def getSchedule(self):\n schedule = {}\n if self.monday_start:\n schedule['monday_start'] = self.monday_start.strftime('%-I:%M %p')\n else:\n schedule['monday_start'] = ''\n if self.monday_end:\n schedule['monday_end'] = self.monday_end.strftime('%-I:%M %p')\n else:\n schedule['monday_end'] = ''\n if self.tuesday_start:\n schedule['tuesday_start'] = self.tuesday_start.strftime('%-I:%M %p'\n )\n else:\n schedule['tuesday_start'] = ''\n if self.tuesday_end:\n schedule['tuesday_end'] = self.tuesday_end.strftime('%-I:%M %p')\n else:\n schedule['tuesday_end'] = ''\n if self.wednesday_start:\n schedule['wednesday_start'] = self.wednesday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['wednesday_start'] = ''\n if self.wednesday_end:\n schedule['wednesday_end'] = self.wednesday_end.strftime('%-I:%M %p'\n )\n else:\n schedule['wednesday_end'] = ''\n if self.thursday_start:\n schedule['thursday_start'] = self.thursday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['thursday_start'] = ''\n if self.thursday_end:\n schedule['thursday_end'] = self.thursday_end.strftime('%-I:%M %p')\n else:\n schedule['thursday_end'] = ''\n if self.friday_start:\n schedule['friday_start'] = self.friday_start.strftime('%-I:%M %p')\n else:\n schedule['friday_start'] = ''\n if self.friday_end:\n schedule['friday_end'] = self.friday_end.strftime('%-I:%M %p')\n else:\n schedule['friday_end'] = ''\n if self.saturday_start:\n schedule['saturday_start'] = self.saturday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['saturday_start'] = ''\n if self.saturday_end:\n schedule['saturday_end'] = self.saturday_end.strftime('%-I:%M %p')\n else:\n schedule['saturday_end'] = ''\n if self.sunday_start:\n schedule['sunday_start'] = 
self.sunday_start.strftime('%-I:%M %p')\n else:\n schedule['sunday_start'] = ''\n if self.sunday_end:\n schedule['sunday_end'] = self.sunday_end.strftime('%-I:%M %p')\n else:\n schedule['sunday_end'] = ''\n return schedule\n\n\nclass Restaurant(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n company_name = models.CharField(verbose_name='company name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n .DO_NOTHING, related_name='restaurant_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField()\n uber_eats = models.BooleanField(default=False)\n delivery_capacity = models.BooleanField(default=False)\n packaging = models.BooleanField(default=False)\n health_certificate = models.CharField(verbose_name='health certificate',\n max_length=255, unique=False)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='coordinates', max_length=\n 255, unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='restaurants', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Program(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n program_name = models.CharField(verbose_name='program name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n .DO_NOTHING, related_name='program_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField(default=0, null=True)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='address', max_length=255,\n unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='programs', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Courier(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(BasicUser, on_delete=models.CASCADE)\n avatar = models.ImageField(upload_to='avatars', blank=True)\n\n def __str__(self):\n return self.user.username\n",
"step-4": "from django.db.models.signals import post_save\nfrom django.contrib.auth.models import AbstractBaseUser, BaseUserManager\nfrom django.db import models\nBASIC_ADMIN = 'ADMIN'\nSUPER_ADMIN = 'SUPER'\nMANAGER = 'MNGR'\nDEVELOPER = 'DEV'\nSTAFF = 'STAFF'\nADMIN_ROLE_OPTIONS = [(BASIC_ADMIN, 'basic admin'), (SUPER_ADMIN,\n 'super admin'), (MANAGER, 'manager'), (DEVELOPER, 'developer'), (STAFF,\n 'stuff')]\nPROGRAM = 'PR'\nRESTAURANT = 'RE'\nUSER_TYPE_OPTIONS = [(PROGRAM, 'Program'), (RESTAURANT, 'Restaurant')]\nPHONE = 'PH'\nEMAIL = 'EM'\nPREFERRED_CONTACT = [(PHONE, 'Phone'), (EMAIL, 'Email')]\nADMIN = 'ADM'\nBASIC_USER = 'BSC'\nUSER_TYPES = [(ADMIN, 'Admin'), (BASIC_USER, 'Basic User')]\n\n\nclass UserClassManager(BaseUserManager):\n \"\"\"Manager for User class\"\"\"\n\n def create_staffuser(self, last_name, first_name, email, password, role,\n phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n new_account.staff = True\n admin_object = AdminUser.objects.create(role=role)\n new_account.admin_object = admin_object\n new_account.user_type = ADMIN\n admin_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n\n def create_basic_user(self, type, last_name, first_name, email,\n password, phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n user_object = BasicUser.objects.create(type=type)\n new_account.user_object = user_object\n new_account.user_type = BASIC_USER\n user_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n\n def create_user(self, last_name, first_name, email, password,\n phone_number=''):\n new_account = self.model(email=self.normalize_email(email))\n new_account.set_password(password)\n new_account.last_name = last_name\n new_account.first_name = first_name\n new_account.phone_number = phone_number\n new_account.save(using=self._db)\n return new_account\n\n def create_superuser(self, last_name, first_name, email, password,\n phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name\n =last_name, first_name=first_name, email=email, password=password)\n new_account.staff = True\n new_account.admin = True\n admin_object = AdminUser.objects.create(role=SUPER_ADMIN)\n new_account.admin_object = admin_object\n new_account.user_type = ADMIN\n admin_object.save(using=self._db)\n new_account.save(using=self._db)\n return new_account\n REQUIRED_FIELDS = []\n USERNAME_FIELD = 'email'\n\n\nclass UserClass(AbstractBaseUser):\n \"\"\"Class for general user - can be basic user or admin\"\"\"\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False, default='')\n active = models.BooleanField(default=True)\n is_active = models.BooleanField(default=True)\n email = models.EmailField(verbose_name='email', max_length=255, unique=True\n )\n last_name = models.CharField(verbose_name='last name', max_length=255,\n unique=False)\n first_name = models.CharField(verbose_name='first name', max_length=255,\n unique=False)\n objects = UserClassManager()\n staff = models.BooleanField(default=False)\n admin = models.BooleanField(default=False)\n image = models.CharField(verbose_name='user image', max_length=255,\n unique=False, default='defaultIcon.png')\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = ['first_name', 'last_name']\n user_type = 
models.CharField(max_length=20, choices=USER_TYPES, default\n =BASIC_USER)\n user_object = models.ForeignKey('profiles.BasicUser', on_delete=models.\n DO_NOTHING, null=True, related_name='basic_user_parent')\n admin_object = models.ForeignKey('profiles.AdminUser', on_delete=models\n .DO_NOTHING, null=True, related_name='admin_user_parent')\n\n def has_module_perms(self, app_label):\n return True\n\n @property\n def is_admin(self):\n return self.admin\n\n def get_full_name(self):\n return self.first_name + ' ' + self.last_name\n\n def get_short_name(self):\n return self.first_name\n\n @property\n def is_staff(self):\n return self.staff\n\n def __str__(self):\n return self.email\n\n\nclass AdminUser(models.Model):\n \"\"\"Model for admin user data\"\"\"\n role = models.CharField(max_length=20, choices=ADMIN_ROLE_OPTIONS,\n default=STAFF)\n\n\nclass BasicUser(models.Model):\n \"\"\"Model for basic user data\"\"\"\n type = models.CharField(max_length=20, choices=USER_TYPE_OPTIONS,\n default=RESTAURANT)\n preferred_contact = models.CharField(max_length=20, choices=\n PREFERRED_CONTACT, default=EMAIL)\n position = models.CharField(verbose_name='position/title', max_length=\n 255, unique=False, null=True)\n restaurant = models.ForeignKey('profiles.Restaurant', on_delete=models.\n CASCADE, null=True)\n program = models.ForeignKey('profiles.Program', on_delete=models.\n CASCADE, null=True)\n courier = models.ForeignKey('profiles.Courier', on_delete=models.\n CASCADE, null=True)\n\n\nclass Schedule(models.Model):\n monday_start = models.TimeField(auto_now=False, null=True, blank=True)\n monday_end = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_start = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_end = models.TimeField(auto_now=False, null=True, blank=True)\n friday_start = models.TimeField(auto_now=False, null=True, blank=True)\n friday_end = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_start = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_end = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_start = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_end = models.TimeField(auto_now=False, null=True, blank=True)\n\n def getSchedule(self):\n schedule = {}\n if self.monday_start:\n schedule['monday_start'] = self.monday_start.strftime('%-I:%M %p')\n else:\n schedule['monday_start'] = ''\n if self.monday_end:\n schedule['monday_end'] = self.monday_end.strftime('%-I:%M %p')\n else:\n schedule['monday_end'] = ''\n if self.tuesday_start:\n schedule['tuesday_start'] = self.tuesday_start.strftime('%-I:%M %p'\n )\n else:\n schedule['tuesday_start'] = ''\n if self.tuesday_end:\n schedule['tuesday_end'] = self.tuesday_end.strftime('%-I:%M %p')\n else:\n schedule['tuesday_end'] = ''\n if self.wednesday_start:\n schedule['wednesday_start'] = self.wednesday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['wednesday_start'] = ''\n if self.wednesday_end:\n schedule['wednesday_end'] = self.wednesday_end.strftime('%-I:%M %p'\n )\n else:\n schedule['wednesday_end'] = ''\n if self.thursday_start:\n schedule['thursday_start'] = self.thursday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['thursday_start'] = 
''\n if self.thursday_end:\n schedule['thursday_end'] = self.thursday_end.strftime('%-I:%M %p')\n else:\n schedule['thursday_end'] = ''\n if self.friday_start:\n schedule['friday_start'] = self.friday_start.strftime('%-I:%M %p')\n else:\n schedule['friday_start'] = ''\n if self.friday_end:\n schedule['friday_end'] = self.friday_end.strftime('%-I:%M %p')\n else:\n schedule['friday_end'] = ''\n if self.saturday_start:\n schedule['saturday_start'] = self.saturday_start.strftime(\n '%-I:%M %p')\n else:\n schedule['saturday_start'] = ''\n if self.saturday_end:\n schedule['saturday_end'] = self.saturday_end.strftime('%-I:%M %p')\n else:\n schedule['saturday_end'] = ''\n if self.sunday_start:\n schedule['sunday_start'] = self.sunday_start.strftime('%-I:%M %p')\n else:\n schedule['sunday_start'] = ''\n if self.sunday_end:\n schedule['sunday_end'] = self.sunday_end.strftime('%-I:%M %p')\n else:\n schedule['sunday_end'] = ''\n return schedule\n\n\nclass Restaurant(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n company_name = models.CharField(verbose_name='company name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n .DO_NOTHING, related_name='restaurant_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField()\n uber_eats = models.BooleanField(default=False)\n delivery_capacity = models.BooleanField(default=False)\n packaging = models.BooleanField(default=False)\n health_certificate = models.CharField(verbose_name='health certificate',\n max_length=255, unique=False)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='coordinates', max_length=\n 255, unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='restaurants', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Program(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n program_name = models.CharField(verbose_name='program name', max_length\n =255, unique=False)\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models\n .DO_NOTHING, related_name='program_object', null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length\n =255, unique=False)\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.\n DO_NOTHING, null=True)\n meals = models.IntegerField(default=0, null=True)\n address = models.CharField(verbose_name='address', max_length=255,\n unique=False)\n coordinates = models.CharField(verbose_name='address', max_length=255,\n unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255,\n unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255,\n unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview',\n related_name='programs', on_delete=models.DO_NOTHING, null=True)\n\n\nclass Courier(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(BasicUser, on_delete=models.CASCADE)\n avatar = 
models.ImageField(upload_to='avatars', blank=True)\n\n def __str__(self):\n return self.user.username\n",
"step-5": "# from django.contrib.auth.models import User\nfrom django.db.models.signals import post_save\nfrom django.contrib.auth.models import AbstractBaseUser, BaseUserManager\nfrom django.db import models\n\n# from applications.models import ApplicationReview\n# from profiles.models import Restaurant, Program, Courier\n\n\n\n\n\n# Enum for Admin\nBASIC_ADMIN = 'ADMIN'\nSUPER_ADMIN = 'SUPER'\nMANAGER = 'MNGR'\nDEVELOPER = 'DEV'\nSTAFF = 'STAFF'\n\n\nADMIN_ROLE_OPTIONS = [\n (BASIC_ADMIN, 'basic admin'),\n (SUPER_ADMIN, 'super admin'),\n (MANAGER, 'manager'),\n (DEVELOPER, 'developer'),\n (STAFF, 'stuff'),\n]\n\n\nPROGRAM = \"PR\"\nRESTAURANT = \"RE\"\n\nUSER_TYPE_OPTIONS = [\n (PROGRAM, 'Program'),\n (RESTAURANT, 'Restaurant'),\n]\n\n\nPHONE = \"PH\"\nEMAIL = \"EM\"\n\n\n\nPREFERRED_CONTACT = [\n (PHONE, 'Phone'),\n (EMAIL, 'Email'),\n]\n\n\nADMIN = \"ADM\"\nBASIC_USER = \"BSC\"\n\nUSER_TYPES = [\n (ADMIN, 'Admin'),\n (BASIC_USER, 'Basic User'),\n]\n\n\nclass UserClassManager(BaseUserManager):\n \"\"\"Manager for User class\"\"\"\n\n # method for creatig admins, but not super admins\n def create_staffuser(self, last_name, first_name, email, password, role, phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name=last_name, first_name=first_name,\n email=email, password=password)\n new_account.staff = True\n\n admin_object = AdminUser.objects.create(role=role)\n new_account.admin_object = admin_object\n new_account.user_type = ADMIN\n admin_object.save(using=self._db)\n\n new_account.save(using=self._db)\n return new_account\n\n def create_basic_user(self, type, last_name, first_name, email, password, phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name=last_name, first_name=first_name,\n email=email, password=password)\n user_object = BasicUser.objects.create(type=type)\n new_account.user_object = user_object\n new_account.user_type = BASIC_USER\n\n user_object.save(using=self._db)\n new_account.save(using=self._db)\n\n return new_account\n\n # method for creating restaurants, schools, etc.\n def create_user(self, last_name, first_name, email, password, phone_number=''):\n new_account = self.model(email=self.normalize_email(email),)\n new_account.set_password(password)\n\n new_account.last_name = last_name\n new_account.first_name = first_name\n\n new_account.phone_number = phone_number\n\n new_account.save(using=self._db)\n return new_account\n\n # method for creating superadmins\n def create_superuser(self, last_name, first_name, email, password, phone_number=''):\n new_account = self.create_user(phone_number=phone_number, last_name=last_name, first_name=first_name,\n email=email, password=password)\n new_account.staff = True\n new_account.admin = True\n\n admin_object = AdminUser.objects.create(role=SUPER_ADMIN)\n new_account.admin_object = admin_object\n new_account.user_type = ADMIN\n admin_object.save(using=self._db)\n\n new_account.save(using=self._db)\n return new_account\n\n # add any required fields here other than email and password\n REQUIRED_FIELDS = []\n USERNAME_FIELD = 'email'\n\n\nclass UserClass(AbstractBaseUser):\n \"\"\"Class for general user - can be basic user or admin\"\"\"\n phone_number = models.CharField(verbose_name='phone number', max_length=255, unique=False, default='')\n active = models.BooleanField(default=True)\n\n is_active = models.BooleanField(default=True)\n\n email = models.EmailField(verbose_name='email', max_length=255, unique=True, )\n last_name = 
models.CharField(verbose_name='last name', max_length=255, unique=False, )\n first_name = models.CharField(verbose_name='first name', max_length=255, unique=False, )\n objects = UserClassManager()\n staff = models.BooleanField(default=False)\n admin = models.BooleanField(default=False)\n image = models.CharField(verbose_name='user image', max_length=255, unique=False, default='defaultIcon.png')\n USERNAME_FIELD = \"email\"\n REQUIRED_FIELDS = ['first_name', 'last_name']\n\n user_type = models.CharField(\n max_length=20,\n choices=USER_TYPES,\n default=BASIC_USER,\n )\n\n user_object = models.ForeignKey('profiles.BasicUser', on_delete=models.DO_NOTHING, null=True, related_name='basic_user_parent')\n admin_object = models.ForeignKey('profiles.AdminUser', on_delete=models.DO_NOTHING, null=True, related_name='admin_user_parent')\n\n def has_module_perms(self, app_label):\n return True\n\n @property\n def is_admin(self):\n return self.admin\n\n def get_full_name(self):\n return self.first_name + ' ' + self.last_name\n\n def get_short_name(self):\n return self.first_name\n\n @property\n def is_staff(self):\n return self.staff\n\n def __str__(self):\n return self.email\n\nclass AdminUser(models.Model):\n \"\"\"Model for admin user data\"\"\"\n role = models.CharField(\n max_length=20,\n choices=ADMIN_ROLE_OPTIONS,\n default=STAFF,\n )\n\n\nclass BasicUser(models.Model):\n \"\"\"Model for basic user data\"\"\"\n type = models.CharField(\n max_length=20,\n choices=USER_TYPE_OPTIONS,\n default=RESTAURANT,\n )\n\n preferred_contact = models.CharField(\n max_length=20,\n choices=PREFERRED_CONTACT,\n default=EMAIL,\n )\n\n position = models.CharField(verbose_name='position/title', max_length=255, unique=False, null=True)\n\n restaurant = models.ForeignKey('profiles.Restaurant', on_delete=models.CASCADE, null=True)\n program = models.ForeignKey('profiles.Program', on_delete=models.CASCADE, null=True)\n courier = models.ForeignKey('profiles.Courier', on_delete=models.CASCADE, null=True)\n\n\nclass Schedule(models.Model):\n monday_start = models.TimeField(auto_now=False, null=True, blank=True)\n monday_end = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n tuesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_start = models.TimeField(auto_now=False, null=True, blank=True)\n wednesday_end = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_start = models.TimeField(auto_now=False, null=True, blank=True)\n thursday_end = models.TimeField(auto_now=False, null=True, blank=True)\n friday_start = models.TimeField(auto_now=False, null=True, blank=True)\n friday_end = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_start = models.TimeField(auto_now=False, null=True, blank=True)\n saturday_end = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_start = models.TimeField(auto_now=False, null=True, blank=True)\n sunday_end = models.TimeField(auto_now=False, null=True, blank=True)\n\n def getSchedule(self):\n schedule = {}\n if self.monday_start:\n schedule['monday_start'] = self.monday_start.strftime(\"%-I:%M %p\")\n else:\n schedule['monday_start'] = ''\n if self.monday_end:\n schedule['monday_end'] = self.monday_end.strftime(\"%-I:%M %p\")\n else:\n schedule['monday_end'] = ''\n if self.tuesday_start:\n schedule['tuesday_start'] = self.tuesday_start.strftime(\"%-I:%M %p\")\n else:\n schedule['tuesday_start'] = ''\n if self.tuesday_end:\n 
schedule['tuesday_end'] = self.tuesday_end.strftime(\"%-I:%M %p\")\n else:\n schedule['tuesday_end'] = ''\n if self.wednesday_start:\n schedule['wednesday_start'] = self.wednesday_start.strftime(\"%-I:%M %p\")\n else:\n schedule['wednesday_start'] = ''\n if self.wednesday_end:\n schedule['wednesday_end'] = self.wednesday_end.strftime(\"%-I:%M %p\")\n else:\n schedule['wednesday_end'] = ''\n if self.thursday_start:\n schedule['thursday_start'] = self.thursday_start.strftime(\"%-I:%M %p\")\n else:\n schedule['thursday_start'] = ''\n if self.thursday_end:\n schedule['thursday_end'] = self.thursday_end.strftime(\"%-I:%M %p\")\n else:\n schedule['thursday_end'] = ''\n if self.friday_start:\n schedule['friday_start'] = self.friday_start.strftime(\"%-I:%M %p\")\n else:\n schedule['friday_start'] = ''\n if self.friday_end:\n schedule['friday_end'] = self.friday_end.strftime(\"%-I:%M %p\")\n else:\n schedule['friday_end'] = ''\n if self.saturday_start:\n schedule['saturday_start'] = self.saturday_start.strftime(\"%-I:%M %p\")\n else:\n schedule['saturday_start'] = ''\n if self.saturday_end:\n schedule['saturday_end'] = self.saturday_end.strftime(\"%-I:%M %p\")\n else:\n schedule['saturday_end'] = ''\n if self.sunday_start:\n schedule['sunday_start'] = self.sunday_start.strftime(\"%-I:%M %p\")\n else:\n schedule['sunday_start'] = ''\n if self.sunday_end:\n schedule['sunday_end'] = self.sunday_end.strftime(\"%-I:%M %p\")\n else:\n schedule['sunday_end'] = ''\n\n return schedule\n\n\nclass Restaurant(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n company_name = models.CharField(verbose_name='company name', max_length=255, unique=False, )\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models.DO_NOTHING, related_name=\"restaurant_object\", null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length=255, unique=False, )\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.DO_NOTHING, null=True)\n meals = models.IntegerField()\n uber_eats = models.BooleanField(default=False)\n delivery_capacity = models.BooleanField(default=False)\n packaging = models.BooleanField(default=False)\n health_certificate = models.CharField(verbose_name='health certificate', max_length=255, unique=False, )\n address = models.CharField(verbose_name='address', max_length=255, unique=False, )\n coordinates = models.CharField(verbose_name='coordinates', max_length=255, unique=False, null=True)\n latitude = models.CharField(verbose_name='latitude', max_length=255, unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255, unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview', related_name='restaurants',\n on_delete=models.DO_NOTHING, null=True)\n\n\n\n\nclass Program(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n program_name = models.CharField(verbose_name='program name', max_length=255, unique=False, )\n main_contact = models.ForeignKey('profiles.UserClass', on_delete=models.DO_NOTHING, related_name=\"program_object\", null=True)\n phone_number = models.CharField(verbose_name='phone number', max_length=255, unique=False, )\n schedule = models.ForeignKey('profiles.Schedule', on_delete=models.DO_NOTHING, null=True)\n meals = models.IntegerField(default=0, null=True)\n address = models.CharField(verbose_name='address', max_length=255, unique=False, )\n coordinates = models.CharField(verbose_name='address', max_length=255, unique=False, null=True)\n latitude = 
models.CharField(verbose_name='latitude', max_length=255, unique=False, null=True)\n longitude = models.CharField(verbose_name='longitude', max_length=255, unique=False, null=True)\n review = models.ForeignKey('applications.ApplicationReview', related_name=\"programs\",\n on_delete=models.DO_NOTHING, null=True)\n\n\n\n\n\nclass Courier(models.Model):\n created_at = models.DateTimeField(auto_now=True)\n\n\n\n\n\n\n\nclass Profile(models.Model):\n user = models.OneToOneField(BasicUser, on_delete=models.CASCADE)\n avatar = models.ImageField(upload_to='avatars', blank=True)\n\n def __str__(self):\n return self.user.username\n",
"step-ids": [
20,
31,
32,
36,
37
]
}
|
[
20,
31,
32,
36,
37
] |
import mock
def exc():
print 'here should raise'
def recursion():
try:
print 'here'
return exc()
except StandardError:
print 'exc'
return recursion()
def test_recursion():
global exc
exc = mock.Mock(side_effect = [StandardError, StandardError, mock.DEFAULT])
recursion()
test_recursion()
|
normal
|
{
"blob_id": "5ef7c838d8e9a05a09bd974790a85ff36d56a336",
"index": 990,
"step-1": "import mock\n\ndef exc():\n print 'here should raise'\n\ndef recursion():\n try:\n print 'here'\n return exc()\n except StandardError:\n print 'exc'\n return recursion()\n\n\ndef test_recursion():\n global exc\n exc = mock.Mock(side_effect = [StandardError, StandardError, mock.DEFAULT])\n recursion()\n\ntest_recursion()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
word=input()
letter,digit=0,0
for i in word:
if('a'<=i and i<='z') or ('A'<=i and i<='Z'):
letter+=1
if '0'<=i and i<='9':
digit+=1
print("LETTERS {0} \n DIGITS {1}".format(letter,digit))
|
normal
|
{
"blob_id": "f2a508ae99697d6ba320b158a1000379b975d568",
"index": 2227,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in word:\n if 'a' <= i and i <= 'z' or 'A' <= i and i <= 'Z':\n letter += 1\n if '0' <= i and i <= '9':\n digit += 1\nprint(\"\"\"LETTERS {0} \n DIGITS {1}\"\"\".format(letter, digit))\n",
"step-3": "word = input()\nletter, digit = 0, 0\nfor i in word:\n if 'a' <= i and i <= 'z' or 'A' <= i and i <= 'Z':\n letter += 1\n if '0' <= i and i <= '9':\n digit += 1\nprint(\"\"\"LETTERS {0} \n DIGITS {1}\"\"\".format(letter, digit))\n",
"step-4": "word=input()\nletter,digit=0,0\n\nfor i in word:\n if('a'<=i and i<='z') or ('A'<=i and i<='Z'):\n letter+=1\n if '0'<=i and i<='9':\n digit+=1\n\nprint(\"LETTERS {0} \\n DIGITS {1}\".format(letter,digit))\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
class Role:
"""
    Role class
    Undercover (spy)
    Civilian
"""
def __init__(self,key_word="",role_id = 0):
self.key_word = key_word
        self.role_id = role_id # civilian-0; undercover-1;
class User(Role):
"""
    User class
    Player
"""
def __init__(self,id,role_id):
        self.id = id # player id
self.role_id = role_id
|
normal
|
{
"blob_id": "3b5141a86948df6632612f6c9d7fc0089acc60aa",
"index": 5981,
"step-1": "<mask token>\n\n\nclass Role:\n <mask token>\n <mask token>\n\n\nclass User(Role):\n \"\"\"\n 用户类\n 玩家\n \"\"\"\n\n def __init__(self, id, role_id):\n self.id = id\n self.role_id = role_id\n",
"step-2": "<mask token>\n\n\nclass Role:\n <mask token>\n\n def __init__(self, key_word='', role_id=0):\n self.key_word = key_word\n self.role_id = role_id\n\n\nclass User(Role):\n \"\"\"\n 用户类\n 玩家\n \"\"\"\n\n def __init__(self, id, role_id):\n self.id = id\n self.role_id = role_id\n",
"step-3": "<mask token>\n\n\nclass Role:\n \"\"\"\n 角色类\n 卧底\n 平民\n \"\"\"\n\n def __init__(self, key_word='', role_id=0):\n self.key_word = key_word\n self.role_id = role_id\n\n\nclass User(Role):\n \"\"\"\n 用户类\n 玩家\n \"\"\"\n\n def __init__(self, id, role_id):\n self.id = id\n self.role_id = role_id\n",
"step-4": "import random\n\n\nclass Role:\n \"\"\"\n 角色类\n 卧底\n 平民\n \"\"\"\n\n def __init__(self, key_word='', role_id=0):\n self.key_word = key_word\n self.role_id = role_id\n\n\nclass User(Role):\n \"\"\"\n 用户类\n 玩家\n \"\"\"\n\n def __init__(self, id, role_id):\n self.id = id\n self.role_id = role_id\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport random\n\n\nclass Role:\n\n \"\"\"\n 角色类\n 卧底\n 平民\n \"\"\"\n def __init__(self,key_word=\"\",role_id = 0):\n self.key_word = key_word\n self.role_id = role_id #平民-0;卧底-1;\n\nclass User(Role):\n \"\"\"\n 用户类\n 玩家\n \"\"\"\n def __init__(self,id,role_id):\n self.id = id #玩家id\n self.role_id = role_id\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# -*- coding:utf-8 -*-
__author__ = 'yyp'
__date__ = '2018-5-26 3:42'
'''
Given a string, find the length of the longest substring without repeating characters.
Examples:
Given "abcabcbb", the answer is "abc", which the length is 3.
Given "bbbbb", the answer is "b", with the length of 1.
Given "pwwkew", the answer is "wke", with the length of 3. Note that the answer must be a substring, "pwke" is a subsequence and not a substring.
'''
class Solution:
"""
Time: O(n)
Space:O(1)
"""
def lengthOfLongestSubstring(self, s):
"""
:type s: str
:rtype: int
"""
l, res, freq = 0, 0, [False for _ in range(256)]
for idx, char in enumerate(s):
if freq[ord(char)]:
while s[l] != char:
freq[ord(s[l])] = False
l += 1
l += 1
else:
freq[ord(char)] = True
res = max(idx - l + 1, res)
return res
|
normal
|
{
"blob_id": "b7c43f4242e38318c9e5423ea73e9d9d86759a53",
"index": 4663,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def lengthOfLongestSubstring(self, s):\n \"\"\"\n :type s: str\n :rtype: int\n \"\"\"\n l, res, freq = 0, 0, [(False) for _ in range(256)]\n for idx, char in enumerate(s):\n if freq[ord(char)]:\n while s[l] != char:\n freq[ord(s[l])] = False\n l += 1\n l += 1\n else:\n freq[ord(char)] = True\n res = max(idx - l + 1, res)\n return res\n",
"step-3": "<mask token>\n\n\nclass Solution:\n \"\"\"\n Time: O(n)\n Space:O(1)\n \"\"\"\n\n def lengthOfLongestSubstring(self, s):\n \"\"\"\n :type s: str\n :rtype: int\n \"\"\"\n l, res, freq = 0, 0, [(False) for _ in range(256)]\n for idx, char in enumerate(s):\n if freq[ord(char)]:\n while s[l] != char:\n freq[ord(s[l])] = False\n l += 1\n l += 1\n else:\n freq[ord(char)] = True\n res = max(idx - l + 1, res)\n return res\n",
"step-4": "__author__ = 'yyp'\n__date__ = '2018-5-26 3:42'\n<mask token>\n\n\nclass Solution:\n \"\"\"\n Time: O(n)\n Space:O(1)\n \"\"\"\n\n def lengthOfLongestSubstring(self, s):\n \"\"\"\n :type s: str\n :rtype: int\n \"\"\"\n l, res, freq = 0, 0, [(False) for _ in range(256)]\n for idx, char in enumerate(s):\n if freq[ord(char)]:\n while s[l] != char:\n freq[ord(s[l])] = False\n l += 1\n l += 1\n else:\n freq[ord(char)] = True\n res = max(idx - l + 1, res)\n return res\n",
"step-5": "# -*- coding:utf-8 -*-\n__author__ = 'yyp'\n__date__ = '2018-5-26 3:42'\n\n'''\nGiven a string, find the length of the longest substring without repeating characters.\nExamples:\nGiven \"abcabcbb\", the answer is \"abc\", which the length is 3.\nGiven \"bbbbb\", the answer is \"b\", with the length of 1.\nGiven \"pwwkew\", the answer is \"wke\", with the length of 3. Note that the answer must be a substring, \"pwke\" is a subsequence and not a substring.\n'''\n\n\nclass Solution:\n \"\"\"\n Time: O(n)\n Space:O(1)\n \"\"\"\n\n def lengthOfLongestSubstring(self, s):\n \"\"\"\n :type s: str\n :rtype: int\n \"\"\"\n l, res, freq = 0, 0, [False for _ in range(256)]\n for idx, char in enumerate(s):\n if freq[ord(char)]:\n while s[l] != char:\n freq[ord(s[l])] = False\n l += 1\n l += 1\n else:\n freq[ord(char)] = True\n res = max(idx - l + 1, res)\n return res\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
class Vehicle(object):
count_list = []
def __init__(self, registration_number):
self.registration_number = registration_number
Vehicle.count_list.append(self)
Vehicle.count = len(Vehicle.count_list)
|
normal
|
{
"blob_id": "8b9336113f64a88eeabe6e45021938fac9efd1c6",
"index": 6442,
"step-1": "<mask token>\n",
"step-2": "class Vehicle(object):\n <mask token>\n <mask token>\n",
"step-3": "class Vehicle(object):\n <mask token>\n\n def __init__(self, registration_number):\n self.registration_number = registration_number\n Vehicle.count_list.append(self)\n Vehicle.count = len(Vehicle.count_list)\n",
"step-4": "class Vehicle(object):\n count_list = []\n\n def __init__(self, registration_number):\n self.registration_number = registration_number\n Vehicle.count_list.append(self)\n Vehicle.count = len(Vehicle.count_list)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import unittest
import json
from app.tests.base import BaseTestCase
from app import db
from app.tests.utils import create_room, create_simple_device, create_rgb_device
class TestRoomService(BaseTestCase):
"""Test the room service"""
def test_get_room(self):
room = create_room()
with self.client:
response = self.client.get('/api/rooms/1')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(1, data['data']['id'])
self.assertEqual('Living Room', data['data']['room_name'])
self.assertIn('success', data['status'])
def test_invalid_room(self):
with self.client:
response = self.client.get('api/rooms/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_get_all_rooms(self):
room1 = create_room()
room2 = create_room(room_name="Kitchen")
with self.client:
response = self.client.get('api/rooms')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data['data']['rooms']), 2)
self.assertEqual(data['data']['rooms'][0]['room_name'], 'Living Room')
self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')
self.assertIn('success', data['status'])
class TestDeviceService(BaseTestCase):
"""Test the device service"""
def test_get_device(self):
device = create_simple_device()
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(2, data['data']['id'])
self.assertEqual('LED1', data['data']['device_name'])
self.assertEqual('simpledevices', data['data']['type'])
self.assertIn('success', data['status'])
def test_get_invalid_device(self):
with self.client:
response = self.client.get('api/devices/2')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('fail', data['status'])
def test_patch_device(self):
device = create_rgb_device()
patch_data = {'red': 225}
with self.client:
response = self.client.patch(
'/api/devices/1',
data=json.dumps(patch_data),
content_type='application/json'
)
response_data = json.loads(response.data.decode())
self.assertEqual(response_data['data']['device_name'], 'LED Strip1')
self.assertEqual(response.status_code, 200)
self.assertEqual(response_data['data']['red'], 225)
self.assertEqual('rgbleds', response_data['data']['type'])
self.assertIn('success', response_data['status'])
def test_patch_device_invalid_attribute(self):
device = create_rgb_device()
patch_data = {'purple': 225}
with self.client:
response = self.client.patch(
'/api/devices/1',
data=json.dumps(patch_data),
content_type='application/json'
)
response_data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('fail', response_data['status'])
self.assertIn('Attribute does not exist', response_data['message'])
|
normal
|
{
"blob_id": "332e2945e34c861b2132f6b42ef59416a38455a5",
"index": 2542,
"step-1": "<mask token>\n\n\nclass TestRoomService(BaseTestCase):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TestDeviceService(BaseTestCase):\n \"\"\"Test the device service\"\"\"\n\n def test_get_device(self):\n device = create_simple_device()\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(2, data['data']['id'])\n self.assertEqual('LED1', data['data']['device_name'])\n self.assertEqual('simpledevices', data['data']['type'])\n self.assertIn('success', data['status'])\n\n def test_get_invalid_device(self):\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertIn('fail', data['status'])\n\n def test_patch_device(self):\n device = create_rgb_device()\n patch_data = {'red': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response_data['data']['device_name'], 'LED Strip1'\n )\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_data['data']['red'], 225)\n self.assertEqual('rgbleds', response_data['data']['type'])\n self.assertIn('success', response_data['status'])\n\n def test_patch_device_invalid_attribute(self):\n device = create_rgb_device()\n patch_data = {'purple': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertIn('fail', response_data['status'])\n self.assertIn('Attribute does not exist', response_data['message'])\n",
"step-2": "<mask token>\n\n\nclass TestRoomService(BaseTestCase):\n <mask token>\n\n def test_get_room(self):\n room = create_room()\n with self.client:\n response = self.client.get('/api/rooms/1')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(1, data['data']['id'])\n self.assertEqual('Living Room', data['data']['room_name'])\n self.assertIn('success', data['status'])\n <mask token>\n\n def test_get_all_rooms(self):\n room1 = create_room()\n room2 = create_room(room_name='Kitchen')\n with self.client:\n response = self.client.get('api/rooms')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(data['data']['rooms']), 2)\n self.assertEqual(data['data']['rooms'][0]['room_name'],\n 'Living Room')\n self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')\n self.assertIn('success', data['status'])\n\n\nclass TestDeviceService(BaseTestCase):\n \"\"\"Test the device service\"\"\"\n\n def test_get_device(self):\n device = create_simple_device()\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(2, data['data']['id'])\n self.assertEqual('LED1', data['data']['device_name'])\n self.assertEqual('simpledevices', data['data']['type'])\n self.assertIn('success', data['status'])\n\n def test_get_invalid_device(self):\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertIn('fail', data['status'])\n\n def test_patch_device(self):\n device = create_rgb_device()\n patch_data = {'red': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response_data['data']['device_name'], 'LED Strip1'\n )\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_data['data']['red'], 225)\n self.assertEqual('rgbleds', response_data['data']['type'])\n self.assertIn('success', response_data['status'])\n\n def test_patch_device_invalid_attribute(self):\n device = create_rgb_device()\n patch_data = {'purple': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertIn('fail', response_data['status'])\n self.assertIn('Attribute does not exist', response_data['message'])\n",
"step-3": "<mask token>\n\n\nclass TestRoomService(BaseTestCase):\n \"\"\"Test the room service\"\"\"\n\n def test_get_room(self):\n room = create_room()\n with self.client:\n response = self.client.get('/api/rooms/1')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(1, data['data']['id'])\n self.assertEqual('Living Room', data['data']['room_name'])\n self.assertIn('success', data['status'])\n\n def test_invalid_room(self):\n with self.client:\n response = self.client.get('api/rooms/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertIn('fail', data['status'])\n\n def test_get_all_rooms(self):\n room1 = create_room()\n room2 = create_room(room_name='Kitchen')\n with self.client:\n response = self.client.get('api/rooms')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(data['data']['rooms']), 2)\n self.assertEqual(data['data']['rooms'][0]['room_name'],\n 'Living Room')\n self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')\n self.assertIn('success', data['status'])\n\n\nclass TestDeviceService(BaseTestCase):\n \"\"\"Test the device service\"\"\"\n\n def test_get_device(self):\n device = create_simple_device()\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(2, data['data']['id'])\n self.assertEqual('LED1', data['data']['device_name'])\n self.assertEqual('simpledevices', data['data']['type'])\n self.assertIn('success', data['status'])\n\n def test_get_invalid_device(self):\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertIn('fail', data['status'])\n\n def test_patch_device(self):\n device = create_rgb_device()\n patch_data = {'red': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response_data['data']['device_name'], 'LED Strip1'\n )\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_data['data']['red'], 225)\n self.assertEqual('rgbleds', response_data['data']['type'])\n self.assertIn('success', response_data['status'])\n\n def test_patch_device_invalid_attribute(self):\n device = create_rgb_device()\n patch_data = {'purple': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertIn('fail', response_data['status'])\n self.assertIn('Attribute does not exist', response_data['message'])\n",
"step-4": "import unittest\nimport json\nfrom app.tests.base import BaseTestCase\nfrom app import db\nfrom app.tests.utils import create_room, create_simple_device, create_rgb_device\n\n\nclass TestRoomService(BaseTestCase):\n \"\"\"Test the room service\"\"\"\n\n def test_get_room(self):\n room = create_room()\n with self.client:\n response = self.client.get('/api/rooms/1')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(1, data['data']['id'])\n self.assertEqual('Living Room', data['data']['room_name'])\n self.assertIn('success', data['status'])\n\n def test_invalid_room(self):\n with self.client:\n response = self.client.get('api/rooms/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertIn('fail', data['status'])\n\n def test_get_all_rooms(self):\n room1 = create_room()\n room2 = create_room(room_name='Kitchen')\n with self.client:\n response = self.client.get('api/rooms')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(len(data['data']['rooms']), 2)\n self.assertEqual(data['data']['rooms'][0]['room_name'],\n 'Living Room')\n self.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')\n self.assertIn('success', data['status'])\n\n\nclass TestDeviceService(BaseTestCase):\n \"\"\"Test the device service\"\"\"\n\n def test_get_device(self):\n device = create_simple_device()\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 200)\n self.assertEqual(2, data['data']['id'])\n self.assertEqual('LED1', data['data']['device_name'])\n self.assertEqual('simpledevices', data['data']['type'])\n self.assertIn('success', data['status'])\n\n def test_get_invalid_device(self):\n with self.client:\n response = self.client.get('api/devices/2')\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 404)\n self.assertIn('fail', data['status'])\n\n def test_patch_device(self):\n device = create_rgb_device()\n patch_data = {'red': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response_data['data']['device_name'], 'LED Strip1'\n )\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_data['data']['red'], 225)\n self.assertEqual('rgbleds', response_data['data']['type'])\n self.assertIn('success', response_data['status'])\n\n def test_patch_device_invalid_attribute(self):\n device = create_rgb_device()\n patch_data = {'purple': 225}\n with self.client:\n response = self.client.patch('/api/devices/1', data=json.dumps(\n patch_data), content_type='application/json')\n response_data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertIn('fail', response_data['status'])\n self.assertIn('Attribute does not exist', response_data['message'])\n",
"step-5": "import unittest\nimport json\n\nfrom app.tests.base import BaseTestCase\nfrom app import db\nfrom app.tests.utils import create_room, create_simple_device, create_rgb_device\n\nclass TestRoomService(BaseTestCase):\n\t\"\"\"Test the room service\"\"\"\n\n\tdef test_get_room(self):\n\t\troom = create_room()\n\n\t\twith self.client:\n\t\t\tresponse = self.client.get('/api/rooms/1')\n\t\t\tdata = json.loads(response.data.decode())\n\t\t\tself.assertEqual(response.status_code, 200)\n\t\t\tself.assertEqual(1, data['data']['id'])\n\t\t\tself.assertEqual('Living Room', data['data']['room_name'])\n\t\t\tself.assertIn('success', data['status'])\n\n\tdef test_invalid_room(self):\n\t\twith self.client:\n\t\t\tresponse = self.client.get('api/rooms/2')\n\t\t\tdata = json.loads(response.data.decode())\n\t\t\tself.assertEqual(response.status_code, 404)\n\t\t\tself.assertIn('fail', data['status'])\n\n\tdef test_get_all_rooms(self):\n\t\troom1 = create_room()\n\t\troom2 = create_room(room_name=\"Kitchen\")\n\n\t\twith self.client:\n\t\t\tresponse = self.client.get('api/rooms')\n\t\t\tdata = json.loads(response.data.decode())\n\n\t\t\tself.assertEqual(response.status_code, 200)\n\t\t\tself.assertEqual(len(data['data']['rooms']), 2)\n\t\t\tself.assertEqual(data['data']['rooms'][0]['room_name'], 'Living Room')\n\t\t\tself.assertEqual(data['data']['rooms'][1]['room_name'], 'Kitchen')\n\t\t\tself.assertIn('success', data['status'])\n\nclass TestDeviceService(BaseTestCase):\n\t\"\"\"Test the device service\"\"\"\n\n\tdef test_get_device(self):\n\t\tdevice = create_simple_device()\n\n\t\twith self.client:\n\t\t\tresponse = self.client.get('api/devices/2')\n\t\t\tdata = json.loads(response.data.decode())\n\n\t\t\tself.assertEqual(response.status_code, 200)\n\t\t\tself.assertEqual(2, data['data']['id'])\n\t\t\tself.assertEqual('LED1', data['data']['device_name'])\n\t\t\tself.assertEqual('simpledevices', data['data']['type'])\n\t\t\tself.assertIn('success', data['status'])\n\n\tdef test_get_invalid_device(self):\n\t\twith self.client:\n\t\t\tresponse = self.client.get('api/devices/2')\n\t\t\tdata = json.loads(response.data.decode())\n\t\t\tself.assertEqual(response.status_code, 404)\n\t\t\tself.assertIn('fail', data['status'])\n\n\n\tdef test_patch_device(self):\n\t\tdevice = create_rgb_device()\n\t\tpatch_data = {'red': 225}\n\n\t\twith self.client:\n\t\t\tresponse = self.client.patch(\n\t\t\t\t'/api/devices/1',\n\t\t\t\tdata=json.dumps(patch_data),\n\t\t\t\tcontent_type='application/json'\n\t\t\t)\n\t\t\tresponse_data = json.loads(response.data.decode())\n\n\t\t\tself.assertEqual(response_data['data']['device_name'], 'LED Strip1')\n\t\t\tself.assertEqual(response.status_code, 200)\n\t\t\tself.assertEqual(response_data['data']['red'], 225)\n\t\t\tself.assertEqual('rgbleds', response_data['data']['type'])\n\t\t\tself.assertIn('success', response_data['status'])\n\n\tdef test_patch_device_invalid_attribute(self):\n\t\tdevice = create_rgb_device()\n\t\tpatch_data = {'purple': 225}\n\n\t\twith self.client:\n\t\t\tresponse = self.client.patch(\n\t\t\t\t'/api/devices/1',\n\t\t\t\tdata=json.dumps(patch_data),\n\t\t\t\tcontent_type='application/json'\n\t\t\t)\n\t\t\tresponse_data = json.loads(response.data.decode())\n\n\t\t\tself.assertEqual(response.status_code, 400)\n\t\t\tself.assertIn('fail', response_data['status'])\n\t\t\tself.assertIn('Attribute does not exist', response_data['message'])\n\n\n",
"step-ids": [
7,
9,
11,
12,
13
]
}
|
[
7,
9,
11,
12,
13
] |
# Copyright Materialize, Inc. and contributors. All rights reserved.
#
# Use of this software is governed by the Business Source License
# included in the LICENSE file at the root of this repository.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.
import re
from pathlib import Path
from typing import Dict, List, Optional, Type
from parameterized import parameterized_class # type: ignore
import materialize.optbench
import materialize.optbench.sql
from materialize.feature_benchmark.action import Action
from materialize.feature_benchmark.executor import Executor
from materialize.feature_benchmark.measurement_source import (
MeasurementSource,
Timestamp,
)
from materialize.feature_benchmark.scenario import Scenario
class OptbenchInit(Action):
def __init__(self, scenario: str, no_indexes: bool = False) -> None:
self._executor: Optional[Executor] = None
self._scenario = scenario
self._no_indexes = no_indexes
def run(self, executor: Optional[Executor] = None) -> None:
e = executor or self._executor
statements = materialize.optbench.sql.parse_from_file(
Path(f"misc/python/materialize/optbench/schema/{self._scenario}.sql")
)
if self._no_indexes:
idx_re = re.compile(r"(create|create\s+default|drop)\s+index\s+")
statements = [
statement
for statement in statements
if not idx_re.match(statement.lower())
]
e._composition.sql("\n".join(statements)) # type: ignore
class OptbenchRun(MeasurementSource):
def __init__(self, optbench_scenario: str, query: int):
self._executor: Optional[Executor] = None
self._optbench_scenario = optbench_scenario
self._query = query
def run(self, executor: Optional[Executor] = None) -> List[Timestamp]:
assert not (executor is None and self._executor is None)
assert not (executor is not None and self._executor is not None)
e = executor or self._executor
queries = materialize.optbench.sql.parse_from_file(
Path(
f"misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql"
)
)
assert 1 <= self._query <= len(queries)
query = queries[self._query - 1]
explain_query = materialize.optbench.sql.Query(query).explain(timing=True)
explain_output = materialize.optbench.sql.ExplainOutput(
e._composition.sql_query(explain_query)[0][0] # type: ignore
)
# Optimization time is in microseconds, divide by 3 to get a more readable number (still in wrong unit)
timestamps = [0, float(explain_output.optimization_time()) / 3] # type: ignore
return timestamps
def name_with_query(cls: Type["OptbenchTPCH"], num: int, params_dict: Dict) -> str:
return f"OptbenchTPCHQ{params_dict['QUERY']:02d}"
@parameterized_class(
[{"QUERY": i} for i in range(1, 23)], class_name_func=name_with_query
)
class OptbenchTPCH(Scenario):
"""Run optbench TPCH for optimizer benchmarks"""
QUERY = 1
def init(self) -> List[Action]:
return [OptbenchInit("tpch")]
def benchmark(self) -> MeasurementSource:
return OptbenchRun("tpch", self.QUERY)
|
normal
|
{
"blob_id": "97ca134ffce404f4b2bc7352d4aac73a7bb764bd",
"index": 5708,
"step-1": "<mask token>\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n <mask token>\n\n\n<mask token>\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) ->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-2": "<mask token>\n\n\nclass OptbenchInit(Action):\n <mask token>\n <mask token>\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not None)\n e = executor or self._executor\n queries = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'\n ))\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing\n =True)\n explain_output = materialize.optbench.sql.ExplainOutput(e.\n _composition.sql_query(explain_query)[0][0])\n timestamps = [0, float(explain_output.optimization_time()) / 3]\n return timestamps\n\n\n<mask token>\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) ->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-3": "<mask token>\n\n\nclass OptbenchInit(Action):\n\n def __init__(self, scenario: str, no_indexes: bool=False) ->None:\n self._executor: Optional[Executor] = None\n self._scenario = scenario\n self._no_indexes = no_indexes\n\n def run(self, executor: Optional[Executor]=None) ->None:\n e = executor or self._executor\n statements = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/schema/{self._scenario}.sql'))\n if self._no_indexes:\n idx_re = re.compile('(create|create\\\\s+default|drop)\\\\s+index\\\\s+')\n statements = [statement for statement in statements if not\n idx_re.match(statement.lower())]\n e._composition.sql('\\n'.join(statements))\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not None)\n e = executor or self._executor\n queries = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'\n ))\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing\n =True)\n explain_output = materialize.optbench.sql.ExplainOutput(e.\n _composition.sql_query(explain_query)[0][0])\n timestamps = [0, float(explain_output.optimization_time()) / 3]\n return timestamps\n\n\ndef name_with_query(cls: Type['OptbenchTPCH'], num: int, params_dict: Dict\n ) ->str:\n return f\"OptbenchTPCHQ{params_dict['QUERY']:02d}\"\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) ->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-4": "import re\nfrom pathlib import Path\nfrom typing import Dict, List, Optional, Type\nfrom parameterized import parameterized_class\nimport materialize.optbench\nimport materialize.optbench.sql\nfrom materialize.feature_benchmark.action import Action\nfrom materialize.feature_benchmark.executor import Executor\nfrom materialize.feature_benchmark.measurement_source import MeasurementSource, Timestamp\nfrom materialize.feature_benchmark.scenario import Scenario\n\n\nclass OptbenchInit(Action):\n\n def __init__(self, scenario: str, no_indexes: bool=False) ->None:\n self._executor: Optional[Executor] = None\n self._scenario = scenario\n self._no_indexes = no_indexes\n\n def run(self, executor: Optional[Executor]=None) ->None:\n e = executor or self._executor\n statements = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/schema/{self._scenario}.sql'))\n if self._no_indexes:\n idx_re = re.compile('(create|create\\\\s+default|drop)\\\\s+index\\\\s+')\n statements = [statement for statement in statements if not\n idx_re.match(statement.lower())]\n e._composition.sql('\\n'.join(statements))\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not None)\n e = executor or self._executor\n queries = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'\n ))\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing\n =True)\n explain_output = materialize.optbench.sql.ExplainOutput(e.\n _composition.sql_query(explain_query)[0][0])\n timestamps = [0, float(explain_output.optimization_time()) / 3]\n return timestamps\n\n\ndef name_with_query(cls: Type['OptbenchTPCH'], num: int, params_dict: Dict\n ) ->str:\n return f\"OptbenchTPCHQ{params_dict['QUERY']:02d}\"\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) ->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-5": "# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\n\nimport re\nfrom pathlib import Path\nfrom typing import Dict, List, Optional, Type\n\nfrom parameterized import parameterized_class # type: ignore\n\nimport materialize.optbench\nimport materialize.optbench.sql\nfrom materialize.feature_benchmark.action import Action\nfrom materialize.feature_benchmark.executor import Executor\nfrom materialize.feature_benchmark.measurement_source import (\n MeasurementSource,\n Timestamp,\n)\nfrom materialize.feature_benchmark.scenario import Scenario\n\n\nclass OptbenchInit(Action):\n def __init__(self, scenario: str, no_indexes: bool = False) -> None:\n self._executor: Optional[Executor] = None\n self._scenario = scenario\n self._no_indexes = no_indexes\n\n def run(self, executor: Optional[Executor] = None) -> None:\n e = executor or self._executor\n statements = materialize.optbench.sql.parse_from_file(\n Path(f\"misc/python/materialize/optbench/schema/{self._scenario}.sql\")\n )\n if self._no_indexes:\n idx_re = re.compile(r\"(create|create\\s+default|drop)\\s+index\\s+\")\n statements = [\n statement\n for statement in statements\n if not idx_re.match(statement.lower())\n ]\n e._composition.sql(\"\\n\".join(statements)) # type: ignore\n\n\nclass OptbenchRun(MeasurementSource):\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor] = None) -> List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not None)\n e = executor or self._executor\n\n queries = materialize.optbench.sql.parse_from_file(\n Path(\n f\"misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql\"\n )\n )\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing=True)\n explain_output = materialize.optbench.sql.ExplainOutput(\n e._composition.sql_query(explain_query)[0][0] # type: ignore\n )\n # Optimization time is in microseconds, divide by 3 to get a more readable number (still in wrong unit)\n timestamps = [0, float(explain_output.optimization_time()) / 3] # type: ignore\n return timestamps\n\n\ndef name_with_query(cls: Type[\"OptbenchTPCH\"], num: int, params_dict: Dict) -> str:\n return f\"OptbenchTPCHQ{params_dict['QUERY']:02d}\"\n\n\n@parameterized_class(\n [{\"QUERY\": i} for i in range(1, 23)], class_name_func=name_with_query\n)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n\n QUERY = 1\n\n def init(self) -> List[Action]:\n return [OptbenchInit(\"tpch\")]\n\n def benchmark(self) -> MeasurementSource:\n return OptbenchRun(\"tpch\", self.QUERY)\n",
"step-ids": [
7,
9,
12,
13,
14
]
}
|
[
7,
9,
12,
13,
14
] |
# Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gapit_test_framework import gapit_test, require, require_equal, require_true
from gapit_test_framework import require_not_equal, little_endian_bytes_to_int
from gapit_test_framework import GapitTest, get_read_offset_function
import gapit_test_framework
from vulkan_constants import *
@gapit_test("vkCmdCopyQueryPoolResults_test")
class AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):
def expect(self):
"""1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4 stride: 4 and dstOffset: 16."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 1))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(16, copy_query_pool_results.int_dstOffset)
require_equal(4, copy_query_pool_results.int_stride)
require_equal(0, copy_query_pool_results.int_flags)
@gapit_test("vkCmdCopyQueryPoolResults_test")
class FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest):
def expect(self):
"""2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,
queryCount: 4, stride: 8 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 2))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(4, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(8, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,
copy_query_pool_results.int_flags)
@gapit_test("vkCmdCopyQueryPoolResults_test")
class AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(GapitTest):
def expect(self):
"""3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4, stride: 12 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 3))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(12, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_PARTIAL_BIT
| VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
copy_query_pool_results.int_flags)
|
normal
|
{
"blob_id": "c2f6fa4d9a6e2ee5f0593bef775ce8f811225613",
"index": 2047,
"step-1": "<mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n <mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-2": "<mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n <mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n\n def expect(self):\n \"\"\"2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-3": "<mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n\n def expect(self):\n \"\"\"1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4 stride: 4 and dstOffset: 16.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 1))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(16, copy_query_pool_results.int_dstOffset)\n require_equal(4, copy_query_pool_results.int_stride)\n require_equal(0, copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n\n def expect(self):\n \"\"\"2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-4": "from gapit_test_framework import gapit_test, require, require_equal, require_true\nfrom gapit_test_framework import require_not_equal, little_endian_bytes_to_int\nfrom gapit_test_framework import GapitTest, get_read_offset_function\nimport gapit_test_framework\nfrom vulkan_constants import *\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n\n def expect(self):\n \"\"\"1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4 stride: 4 and dstOffset: 16.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 1))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(16, copy_query_pool_results.int_dstOffset)\n require_equal(4, copy_query_pool_results.int_stride)\n require_equal(0, copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n\n def expect(self):\n \"\"\"2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-5": "# Copyright 2017 Google Inc.\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom gapit_test_framework import gapit_test, require, require_equal, require_true\nfrom gapit_test_framework import require_not_equal, little_endian_bytes_to_int\nfrom gapit_test_framework import GapitTest, get_read_offset_function\nimport gapit_test_framework\nfrom vulkan_constants import *\n\n\n@gapit_test(\"vkCmdCopyQueryPoolResults_test\")\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n\n def expect(self):\n \"\"\"1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4 stride: 4 and dstOffset: 16.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n \"vkCmdCopyQueryPoolResults\", 1))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(16, copy_query_pool_results.int_dstOffset)\n require_equal(4, copy_query_pool_results.int_stride)\n require_equal(0, copy_query_pool_results.int_flags)\n\n\n@gapit_test(\"vkCmdCopyQueryPoolResults_test\")\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest):\n\n def expect(self):\n \"\"\"2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n \"vkCmdCopyQueryPoolResults\", 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test(\"vkCmdCopyQueryPoolResults_test\")\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(GapitTest):\n\n def expect(self):\n \"\"\"3. 
Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n \"vkCmdCopyQueryPoolResults\", 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT\n | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,\n copy_query_pool_results.int_flags)\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
import random
def take_second(element):
return element[1]
import string
def get_random_name():
name = ""
for i in range(random.randint(5, 15)):
name += random.choice(string.ascii_letters)
return name
imenik = [(777, "zejneba"), (324, "fahro"), (23, "fatih"), (2334, "muamer"), (435, "kerim"),(4568,"zzzzzzz")]
print(sorted(imenik,key=take_second))
for i in range(100000):
novi_element = (random.randint(1, 10000), get_random_name())
imenik.append(novi_element)
imenik.sort(key=take_second)
print(imenik)
name = input('enter a name: ')
min_index = 0
max_index = len(imenik)
previous_guess_name = ""
counter = 0
while True:
mid_index = (max_index + min_index) // 2
guess_score = imenik[mid_index][0]
guess_name = imenik[mid_index][1]
if guess_name == previous_guess_name:
print("Not found")
break
if guess_name == name:
print("your score is", guess_score)
break
elif name > guess_name:
min_index = mid_index
else:
max_index = mid_index
previous_guess_name = guess_name
counter += 1
print("Number of comparisons", counter)
print("after")
found = False
counter = 0
for i in range(len(imenik)):
counter += 1
if imenik[i][1] == name:
print("your score is", guess_score)
found = True
break
if not found:
print("Not found")
print("Number of comparisons after", counter)
|
normal
|
{
"blob_id": "21ef8103a5880a07d8c681b2367c2beef727260f",
"index": 6536,
"step-1": "<mask token>\n\n\ndef take_second(element):\n return element[1]\n\n\n<mask token>\n\n\ndef get_random_name():\n name = ''\n for i in range(random.randint(5, 15)):\n name += random.choice(string.ascii_letters)\n return name\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef take_second(element):\n return element[1]\n\n\n<mask token>\n\n\ndef get_random_name():\n name = ''\n for i in range(random.randint(5, 15)):\n name += random.choice(string.ascii_letters)\n return name\n\n\n<mask token>\nprint(sorted(imenik, key=take_second))\nfor i in range(100000):\n novi_element = random.randint(1, 10000), get_random_name()\n imenik.append(novi_element)\nimenik.sort(key=take_second)\nprint(imenik)\n<mask token>\nwhile True:\n mid_index = (max_index + min_index) // 2\n guess_score = imenik[mid_index][0]\n guess_name = imenik[mid_index][1]\n if guess_name == previous_guess_name:\n print('Not found')\n break\n if guess_name == name:\n print('your score is', guess_score)\n break\n elif name > guess_name:\n min_index = mid_index\n else:\n max_index = mid_index\n previous_guess_name = guess_name\n counter += 1\nprint('Number of comparisons', counter)\nprint('after')\n<mask token>\nfor i in range(len(imenik)):\n counter += 1\n if imenik[i][1] == name:\n print('your score is', guess_score)\n found = True\n break\nif not found:\n print('Not found')\nprint('Number of comparisons after', counter)\n",
"step-3": "<mask token>\n\n\ndef take_second(element):\n return element[1]\n\n\n<mask token>\n\n\ndef get_random_name():\n name = ''\n for i in range(random.randint(5, 15)):\n name += random.choice(string.ascii_letters)\n return name\n\n\nimenik = [(777, 'zejneba'), (324, 'fahro'), (23, 'fatih'), (2334, 'muamer'),\n (435, 'kerim'), (4568, 'zzzzzzz')]\nprint(sorted(imenik, key=take_second))\nfor i in range(100000):\n novi_element = random.randint(1, 10000), get_random_name()\n imenik.append(novi_element)\nimenik.sort(key=take_second)\nprint(imenik)\nname = input('enter a name: ')\nmin_index = 0\nmax_index = len(imenik)\nprevious_guess_name = ''\ncounter = 0\nwhile True:\n mid_index = (max_index + min_index) // 2\n guess_score = imenik[mid_index][0]\n guess_name = imenik[mid_index][1]\n if guess_name == previous_guess_name:\n print('Not found')\n break\n if guess_name == name:\n print('your score is', guess_score)\n break\n elif name > guess_name:\n min_index = mid_index\n else:\n max_index = mid_index\n previous_guess_name = guess_name\n counter += 1\nprint('Number of comparisons', counter)\nprint('after')\nfound = False\ncounter = 0\nfor i in range(len(imenik)):\n counter += 1\n if imenik[i][1] == name:\n print('your score is', guess_score)\n found = True\n break\nif not found:\n print('Not found')\nprint('Number of comparisons after', counter)\n",
"step-4": "import random\n\n\ndef take_second(element):\n return element[1]\n\n\nimport string\n\n\ndef get_random_name():\n name = ''\n for i in range(random.randint(5, 15)):\n name += random.choice(string.ascii_letters)\n return name\n\n\nimenik = [(777, 'zejneba'), (324, 'fahro'), (23, 'fatih'), (2334, 'muamer'),\n (435, 'kerim'), (4568, 'zzzzzzz')]\nprint(sorted(imenik, key=take_second))\nfor i in range(100000):\n novi_element = random.randint(1, 10000), get_random_name()\n imenik.append(novi_element)\nimenik.sort(key=take_second)\nprint(imenik)\nname = input('enter a name: ')\nmin_index = 0\nmax_index = len(imenik)\nprevious_guess_name = ''\ncounter = 0\nwhile True:\n mid_index = (max_index + min_index) // 2\n guess_score = imenik[mid_index][0]\n guess_name = imenik[mid_index][1]\n if guess_name == previous_guess_name:\n print('Not found')\n break\n if guess_name == name:\n print('your score is', guess_score)\n break\n elif name > guess_name:\n min_index = mid_index\n else:\n max_index = mid_index\n previous_guess_name = guess_name\n counter += 1\nprint('Number of comparisons', counter)\nprint('after')\nfound = False\ncounter = 0\nfor i in range(len(imenik)):\n counter += 1\n if imenik[i][1] == name:\n print('your score is', guess_score)\n found = True\n break\nif not found:\n print('Not found')\nprint('Number of comparisons after', counter)\n",
"step-5": "import random\n\n\ndef take_second(element):\n return element[1]\n\n\nimport string\n\n\ndef get_random_name():\n name = \"\"\n for i in range(random.randint(5, 15)):\n name += random.choice(string.ascii_letters)\n return name\n\n\nimenik = [(777, \"zejneba\"), (324, \"fahro\"), (23, \"fatih\"), (2334, \"muamer\"), (435, \"kerim\"),(4568,\"zzzzzzz\")]\n\nprint(sorted(imenik,key=take_second))\nfor i in range(100000):\n novi_element = (random.randint(1, 10000), get_random_name())\n imenik.append(novi_element)\n\nimenik.sort(key=take_second)\nprint(imenik)\n\nname = input('enter a name: ')\n\nmin_index = 0\nmax_index = len(imenik)\n\nprevious_guess_name = \"\"\ncounter = 0\nwhile True:\n\n mid_index = (max_index + min_index) // 2\n guess_score = imenik[mid_index][0]\n guess_name = imenik[mid_index][1]\n\n if guess_name == previous_guess_name:\n print(\"Not found\")\n break\n\n if guess_name == name:\n print(\"your score is\", guess_score)\n break\n elif name > guess_name:\n min_index = mid_index\n else:\n max_index = mid_index\n\n previous_guess_name = guess_name\n counter += 1\n\nprint(\"Number of comparisons\", counter)\n\nprint(\"after\")\nfound = False\ncounter = 0\nfor i in range(len(imenik)):\n counter += 1\n if imenik[i][1] == name:\n print(\"your score is\", guess_score)\n found = True\n break\n\nif not found:\n print(\"Not found\")\n\nprint(\"Number of comparisons after\", counter)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class JiayuanItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
    person_id = scrapy.Field()  # unique person ID
    user_info = scrapy.Field()  # age and home city shown on the search page
    main_url = scrapy.Field()  # profile entry URL from the search page
    nick_name = scrapy.Field()  # nickname shown on the search page
    heigth = scrapy.Field()  # height shown on the search page
class PersonInfo(scrapy.Item):
    # person_info: person detail table
    person_id = scrapy.Field()
    buy_car = scrapy.Field()
    address = scrapy.Field()
class OtherItem(scrapy.Item):
    # define the fields for your item here like:
    # name = scrapy.Field()
    '''
    A second item type can be defined here
    '''
    user_info = scrapy.Field()  # age and home city shown on the search page
    main_url = scrapy.Field()  # profile entry URL from the search page
    nick_name = scrapy.Field()  # nickname shown on the search page
    heigth = scrapy.Field()  # height shown on the search page
|
normal
|
{
"blob_id": "9dbadb2421b04961e8e813831d06abc1ff301566",
"index": 3283,
"step-1": "<mask token>\n\n\nclass PersonInfo(scrapy.Item):\n person_id = scrapy.Field()\n buy_car = scrapy.Field()\n address = scrapy.Field()\n\n\nclass OtherItem(scrapy.Item):\n \"\"\"\n 可以定义另外一个item\n \"\"\"\n user_info = scrapy.Field()\n main_url = scrapy.Field()\n nick_name = scrapy.Field()\n heigth = scrapy.Field()\n",
"step-2": "<mask token>\n\n\nclass JiayuanItem(scrapy.Item):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass PersonInfo(scrapy.Item):\n person_id = scrapy.Field()\n buy_car = scrapy.Field()\n address = scrapy.Field()\n\n\nclass OtherItem(scrapy.Item):\n \"\"\"\n 可以定义另外一个item\n \"\"\"\n user_info = scrapy.Field()\n main_url = scrapy.Field()\n nick_name = scrapy.Field()\n heigth = scrapy.Field()\n",
"step-3": "<mask token>\n\n\nclass JiayuanItem(scrapy.Item):\n person_id = scrapy.Field()\n user_info = scrapy.Field()\n main_url = scrapy.Field()\n nick_name = scrapy.Field()\n heigth = scrapy.Field()\n\n\nclass PersonInfo(scrapy.Item):\n person_id = scrapy.Field()\n buy_car = scrapy.Field()\n address = scrapy.Field()\n\n\nclass OtherItem(scrapy.Item):\n \"\"\"\n 可以定义另外一个item\n \"\"\"\n user_info = scrapy.Field()\n main_url = scrapy.Field()\n nick_name = scrapy.Field()\n heigth = scrapy.Field()\n",
"step-4": "import scrapy\n\n\nclass JiayuanItem(scrapy.Item):\n person_id = scrapy.Field()\n user_info = scrapy.Field()\n main_url = scrapy.Field()\n nick_name = scrapy.Field()\n heigth = scrapy.Field()\n\n\nclass PersonInfo(scrapy.Item):\n person_id = scrapy.Field()\n buy_car = scrapy.Field()\n address = scrapy.Field()\n\n\nclass OtherItem(scrapy.Item):\n \"\"\"\n 可以定义另外一个item\n \"\"\"\n user_info = scrapy.Field()\n main_url = scrapy.Field()\n nick_name = scrapy.Field()\n heigth = scrapy.Field()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\r\n# Define here the models for your scraped items\r\n#\r\n# See documentation in:\r\n# https://doc.scrapy.org/en/latest/topics/items.html\r\n\r\nimport scrapy\r\n\r\n\r\nclass JiayuanItem(scrapy.Item):\r\n # define the fields for your item here like:\r\n # name = scrapy.Field()\r\n person_id = scrapy.Field()#人员唯一ID\r\n user_info = scrapy.Field()#搜索页面中的年龄与所属城市\r\n main_url = scrapy.Field()#搜索页面中人员入口url\r\n nick_name = scrapy.Field()#搜索页面中人员昵称\r\n heigth = scrapy.Field()#搜索页面中身高\r\n \r\nclass PersonInfo((scrapy.Item)):\r\n #person_info人员信息表\r\n person_id = scrapy.Field()\r\n buy_car = scrapy.Field()\r\n address = scrapy.Field()\r\n \r\nclass OtherItem(scrapy.Item):\r\n # define the fields for your item here like:\r\n # name = scrapy.Field()\r\n '''\r\n 可以定义另外一个item\r\n '''\r\n user_info = scrapy.Field()#搜索页面中的年龄与所属城市\r\n main_url = scrapy.Field()#搜索页面中人员入口url\r\n nick_name = scrapy.Field()#搜索页面中人员昵称\r\n heigth = scrapy.Field()#搜索页面中身高",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
from bbdd import *
def usuario():
global usser
usser=input("Introduce un usuario : ")
if len(usser)<5 or len(usser)>15:
print("El usuario debe tener entre 5 y 15 caracteres")
usuario()
elif usser.isalnum()==False:
print("Los valores del usurio deben ser únicamente letras o números")
usuario()
else:
print(True)
def contraseña():
global passw
passw=input("Introduce contraseña: ")
if len(passw)<=9:
print("La contraseña debe tener al menos 10 caractéres")
contraseña()
elif passw.isalnum()==True:
print ("La contraseña debe tener al menos un carácter no alfanumérico")
contraseña()
elif passw.lower() == passw:
print("Debe haber por lo menos una mayúscula")
contraseña()
elif passw.upper()==passw:
print("Debe haber por lo menos una minúscula")
contraseña()
for i in passw:
if i==" ":
print("La contraseña no debe tener espacios en blanco")
contraseña()
print(True)
|
normal
|
{
"blob_id": "ce75c23c6b0862dde797225f53c900b4ebc56428",
"index": 514,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef usuario():\n global usser\n usser = input('Introduce un usuario : ')\n if len(usser) < 5 or len(usser) > 15:\n print('El usuario debe tener entre 5 y 15 caracteres')\n usuario()\n elif usser.isalnum() == False:\n print('Los valores del usurio deben ser únicamente letras o números')\n usuario()\n else:\n print(True)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef usuario():\n global usser\n usser = input('Introduce un usuario : ')\n if len(usser) < 5 or len(usser) > 15:\n print('El usuario debe tener entre 5 y 15 caracteres')\n usuario()\n elif usser.isalnum() == False:\n print('Los valores del usurio deben ser únicamente letras o números')\n usuario()\n else:\n print(True)\n\n\ndef contraseña():\n global passw\n passw = input('Introduce contraseña: ')\n if len(passw) <= 9:\n print('La contraseña debe tener al menos 10 caractéres')\n contraseña()\n elif passw.isalnum() == True:\n print('La contraseña debe tener al menos un carácter no alfanumérico')\n contraseña()\n elif passw.lower() == passw:\n print('Debe haber por lo menos una mayúscula')\n contraseña()\n elif passw.upper() == passw:\n print('Debe haber por lo menos una minúscula')\n contraseña()\n for i in passw:\n if i == ' ':\n print('La contraseña no debe tener espacios en blanco')\n contraseña()\n print(True)\n",
"step-4": "from bbdd import *\n\n\ndef usuario():\n global usser\n usser = input('Introduce un usuario : ')\n if len(usser) < 5 or len(usser) > 15:\n print('El usuario debe tener entre 5 y 15 caracteres')\n usuario()\n elif usser.isalnum() == False:\n print('Los valores del usurio deben ser únicamente letras o números')\n usuario()\n else:\n print(True)\n\n\ndef contraseña():\n global passw\n passw = input('Introduce contraseña: ')\n if len(passw) <= 9:\n print('La contraseña debe tener al menos 10 caractéres')\n contraseña()\n elif passw.isalnum() == True:\n print('La contraseña debe tener al menos un carácter no alfanumérico')\n contraseña()\n elif passw.lower() == passw:\n print('Debe haber por lo menos una mayúscula')\n contraseña()\n elif passw.upper() == passw:\n print('Debe haber por lo menos una minúscula')\n contraseña()\n for i in passw:\n if i == ' ':\n print('La contraseña no debe tener espacios en blanco')\n contraseña()\n print(True)\n",
"step-5": "from bbdd import *\n\n\ndef usuario():\n global usser\n usser=input(\"Introduce un usuario : \")\n if len(usser)<5 or len(usser)>15:\n print(\"El usuario debe tener entre 5 y 15 caracteres\")\n usuario()\n elif usser.isalnum()==False:\n print(\"Los valores del usurio deben ser únicamente letras o números\")\n usuario()\n else:\n print(True)\n\n\n\ndef contraseña():\n global passw\n passw=input(\"Introduce contraseña: \")\n if len(passw)<=9:\n print(\"La contraseña debe tener al menos 10 caractéres\")\n contraseña()\n elif passw.isalnum()==True:\n print (\"La contraseña debe tener al menos un carácter no alfanumérico\")\n contraseña()\n elif passw.lower() == passw:\n print(\"Debe haber por lo menos una mayúscula\")\n contraseña()\n elif passw.upper()==passw:\n print(\"Debe haber por lo menos una minúscula\")\n contraseña()\n\n for i in passw:\n if i==\" \":\n print(\"La contraseña no debe tener espacios en blanco\")\n contraseña()\n print(True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import hashlib
import os
def fileMD(self):
salt_ = os.urandom(32).hex()
hash_object = hashlib.md5()
hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))
print("MD5 Hash: "+hash_object.hexdigest())
|
normal
|
{
"blob_id": "bc9718fa57046888961d1b5245abefa8f752e983",
"index": 8103,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef fileMD(self):\n salt_ = os.urandom(32).hex()\n hash_object = hashlib.md5()\n hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))\n print('MD5 Hash: ' + hash_object.hexdigest())\n",
"step-3": "import hashlib\nimport os\n\n\ndef fileMD(self):\n salt_ = os.urandom(32).hex()\n hash_object = hashlib.md5()\n hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))\n print('MD5 Hash: ' + hash_object.hexdigest())\n",
"step-4": "import hashlib\nimport os\n\n\ndef fileMD(self):\n salt_ = os.urandom(32).hex()\n hash_object = hashlib.md5()\n hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))\n print(\"MD5 Hash: \"+hash_object.hexdigest())",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import json
import os
from django.conf import settings
from django.db import models
from jsonfield import JSONField
class Word(models.Model):
value = models.CharField(
max_length=50,
verbose_name='Слово'
)
spelling = models.CharField(
max_length=250,
verbose_name='Транскрипция'
)
raw_od_article = JSONField(
verbose_name='Сырые данные с OD'
)
is_active = models.BooleanField(
default=True,
verbose_name='Используется'
)
def __str__(self):
return self.value
class Meta:
ordering = ["value"]
verbose_name = "Слово"
verbose_name_plural = "Слова"
class Meaning(models.Model):
word = models.ForeignKey(
Word,
on_delete=models.CASCADE,
verbose_name='Слово'
)
value = models.TextField(
verbose_name='Значение'
)
order = models.PositiveIntegerField(
verbose_name="Порядок",
default=0
)
examples = JSONField(
null=True,
blank=True
)
def __str__(self):
if self.value is None:
return ''
return self.value[:20]
class Meta:
ordering = ["order"]
verbose_name = "Доп. значение"
verbose_name_plural = "Доп. значения"
class Pronunciation(models.Model):
word = models.ForeignKey(
Word,
on_delete=models.CASCADE,
verbose_name='Слово'
)
audio = models.FileField(
upload_to='media/audio',
verbose_name='Произношение'
)
raw_od_data = JSONField(
verbose_name='Сырые данные с OD',
blank=True,
null=True
)
is_active = models.BooleanField(
default=True,
verbose_name='Используется'
)
def __str__(self):
return "Произношение {}".format(self.word)
class Meta:
verbose_name = "Произношение"
verbose_name_plural = "Произношения"
class PronunciationMeta(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class WordLearningState(models.Model):
word = models.ForeignKey(
Word,
on_delete=models.CASCADE,
verbose_name='Слово'
)
user = models.ForeignKey(
"auth.User",
on_delete=models.CASCADE,
verbose_name='Пользователь'
)
is_user_know_meaning = models.BooleanField(
default=False,
verbose_name='Выучил значение'
)
is_user_know_pronunciation = models.BooleanField(
default=False,
verbose_name='Выучил произношение'
)
usage_count = models.PositiveIntegerField(
default=0,
verbose_name='Количество показов'
)
last_usage_date = models.DateTimeField(
auto_now_add=True,
verbose_name='Дата последнего показа'
)
preferred_pronunciation = models.PositiveIntegerField(
default=0,
verbose_name='forvo id препочтительного произношения',
)
training_session = models.BooleanField(
default=False,
blank=False,
verbose_name='Сеанс обучения'
)
def _get_pronunciations_meta(self, word_str):
forvo_meta_path = os.path.join(
settings.BASE_DIR, 'media', 'forvo', '{}.json'.format(word_str)
)
if not os.path.exists(forvo_meta_path):
return
with open(forvo_meta_path, 'r') as f:
data = json.load(f)
return data
def _get_sounds(self, word_str):
ret = []
sounds_path = os.path.join(settings.BASE_DIR, 'media', 'sounds', word_str)
print(sounds_path)
if not os.path.exists(sounds_path):
return []
items = list(os.listdir(sounds_path))
items.sort()
for item in items:
if item.endswith('.mp3'):
ret.append('{}{}/{}/{}'.format(settings.MEDIA_URL, 'sounds', word_str, item))
return ret
def get_pronunciations(self):
word = self.word
forvo_meta = self._get_pronunciations_meta(word.value)
if not forvo_meta:
return []
ret = []
ct = 0
sounds = self._get_sounds(word.value)
slen = len(sounds)
prefered_detected = False
for item in forvo_meta.get('items') or []:
if item.get('code', '') != 'en' or item.get(
'country', '') != 'United States':
continue
if ct > slen-1:
break
sound_file = sounds[ct]
is_best = self.preferred_pronunciation == item['id']
if is_best:
prefered_detected = True
ret.append({
'id': item['id'],
'by': item['username'],
'sex': item['sex'],
'src': sound_file,
'best': is_best
})
ct += 1
if ct == 4:
break
if ret and not prefered_detected:
ret[0]['best'] = True
return ret
def __str__(self):
return "Статистика слова {}".format(self.word)
class Meta:
verbose_name = "Статистика"
verbose_name_plural = "Статистика"
|
normal
|
{
"blob_id": "067e0129b1a9084bbcee28d1973504299b89afdb",
"index": 8911,
"step-1": "<mask token>\n\n\nclass Meaning(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n if self.value is None:\n return ''\n return self.value[:20]\n\n\n class Meta:\n ordering = ['order']\n verbose_name = 'Доп. значение'\n verbose_name_plural = 'Доп. значения'\n\n\nclass Pronunciation(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n audio = models.FileField(upload_to='media/audio', verbose_name=\n 'Произношение')\n raw_od_data = JSONField(verbose_name='Сырые данные с OD', blank=True,\n null=True)\n is_active = models.BooleanField(default=True, verbose_name='Используется')\n\n def __str__(self):\n return 'Произношение {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 'Произношение'\n verbose_name_plural = 'Произношения'\n\n\nclass PronunciationMeta(object):\n\n def __init__(self, **kwargs):\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n\nclass WordLearningState(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n user = models.ForeignKey('auth.User', on_delete=models.CASCADE,\n verbose_name='Пользователь')\n is_user_know_meaning = models.BooleanField(default=False, verbose_name=\n 'Выучил значение')\n is_user_know_pronunciation = models.BooleanField(default=False,\n verbose_name='Выучил произношение')\n usage_count = models.PositiveIntegerField(default=0, verbose_name=\n 'Количество показов')\n last_usage_date = models.DateTimeField(auto_now_add=True, verbose_name=\n 'Дата последнего показа')\n preferred_pronunciation = models.PositiveIntegerField(default=0,\n verbose_name='forvo id препочтительного произношения')\n training_session = models.BooleanField(default=False, blank=False,\n verbose_name='Сеанс обучения')\n\n def _get_pronunciations_meta(self, word_str):\n forvo_meta_path = os.path.join(settings.BASE_DIR, 'media', 'forvo',\n '{}.json'.format(word_str))\n if not os.path.exists(forvo_meta_path):\n return\n with open(forvo_meta_path, 'r') as f:\n data = json.load(f)\n return data\n\n def _get_sounds(self, word_str):\n ret = []\n sounds_path = os.path.join(settings.BASE_DIR, 'media', 'sounds',\n word_str)\n print(sounds_path)\n if not os.path.exists(sounds_path):\n return []\n items = list(os.listdir(sounds_path))\n items.sort()\n for item in items:\n if item.endswith('.mp3'):\n ret.append('{}{}/{}/{}'.format(settings.MEDIA_URL, 'sounds',\n word_str, item))\n return ret\n\n def get_pronunciations(self):\n word = self.word\n forvo_meta = self._get_pronunciations_meta(word.value)\n if not forvo_meta:\n return []\n ret = []\n ct = 0\n sounds = self._get_sounds(word.value)\n slen = len(sounds)\n prefered_detected = False\n for item in (forvo_meta.get('items') or []):\n if item.get('code', '') != 'en' or item.get('country', ''\n ) != 'United States':\n continue\n if ct > slen - 1:\n break\n sound_file = sounds[ct]\n is_best = self.preferred_pronunciation == item['id']\n if is_best:\n prefered_detected = True\n ret.append({'id': item['id'], 'by': item['username'], 'sex':\n item['sex'], 'src': sound_file, 'best': is_best})\n ct += 1\n if ct == 4:\n break\n if ret and not prefered_detected:\n ret[0]['best'] = True\n return ret\n\n def __str__(self):\n return 'Статистика слова {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 'Статистика'\n verbose_name_plural = 'Статистика'\n",
"step-2": "<mask token>\n\n\nclass Meaning(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n value = models.TextField(verbose_name='Значение')\n order = models.PositiveIntegerField(verbose_name='Порядок', default=0)\n examples = JSONField(null=True, blank=True)\n\n def __str__(self):\n if self.value is None:\n return ''\n return self.value[:20]\n\n\n class Meta:\n ordering = ['order']\n verbose_name = 'Доп. значение'\n verbose_name_plural = 'Доп. значения'\n\n\nclass Pronunciation(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n audio = models.FileField(upload_to='media/audio', verbose_name=\n 'Произношение')\n raw_od_data = JSONField(verbose_name='Сырые данные с OD', blank=True,\n null=True)\n is_active = models.BooleanField(default=True, verbose_name='Используется')\n\n def __str__(self):\n return 'Произношение {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 'Произношение'\n verbose_name_plural = 'Произношения'\n\n\nclass PronunciationMeta(object):\n\n def __init__(self, **kwargs):\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n\nclass WordLearningState(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n user = models.ForeignKey('auth.User', on_delete=models.CASCADE,\n verbose_name='Пользователь')\n is_user_know_meaning = models.BooleanField(default=False, verbose_name=\n 'Выучил значение')\n is_user_know_pronunciation = models.BooleanField(default=False,\n verbose_name='Выучил произношение')\n usage_count = models.PositiveIntegerField(default=0, verbose_name=\n 'Количество показов')\n last_usage_date = models.DateTimeField(auto_now_add=True, verbose_name=\n 'Дата последнего показа')\n preferred_pronunciation = models.PositiveIntegerField(default=0,\n verbose_name='forvo id препочтительного произношения')\n training_session = models.BooleanField(default=False, blank=False,\n verbose_name='Сеанс обучения')\n\n def _get_pronunciations_meta(self, word_str):\n forvo_meta_path = os.path.join(settings.BASE_DIR, 'media', 'forvo',\n '{}.json'.format(word_str))\n if not os.path.exists(forvo_meta_path):\n return\n with open(forvo_meta_path, 'r') as f:\n data = json.load(f)\n return data\n\n def _get_sounds(self, word_str):\n ret = []\n sounds_path = os.path.join(settings.BASE_DIR, 'media', 'sounds',\n word_str)\n print(sounds_path)\n if not os.path.exists(sounds_path):\n return []\n items = list(os.listdir(sounds_path))\n items.sort()\n for item in items:\n if item.endswith('.mp3'):\n ret.append('{}{}/{}/{}'.format(settings.MEDIA_URL, 'sounds',\n word_str, item))\n return ret\n\n def get_pronunciations(self):\n word = self.word\n forvo_meta = self._get_pronunciations_meta(word.value)\n if not forvo_meta:\n return []\n ret = []\n ct = 0\n sounds = self._get_sounds(word.value)\n slen = len(sounds)\n prefered_detected = False\n for item in (forvo_meta.get('items') or []):\n if item.get('code', '') != 'en' or item.get('country', ''\n ) != 'United States':\n continue\n if ct > slen - 1:\n break\n sound_file = sounds[ct]\n is_best = self.preferred_pronunciation == item['id']\n if is_best:\n prefered_detected = True\n ret.append({'id': item['id'], 'by': item['username'], 'sex':\n item['sex'], 'src': sound_file, 'best': is_best})\n ct += 1\n if ct == 4:\n break\n if ret and not prefered_detected:\n ret[0]['best'] = True\n return ret\n\n def __str__(self):\n return 'Статистика слова {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 
'Статистика'\n verbose_name_plural = 'Статистика'\n",
"step-3": "<mask token>\n\n\nclass Word(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n ordering = ['value']\n verbose_name = 'Слово'\n verbose_name_plural = 'Слова'\n\n\nclass Meaning(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n value = models.TextField(verbose_name='Значение')\n order = models.PositiveIntegerField(verbose_name='Порядок', default=0)\n examples = JSONField(null=True, blank=True)\n\n def __str__(self):\n if self.value is None:\n return ''\n return self.value[:20]\n\n\n class Meta:\n ordering = ['order']\n verbose_name = 'Доп. значение'\n verbose_name_plural = 'Доп. значения'\n\n\nclass Pronunciation(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n audio = models.FileField(upload_to='media/audio', verbose_name=\n 'Произношение')\n raw_od_data = JSONField(verbose_name='Сырые данные с OD', blank=True,\n null=True)\n is_active = models.BooleanField(default=True, verbose_name='Используется')\n\n def __str__(self):\n return 'Произношение {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 'Произношение'\n verbose_name_plural = 'Произношения'\n\n\nclass PronunciationMeta(object):\n\n def __init__(self, **kwargs):\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n\nclass WordLearningState(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n user = models.ForeignKey('auth.User', on_delete=models.CASCADE,\n verbose_name='Пользователь')\n is_user_know_meaning = models.BooleanField(default=False, verbose_name=\n 'Выучил значение')\n is_user_know_pronunciation = models.BooleanField(default=False,\n verbose_name='Выучил произношение')\n usage_count = models.PositiveIntegerField(default=0, verbose_name=\n 'Количество показов')\n last_usage_date = models.DateTimeField(auto_now_add=True, verbose_name=\n 'Дата последнего показа')\n preferred_pronunciation = models.PositiveIntegerField(default=0,\n verbose_name='forvo id препочтительного произношения')\n training_session = models.BooleanField(default=False, blank=False,\n verbose_name='Сеанс обучения')\n\n def _get_pronunciations_meta(self, word_str):\n forvo_meta_path = os.path.join(settings.BASE_DIR, 'media', 'forvo',\n '{}.json'.format(word_str))\n if not os.path.exists(forvo_meta_path):\n return\n with open(forvo_meta_path, 'r') as f:\n data = json.load(f)\n return data\n\n def _get_sounds(self, word_str):\n ret = []\n sounds_path = os.path.join(settings.BASE_DIR, 'media', 'sounds',\n word_str)\n print(sounds_path)\n if not os.path.exists(sounds_path):\n return []\n items = list(os.listdir(sounds_path))\n items.sort()\n for item in items:\n if item.endswith('.mp3'):\n ret.append('{}{}/{}/{}'.format(settings.MEDIA_URL, 'sounds',\n word_str, item))\n return ret\n\n def get_pronunciations(self):\n word = self.word\n forvo_meta = self._get_pronunciations_meta(word.value)\n if not forvo_meta:\n return []\n ret = []\n ct = 0\n sounds = self._get_sounds(word.value)\n slen = len(sounds)\n prefered_detected = False\n for item in (forvo_meta.get('items') or []):\n if item.get('code', '') != 'en' or item.get('country', ''\n ) != 'United States':\n continue\n if ct > slen - 1:\n break\n sound_file = sounds[ct]\n is_best = self.preferred_pronunciation == item['id']\n if is_best:\n prefered_detected = True\n ret.append({'id': item['id'], 'by': item['username'], 'sex':\n item['sex'], 'src': sound_file, 'best': is_best})\n ct += 
1\n if ct == 4:\n break\n if ret and not prefered_detected:\n ret[0]['best'] = True\n return ret\n\n def __str__(self):\n return 'Статистика слова {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 'Статистика'\n verbose_name_plural = 'Статистика'\n",
"step-4": "<mask token>\n\n\nclass Word(models.Model):\n value = models.CharField(max_length=50, verbose_name='Слово')\n spelling = models.CharField(max_length=250, verbose_name='Транскрипция')\n raw_od_article = JSONField(verbose_name='Сырые данные с OD')\n is_active = models.BooleanField(default=True, verbose_name='Используется')\n\n def __str__(self):\n return self.value\n\n\n class Meta:\n ordering = ['value']\n verbose_name = 'Слово'\n verbose_name_plural = 'Слова'\n\n\nclass Meaning(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n value = models.TextField(verbose_name='Значение')\n order = models.PositiveIntegerField(verbose_name='Порядок', default=0)\n examples = JSONField(null=True, blank=True)\n\n def __str__(self):\n if self.value is None:\n return ''\n return self.value[:20]\n\n\n class Meta:\n ordering = ['order']\n verbose_name = 'Доп. значение'\n verbose_name_plural = 'Доп. значения'\n\n\nclass Pronunciation(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n audio = models.FileField(upload_to='media/audio', verbose_name=\n 'Произношение')\n raw_od_data = JSONField(verbose_name='Сырые данные с OD', blank=True,\n null=True)\n is_active = models.BooleanField(default=True, verbose_name='Используется')\n\n def __str__(self):\n return 'Произношение {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 'Произношение'\n verbose_name_plural = 'Произношения'\n\n\nclass PronunciationMeta(object):\n\n def __init__(self, **kwargs):\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n\nclass WordLearningState(models.Model):\n word = models.ForeignKey(Word, on_delete=models.CASCADE, verbose_name=\n 'Слово')\n user = models.ForeignKey('auth.User', on_delete=models.CASCADE,\n verbose_name='Пользователь')\n is_user_know_meaning = models.BooleanField(default=False, verbose_name=\n 'Выучил значение')\n is_user_know_pronunciation = models.BooleanField(default=False,\n verbose_name='Выучил произношение')\n usage_count = models.PositiveIntegerField(default=0, verbose_name=\n 'Количество показов')\n last_usage_date = models.DateTimeField(auto_now_add=True, verbose_name=\n 'Дата последнего показа')\n preferred_pronunciation = models.PositiveIntegerField(default=0,\n verbose_name='forvo id препочтительного произношения')\n training_session = models.BooleanField(default=False, blank=False,\n verbose_name='Сеанс обучения')\n\n def _get_pronunciations_meta(self, word_str):\n forvo_meta_path = os.path.join(settings.BASE_DIR, 'media', 'forvo',\n '{}.json'.format(word_str))\n if not os.path.exists(forvo_meta_path):\n return\n with open(forvo_meta_path, 'r') as f:\n data = json.load(f)\n return data\n\n def _get_sounds(self, word_str):\n ret = []\n sounds_path = os.path.join(settings.BASE_DIR, 'media', 'sounds',\n word_str)\n print(sounds_path)\n if not os.path.exists(sounds_path):\n return []\n items = list(os.listdir(sounds_path))\n items.sort()\n for item in items:\n if item.endswith('.mp3'):\n ret.append('{}{}/{}/{}'.format(settings.MEDIA_URL, 'sounds',\n word_str, item))\n return ret\n\n def get_pronunciations(self):\n word = self.word\n forvo_meta = self._get_pronunciations_meta(word.value)\n if not forvo_meta:\n return []\n ret = []\n ct = 0\n sounds = self._get_sounds(word.value)\n slen = len(sounds)\n prefered_detected = False\n for item in (forvo_meta.get('items') or []):\n if item.get('code', '') != 'en' or item.get('country', ''\n ) != 'United States':\n continue\n if ct > slen - 1:\n 
break\n sound_file = sounds[ct]\n is_best = self.preferred_pronunciation == item['id']\n if is_best:\n prefered_detected = True\n ret.append({'id': item['id'], 'by': item['username'], 'sex':\n item['sex'], 'src': sound_file, 'best': is_best})\n ct += 1\n if ct == 4:\n break\n if ret and not prefered_detected:\n ret[0]['best'] = True\n return ret\n\n def __str__(self):\n return 'Статистика слова {}'.format(self.word)\n\n\n class Meta:\n verbose_name = 'Статистика'\n verbose_name_plural = 'Статистика'\n",
"step-5": "import json\nimport os\n\nfrom django.conf import settings\nfrom django.db import models\nfrom jsonfield import JSONField\n\n\nclass Word(models.Model):\n value = models.CharField(\n max_length=50,\n verbose_name='Слово'\n )\n spelling = models.CharField(\n max_length=250,\n verbose_name='Транскрипция'\n )\n raw_od_article = JSONField(\n verbose_name='Сырые данные с OD'\n )\n\n is_active = models.BooleanField(\n default=True,\n verbose_name='Используется'\n )\n\n def __str__(self):\n return self.value\n\n class Meta:\n ordering = [\"value\"]\n verbose_name = \"Слово\"\n verbose_name_plural = \"Слова\"\n\n\nclass Meaning(models.Model):\n word = models.ForeignKey(\n Word,\n on_delete=models.CASCADE,\n verbose_name='Слово'\n )\n value = models.TextField(\n verbose_name='Значение'\n )\n order = models.PositiveIntegerField(\n verbose_name=\"Порядок\",\n default=0\n )\n examples = JSONField(\n null=True,\n blank=True\n )\n\n def __str__(self):\n if self.value is None:\n return ''\n return self.value[:20]\n\n class Meta:\n ordering = [\"order\"]\n verbose_name = \"Доп. значение\"\n verbose_name_plural = \"Доп. значения\"\n\n\nclass Pronunciation(models.Model):\n word = models.ForeignKey(\n Word,\n on_delete=models.CASCADE,\n verbose_name='Слово'\n )\n audio = models.FileField(\n upload_to='media/audio',\n verbose_name='Произношение'\n )\n raw_od_data = JSONField(\n verbose_name='Сырые данные с OD',\n blank=True,\n null=True\n )\n is_active = models.BooleanField(\n default=True,\n verbose_name='Используется'\n )\n\n def __str__(self):\n return \"Произношение {}\".format(self.word)\n\n class Meta:\n verbose_name = \"Произношение\"\n verbose_name_plural = \"Произношения\"\n\n\nclass PronunciationMeta(object):\n def __init__(self, **kwargs):\n for k, v in kwargs.items():\n setattr(self, k, v)\n\nclass WordLearningState(models.Model):\n word = models.ForeignKey(\n Word,\n on_delete=models.CASCADE,\n verbose_name='Слово'\n )\n user = models.ForeignKey(\n \"auth.User\",\n on_delete=models.CASCADE,\n verbose_name='Пользователь'\n )\n is_user_know_meaning = models.BooleanField(\n default=False,\n verbose_name='Выучил значение'\n )\n is_user_know_pronunciation = models.BooleanField(\n default=False,\n verbose_name='Выучил произношение'\n )\n usage_count = models.PositiveIntegerField(\n default=0,\n verbose_name='Количество показов'\n )\n last_usage_date = models.DateTimeField(\n auto_now_add=True,\n verbose_name='Дата последнего показа'\n )\n preferred_pronunciation = models.PositiveIntegerField(\n default=0,\n verbose_name='forvo id препочтительного произношения',\n )\n training_session = models.BooleanField(\n default=False,\n blank=False,\n verbose_name='Сеанс обучения'\n )\n\n def _get_pronunciations_meta(self, word_str):\n forvo_meta_path = os.path.join(\n settings.BASE_DIR, 'media', 'forvo', '{}.json'.format(word_str)\n )\n if not os.path.exists(forvo_meta_path):\n return\n with open(forvo_meta_path, 'r') as f:\n data = json.load(f)\n return data\n\n def _get_sounds(self, word_str):\n ret = []\n sounds_path = os.path.join(settings.BASE_DIR, 'media', 'sounds', word_str)\n print(sounds_path)\n if not os.path.exists(sounds_path):\n return []\n items = list(os.listdir(sounds_path))\n items.sort()\n for item in items:\n if item.endswith('.mp3'):\n ret.append('{}{}/{}/{}'.format(settings.MEDIA_URL, 'sounds', word_str, item))\n return ret\n\n def get_pronunciations(self):\n word = self.word\n forvo_meta = self._get_pronunciations_meta(word.value)\n if not forvo_meta:\n return []\n\n ret = []\n ct 
= 0\n sounds = self._get_sounds(word.value)\n slen = len(sounds)\n prefered_detected = False\n for item in forvo_meta.get('items') or []:\n\n if item.get('code', '') != 'en' or item.get(\n 'country', '') != 'United States':\n continue\n\n if ct > slen-1:\n break\n\n sound_file = sounds[ct]\n\n is_best = self.preferred_pronunciation == item['id']\n\n if is_best:\n prefered_detected = True\n\n ret.append({\n 'id': item['id'],\n 'by': item['username'],\n 'sex': item['sex'],\n 'src': sound_file,\n 'best': is_best\n })\n\n ct += 1\n if ct == 4:\n break\n if ret and not prefered_detected:\n ret[0]['best'] = True\n return ret\n\n def __str__(self):\n return \"Статистика слова {}\".format(self.word)\n\n class Meta:\n verbose_name = \"Статистика\"\n verbose_name_plural = \"Статистика\"\n",
"step-ids": [
13,
14,
15,
17,
19
]
}
|
[
13,
14,
15,
17,
19
] |