code (stringlengths 13–1.2M) | order_type (stringclasses 1) | original_example (dict) | step_ids (listlengths 1–5)
---|---|---|---
from plprofiler_tool import main
from plprofiler import plprofiler
|
normal
|
{
"blob_id": "6b616f5ee0a301b76ad3f7284b47f225a694d33c",
"index": 1281,
"step-1": "<mask token>\n",
"step-2": "from plprofiler_tool import main\nfrom plprofiler import plprofiler\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# Generated by Django 3.0.8 on 2020-07-29 18:30
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('scenario', '0005_auto_20200729_1149'),
]
operations = [
migrations.RemoveField(
model_name='weapon',
name='vehicle',
),
migrations.DeleteModel(
name='Vehicle',
),
]
|
normal
|
{
"blob_id": "b99093fb13c59d4b9bb0a4f32fb62423d6752118",
"index": 6480,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('scenario', '0005_auto_20200729_1149')]\n operations = [migrations.RemoveField(model_name='weapon', name=\n 'vehicle'), migrations.DeleteModel(name='Vehicle')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('scenario', '0005_auto_20200729_1149')]\n operations = [migrations.RemoveField(model_name='weapon', name=\n 'vehicle'), migrations.DeleteModel(name='Vehicle')]\n",
"step-5": "# Generated by Django 3.0.8 on 2020-07-29 18:30\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('scenario', '0005_auto_20200729_1149'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='weapon',\n name='vehicle',\n ),\n migrations.DeleteModel(\n name='Vehicle',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!c:\Python\python.exe
# Fig 35.16: fig35_16.py
# Program to display CGI environment variables
import os
import cgi
print "Content-type: text/html"
print
print """<!DOCTYPE html PUBLIC
"-//W3C//DTD XHTML 1.0 Transitional//EN"
"DTD/xhtml1-transitional.dtd">"""
print """
<html xmlns = "http://www.w3.org/1999/xhtml" xml:lang="en"
lang="en">
<head><title>Environment Variables</title></head>
<body><table style = "border: 0">"""
rowNumber = 0
for item in os.environ.keys():
rowNumber += 1
if rowNumber % 2 == 0:
backgroundColor = "white"
else:
backgroundColor = "lightgrey"
print """<tr style = "background-color: %s">
<td>%s</td><td>%s</td></tr>""" \
% ( backgroundColor, item,
cgi.escape( os.environ[ item ] ) )
print """</table></body></html>"""
##########################################################################
# (C) Copyright 1992-2004 by Deitel & Associates, Inc. and #
# Pearson Education, Inc. All Rights Reserved. #
# #
# DISCLAIMER: The authors and publisher of this book have used their #
# best efforts in preparing the book. These efforts include the #
# development, research, and testing of the theories and programs #
# to determine their effectiveness. The authors and publisher make #
# no warranty of any kind, expressed or implied, with regard to these #
# programs or to the documentation contained in these books. The authors #
# and publisher shall not be liable in any event for incidental or #
# consequential damages in connection with, or arising out of, the #
# furnishing, performance, or use of these programs. #
##########################################################################
|
normal
|
{
"blob_id": "61b28088e4344d8a94006e5c04c189a44bbb6ff3",
"index": 3334,
"step-1": "#!c:\\Python\\python.exe\r\n# Fig 35.16: fig35_16.py\r\n# Program to display CGI environment variables\r\n\r\nimport os\r\nimport cgi\r\n\r\nprint \"Content-type: text/html\"\r\nprint\r\n\r\nprint \"\"\"<!DOCTYPE html PUBLIC\r\n \"-//W3C//DTD XHTML 1.0 Transitional//EN\"\r\n \"DTD/xhtml1-transitional.dtd\">\"\"\"\r\n\r\nprint \"\"\"\r\n<html xmlns = \"http://www.w3.org/1999/xhtml\" xml:lang=\"en\"\r\n lang=\"en\">\r\n <head><title>Environment Variables</title></head>\r\n <body><table style = \"border: 0\">\"\"\"\r\n\r\nrowNumber = 0\r\n\r\nfor item in os.environ.keys():\r\n rowNumber += 1\r\n\r\n if rowNumber % 2 == 0:\r\n backgroundColor = \"white\"\r\n else:\r\n backgroundColor = \"lightgrey\"\r\n\r\n print \"\"\"<tr style = \"background-color: %s\">\r\n <td>%s</td><td>%s</td></tr>\"\"\" \\\r\n % ( backgroundColor, item,\r\n cgi.escape( os.environ[ item ] ) )\r\n\r\nprint \"\"\"</table></body></html>\"\"\"\r\n\r\n########################################################################## \r\n# (C) Copyright 1992-2004 by Deitel & Associates, Inc. and #\r\n# Pearson Education, Inc. All Rights Reserved. #\r\n# #\r\n# DISCLAIMER: The authors and publisher of this book have used their #\r\n# best efforts in preparing the book. These efforts include the #\r\n# development, research, and testing of the theories and programs #\r\n# to determine their effectiveness. The authors and publisher make #\r\n# no warranty of any kind, expressed or implied, with regard to these #\r\n# programs or to the documentation contained in these books. The authors #\r\n# and publisher shall not be liable in any event for incidental or #\r\n# consequential damages in connection with, or arising out of, the #\r\n# furnishing, performance, or use of these programs. #\r\n##########################################################################",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr1/local/bin/python
import os, sys, re, shutil, random
from tempfile import *
# program location
prog_dir = '/home/jpei/test_promals3d_package/bar/promals_package/bin/'
# program names
promals_web = prog_dir + "progress_for_web.py"
csv_cutoff_g = 5
alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
def run_promals():
csv_cutoff = csv_cutoff_g
# check and parse the command line
cmd_line = sys.argv
if len(cmd_line) <= 1:
promals_help()
sys.exit(1)
elif not os.path.isfile(cmd_line[1]):
print >> sys.stderr, "Error reading input file:", cmd_line[1]
promals_help()
sys.exit(1)
else:
randomstring = ""
infile = os.path.abspath(cmd_line[1])
infiledir = os.path.split(infile)[0]
for x in random.sample(alphabet,40):
randomstring+=x
ranfile = "%s/%s" %(infiledir, randomstring)
try:
fp = open(ranfile, "w")
except:
print >> sys.stderr, "Error:"
print >> sys.stderr, " The directory containing your input file is not writable:", infiledir
print >> sys.stderr, " Input file should be in a writable directory"
sys.exit(1)
fp.close()
os.system("rm -f %s" %ranfile)
cmd_line1 = []
outputfile = ""
blast_dir = ""
resnum = 1
caa_freq = 0.8
for i in range(len(cmd_line)):
arg = cmd_line[i]
if i == 0: arg = prog_dir + 'promals_c'
# change inputfile name to full path name
if i == 1:
arg = os.path.abspath(arg)
inputfile = arg
# change outfile name to full path name
if arg == '-outfile':
if i+1 < len(cmd_line):
cmd_line[i+1] = os.path.abspath(cmd_line[i+1])
outputfile = cmd_line[i+1]
# change blast_dir name to full path name
if arg == '-blast_dir':
if i+1 < len(cmd_line):
cmd_line[i+1] = os.path.abspath(cmd_line[i+1])
#if arg == '-ssw': arg = '-ss_weight'
#if arg == '-aaw': arg = '-score_weight'
#if arg == '-max_homologs': arg = '-max_num_sequences'
#if arg == '-iter_num': arg = '-iter_number'
if arg == '-csv_index':
if i+1 < len(cmd_line):
csv_cutoff = int(cmd_line[i+1])
if (csv_cutoff<0) or (csv_cutoff>9):
csv_cutoff = 5
if arg == "-resnum":
resnum = int(cmd_line[i+1])
if arg == "-caa_freq":
caa_freq = float(sys.argv[i+1])
cmd_line1.append(arg)
if not outputfile:
if re.search("\.fa$", inputfile):
outputfile = re.sub("\.fa$", "", inputfile) + ".promals.aln"
else: outputfile = inputfile + ".promals.aln"
if not blast_dir:
blast_dir = "%s_blast" %inputfile
promals_c = ' '.join(cmd_line1)
promals_c = re.sub("\s+-resnum\s+\S+", " ", promals_c)
promals_c = re.sub("\s+-caa_freq\s+\S+", " ", promals_c)
promals_c = re.sub("\s+-csv_index\s+\S+", " ", promals_c)
if "-blast_dir" not in promals_c:
promals_c += " -blast_dir %s " %blast_dir
outputlogfile = inputfile+".prmls.oUTpUT"
promals_c = promals_c + " > " + outputlogfile
print "promals command:"
print promals_c
print
sys.stdout.flush()
# run programs in a temporary directory to avoid .ncbirc problem
cwd = os.getcwd()
tmpdir = mkdtemp()
os.chdir(tmpdir)
os.system("cp %s.ncbirc ." %prog_dir)
s1 = os.system(promals_c)
if s1 == 0:
print "output alignment file is:", outputfile
print "blast intermediate files are in:", blast_dir
print
else:
print "Error running promals - check log file for details:", outputlogfile
print
print "html file command:"
print "python %s %s %s -cutoff %d -resnum %d -caa_freq %f" %(promals_web, outputfile, outputlogfile, csv_cutoff, resnum, caa_freq)
print
sys.stdout.flush()
s2 = os.system("python %s %s %s -cutoff %d -resnum %d -caa_freq %f 2>/dev/null" %(promals_web, outputfile, outputlogfile, csv_cutoff, resnum, caa_freq) )
if s2 == 0:
print "output html alignment file is:", outputfile + ".html"
print
else:
print "Error generating html file"
print
os.chdir(cwd)
shutil.rmtree(tmpdir)
def promals_help():
help_content = '''
promals with 3D information
command:
promals input_file [options] > input_file.log
python promals input_file [options] > input_file.log
input:
input_file needs to be FASTA format
output:
Two alignment files will be generated. One is in CLUSTAL
format alignment (file name can be specified by option -outfile).
The other file is an html file of colored alignment.
Options:
For alignment strategies:
-id_thr [0, 1] Identity threshold that determined the partition of
fast and slow alignment processes. If two groups of
sequences has average identity above this threshold,
align them in a fast way. Otherwise, use slower but
more accurate way (by profile-profile alignment with
predicted secondary structures and available 3D
constraints). Default: 0.6 (corresponding to 60% identity)
For using 3D information:
-dali [0 or 1] Use DaliLite structural alignment (1) or not use
fast alignment (0) ("DaliLite" executable needs to
be present in bin/ directory). Default: 0 (it is
relatively slow to run DaliLite)
-fast [0 or 1] Use fast structural alignment (1) or not use fast
alignment (0) ("fast" executable needs to be present
in bin/ directory). Default: 1
-tmalign [0 or 1] Use TMalign structural alignment (1) or not use fast
TMalign alignment (0) ("TMalign" executable needs to
be present in bin/ directory). Default: 1
-struct_weight [0, inf[ Weight of structural constraints relative to sequence
constraints. Default: 1.5
For profile scoring:
-ss_weight [0,inf[ Weight of predicted secondary structure in profile-profile
scoring. Default: 0.2
-score_weight [0,inf[ Weight of amino acids in profile-profile scoring.
Default: 0.8
For running PSI-BLAST to get sequence profile:
-iter_number <int> Number of PSI-BLAST iterations for profile generation.
Default: 3
-evalue [0, inf[ PSI-BLAST evalue cutoff for inclusion. Default: 0.001
-low_id_thr [0,1] Remove PSI-BLAST hits with identity to the query less than
this value. Default: 0.2
-blast_dir <file> Directory of running PSI-BLAST and store other intermediate
results.
-clean_blast_before [0 or 1] Remove any file in the directory that stores
intermediate results (specified by -blast_dir option) before
running PSI-BLAST. Default: 0.
-clean_blast_after [0 or 1] Remove any file in the PSI-BLAST directory after running
PSI-BLAST. Default: 0
For output:
-outfile <file> The name of output alignment file.
-blocksize <int> Number of letters in clustal-format alignment blocks.
Default: 70
-resnum [0 or 1] In colored html alignment, show residue numbers for
alignment blocks. Default: 1
-caa_freq [0, 1] In colored html alignment, show amino acid consensus
symbol if the fraction of a class of residues is higher
than this threshold. Default: 0.8
'''
print help_content
if __name__ == '__main__':
run_promals()
|
normal
|
{
"blob_id": "b9386cf8c17b28fd1fea6e587ca4401de247cbea",
"index": 7779,
"step-1": "#!/usr1/local/bin/python\n\nimport os, sys, re, shutil, random\nfrom tempfile import *\n\n\n# program location\nprog_dir = '/home/jpei/test_promals3d_package/bar/promals_package/bin/'\n\n# program names\npromals_web = prog_dir + \"progress_for_web.py\"\n\ncsv_cutoff_g = 5\n\nalphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'\n\ndef run_promals():\n\n\tcsv_cutoff = csv_cutoff_g\n\t# check and parse the command line\n\tcmd_line = sys.argv\n\tif len(cmd_line) <= 1: \n\t\tpromals_help()\n\t\tsys.exit(1)\n\telif not os.path.isfile(cmd_line[1]):\n print >> sys.stderr, \"Error reading input file:\", cmd_line[1]\n\t\tpromals_help()\n\t\tsys.exit(1)\n else:\n randomstring = \"\"\n infile = os.path.abspath(cmd_line[1])\n infiledir = os.path.split(infile)[0]\n for x in random.sample(alphabet,40):\n randomstring+=x\n ranfile = \"%s/%s\" %(infiledir, randomstring)\n try:\n fp = open(ranfile, \"w\")\n except:\n print >> sys.stderr, \"Error:\"\n print >> sys.stderr, \" The directory containing your input file is not writable:\", infiledir\n print >> sys.stderr, \" Input file should be in a writable directory\"\n sys.exit(1)\n fp.close()\n os.system(\"rm -f %s\" %ranfile)\n\n\tcmd_line1 = []\n\toutputfile = \"\"\n blast_dir = \"\"\n resnum = 1\n caa_freq = 0.8\n\tfor i in range(len(cmd_line)):\n\t\targ = cmd_line[i]\n\t\tif i == 0: arg = prog_dir + 'promals_c'\n # change inputfile name to full path name\n\t\tif i == 1: \n arg = os.path.abspath(arg)\n inputfile = arg\n # change outfile name to full path name\n\t\tif arg == '-outfile':\n\t\t\tif i+1 < len(cmd_line): \n cmd_line[i+1] = os.path.abspath(cmd_line[i+1])\n outputfile = cmd_line[i+1]\n # change blast_dir name to full path name\n\t\tif arg == '-blast_dir':\n\t\t\tif i+1 < len(cmd_line): \n cmd_line[i+1] = os.path.abspath(cmd_line[i+1])\n\t\t#if arg == '-ssw': arg = '-ss_weight'\n\t\t#if arg == '-aaw': arg = '-score_weight'\n\t\t#if arg == '-max_homologs': arg = '-max_num_sequences'\n\t\t#if arg == '-iter_num': arg = '-iter_number'\n\t\tif arg == '-csv_index': \n\t\t\tif i+1 < len(cmd_line):\n\t\t\t\tcsv_cutoff = int(cmd_line[i+1])\n\t\t\t\tif (csv_cutoff<0) or (csv_cutoff>9):\n\t\t\t\t\tcsv_cutoff = 5\n if arg == \"-resnum\":\n resnum = int(cmd_line[i+1])\n if arg == \"-caa_freq\":\n caa_freq = float(sys.argv[i+1])\n\t\tcmd_line1.append(arg)\n\t\n\tif not outputfile:\n\t\tif re.search(\"\\.fa$\", inputfile):\n\t\t\toutputfile = re.sub(\"\\.fa$\", \"\", inputfile) + \".promals.aln\"\n else: outputfile = inputfile + \".promals.aln\"\n\tif not blast_dir:\n blast_dir = \"%s_blast\" %inputfile\n\t\n\tpromals_c = ' '.join(cmd_line1)\n promals_c = re.sub(\"\\s+-resnum\\s+\\S+\", \" \", promals_c)\n promals_c = re.sub(\"\\s+-caa_freq\\s+\\S+\", \" \", promals_c)\n promals_c = re.sub(\"\\s+-csv_index\\s+\\S+\", \" \", promals_c)\n if \"-blast_dir\" not in promals_c:\n promals_c += \" -blast_dir %s \" %blast_dir\n\toutputlogfile = inputfile+\".prmls.oUTpUT\"\n\tpromals_c = promals_c + \" > \" + outputlogfile\n print \"promals command:\"\n\tprint promals_c\n print\n sys.stdout.flush()\n\t\n\t# run programs in a temporary directory to avoid .ncbirc problem\n cwd = os.getcwd()\n tmpdir = mkdtemp()\n os.chdir(tmpdir)\n os.system(\"cp %s.ncbirc .\" %prog_dir)\n\ts1 = os.system(promals_c)\n if s1 == 0:\n print \"output alignment file is:\", outputfile\n print \"blast intermediate files are in:\", blast_dir\n print\n else:\n print \"Error running promals - check log file for details:\", outputlogfile\n print\n print \"html file 
command:\"\n\tprint \"python %s %s %s -cutoff %d -resnum %d -caa_freq %f\" %(promals_web, outputfile, outputlogfile, csv_cutoff, resnum, caa_freq) \n print\n sys.stdout.flush()\n\ts2 = os.system(\"python %s %s %s -cutoff %d -resnum %d -caa_freq %f 2>/dev/null\" %(promals_web, outputfile, outputlogfile, csv_cutoff, resnum, caa_freq) )\n if s2 == 0:\n print \"output html alignment file is:\", outputfile + \".html\"\n print\n else:\n print \"Error generating html file\"\n print\n\n os.chdir(cwd)\n shutil.rmtree(tmpdir)\n\ndef promals_help():\n\n help_content = '''\n\npromals with 3D information\n \n command: \n promals input_file [options] > input_file.log\n python promals input_file [options] > input_file.log\n\n input:\n input_file needs to be FASTA format\n\n output: \n Two alignment files will be generated. One is in CLUSTAL \n format alignment (file name can be specified by option -outfile). \n The other file is an html file of colored alignment.\n \n Options:\n\n For alignment strategies:\n -id_thr [0, 1] Identity threshold that determined the partition of\n fast and slow alignment processes. If two groups of\n sequences has average identity above this threshold,\n align them in a fast way. Otherwise, use slower but\n more accurate way (by profile-profile alignment with\n predicted secondary structures and available 3D \n constraints). Default: 0.6 (corresponding to 60% identity)\n\n For using 3D information:\n -dali [0 or 1] Use DaliLite structural alignment (1) or not use \n fast alignment (0) (\"DaliLite\" executable needs to \n be present in bin/ directory). Default: 0 (it is \n relatively slow to run DaliLite)\n -fast [0 or 1] Use fast structural alignment (1) or not use fast \n alignment (0) (\"fast\" executable needs to be present \n in bin/ directory). Default: 1\n -tmalign [0 or 1] Use TMalign structural alignment (1) or not use fast \n TMalign alignment (0) (\"TMalign\" executable needs to \n be present in bin/ directory). Default: 1\n -struct_weight [0, inf[ Weight of structural constraints relative to sequence \n constraints. Default: 1.5\n\n For profile scoring:\n -ss_weight [0,inf[ Weight of predicted secondary structure in profile-profile \n scoring. Default: 0.2\n -score_weight [0,inf[ Weight of amino acids in profile-profile scoring. \n Default: 0.8\n\n For running PSI-BLAST to get sequence profile:\n -iter_number <int> Number of PSI-BLAST iterations for profile generation. \n Default: 3\n -evalue [0, inf[ PSI-BLAST evalue cutoff for inclusion. Default: 0.001\n -low_id_thr [0,1] Remove PSI-BLAST hits with identity to the query less than \n this value. Default: 0.2\n -blast_dir <file> Directory of running PSI-BLAST and store other intermediate \n results.\n -clean_blast_before [0 or 1] Remove any file in the directory that stores \n intermediate results (specified by -blast_dir option) before\n running PSI-BLAST. Default: 0. \n -clean_blast_after [0 or 1] Remove any file in the PSI-BLAST directory after running\n PSI-BLAST. Default: 0\n\n For output:\n -outfile <file> The name of output alignment file.\n -blocksize <int> Number of letters in clustal-format alignment blocks. \n Default: 70\n -resnum [0 or 1] In colored html alignment, show residue numbers for \n alignment blocks. Default: 1\n -caa_freq [0, 1] In colored html alignment, show amino acid consensus\n symbol if the fraction of a class of residues is higher\n than this threshold. Default: 0.8\n\n '''\n\n print help_content\n\n\nif __name__ == '__main__':\n\n\trun_promals()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
# coding: utf-8
from sklearn.metrics import confusion_matrix
import numpy as np
import pandas as pd
df = pd.read_csv('orb.csv')
d = pd.pivot_table(df,index='col1',columns='col2',values='result')
d.fillna(0,inplace=True)
|
normal
|
{
"blob_id": "ce65a672cae26bdb8ec8cb04eabfe1877f9cd7d4",
"index": 9558,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nd.fillna(0, inplace=True)\n",
"step-3": "<mask token>\ndf = pd.read_csv('orb.csv')\nd = pd.pivot_table(df, index='col1', columns='col2', values='result')\nd.fillna(0, inplace=True)\n",
"step-4": "from sklearn.metrics import confusion_matrix\nimport numpy as np\nimport pandas as pd\ndf = pd.read_csv('orb.csv')\nd = pd.pivot_table(df, index='col1', columns='col2', values='result')\nd.fillna(0, inplace=True)\n",
"step-5": "#!/usr/bin/env python\n# coding: utf-8\n\nfrom sklearn.metrics import confusion_matrix\nimport numpy as np\nimport pandas as pd\n\n\ndf = pd.read_csv('orb.csv')\nd = pd.pivot_table(df,index='col1',columns='col2',values='result')\nd.fillna(0,inplace=True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
import cv2
import time
cap = cv2.VideoCapture(0)
ret, frame = cap.read()
average_stack = np.float32(np.copy(frame))/255
frames = 1.0
while(True):
# Capture frame-by-frame
ret, frame = cap.read()
frame = np.float32(frame)/255
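    # Running (incremental) mean: new_avg = (old_avg * n + frame) / (n + 1),
    # so every frame seen so far contributes equally without being stored.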
average_stack = average_stack * frames + frame
frames += 1.0
average_stack = average_stack/frames
# Display the resulting frame
cv2.imshow('frame',np.uint8(average_stack*255))
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "7fd89272d3d3584f35fd8f552cb7b14e57b7ed1b",
"index": 1591,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n ret, frame = cap.read()\n frame = np.float32(frame) / 255\n average_stack = average_stack * frames + frame\n frames += 1.0\n average_stack = average_stack / frames\n cv2.imshow('frame', np.uint8(average_stack * 255))\n if cv2.waitKey(1) & 255 == ord('q'):\n break\ncap.release()\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\ncap = cv2.VideoCapture(0)\nret, frame = cap.read()\naverage_stack = np.float32(np.copy(frame)) / 255\nframes = 1.0\nwhile True:\n ret, frame = cap.read()\n frame = np.float32(frame) / 255\n average_stack = average_stack * frames + frame\n frames += 1.0\n average_stack = average_stack / frames\n cv2.imshow('frame', np.uint8(average_stack * 255))\n if cv2.waitKey(1) & 255 == ord('q'):\n break\ncap.release()\ncv2.destroyAllWindows()\n",
"step-4": "import numpy as np\nimport cv2\nimport time\ncap = cv2.VideoCapture(0)\nret, frame = cap.read()\naverage_stack = np.float32(np.copy(frame)) / 255\nframes = 1.0\nwhile True:\n ret, frame = cap.read()\n frame = np.float32(frame) / 255\n average_stack = average_stack * frames + frame\n frames += 1.0\n average_stack = average_stack / frames\n cv2.imshow('frame', np.uint8(average_stack * 255))\n if cv2.waitKey(1) & 255 == ord('q'):\n break\ncap.release()\ncv2.destroyAllWindows()\n",
"step-5": "import numpy as np\nimport cv2\nimport time\n\ncap = cv2.VideoCapture(0)\nret, frame = cap.read()\naverage_stack = np.float32(np.copy(frame))/255\nframes = 1.0\n\nwhile(True):\n # Capture frame-by-frame\n ret, frame = cap.read()\n frame = np.float32(frame)/255\n\n average_stack = average_stack * frames + frame\n frames += 1.0\n average_stack = average_stack/frames\n\n # Display the resulting frame\n cv2.imshow('frame',np.uint8(average_stack*255))\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n# When everything done, release the capture\ncap.release()\ncv2.destroyAllWindows()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python3
import pandas as pd
import csv
def get_apriori_input(input_file,output_file,sample_col="Sample",gene_id_col="Gene_ID"):
df=pd.read_csv(input_file,sep="\t")
sample_names=df[sample_col].unique()
with open(output_file,"w") as out:
csv_writer=csv.writer(out,delimiter="\t")
for sample_name in sample_names:
bool=df[sample_col]==sample_name
df_sample=df[bool]
gene_ids=df_sample[gene_id_col]
gene_string=",".join(gene_ids)
csv_writer.writerow([sample_name,gene_string])
if __name__ == "__main__":
import sys
program,input_file,output_file,sample_col,gene_id_col=sys.argv
get_apriori_input(input_file,output_file,sample_col,gene_id_col)
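# Usage sketch (hypothetical file names):
#   python get_apriori_input.py expression.tsv baskets.tsv Sample Gene_ID
# Each output row is "<sample>\t<gene1,gene2,...>" - one comma-joined
# transaction of gene IDs per sample, the itemset format Apriori tools expect.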
|
normal
|
{
"blob_id": "e14bea6376c8649bf9c9c5759d530af773664cd4",
"index": 891,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_apriori_input(input_file, output_file, sample_col='Sample',\n gene_id_col='Gene_ID'):\n df = pd.read_csv(input_file, sep='\\t')\n sample_names = df[sample_col].unique()\n with open(output_file, 'w') as out:\n csv_writer = csv.writer(out, delimiter='\\t')\n for sample_name in sample_names:\n bool = df[sample_col] == sample_name\n df_sample = df[bool]\n gene_ids = df_sample[gene_id_col]\n gene_string = ','.join(gene_ids)\n csv_writer.writerow([sample_name, gene_string])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_apriori_input(input_file, output_file, sample_col='Sample',\n gene_id_col='Gene_ID'):\n df = pd.read_csv(input_file, sep='\\t')\n sample_names = df[sample_col].unique()\n with open(output_file, 'w') as out:\n csv_writer = csv.writer(out, delimiter='\\t')\n for sample_name in sample_names:\n bool = df[sample_col] == sample_name\n df_sample = df[bool]\n gene_ids = df_sample[gene_id_col]\n gene_string = ','.join(gene_ids)\n csv_writer.writerow([sample_name, gene_string])\n\n\nif __name__ == '__main__':\n import sys\n program, input_file, output_file, sample_col, gene_id_col = sys.argv\n get_apriori_input(input_file, output_file, sample_col, gene_id_col)\n",
"step-4": "import pandas as pd\nimport csv\n\n\ndef get_apriori_input(input_file, output_file, sample_col='Sample',\n gene_id_col='Gene_ID'):\n df = pd.read_csv(input_file, sep='\\t')\n sample_names = df[sample_col].unique()\n with open(output_file, 'w') as out:\n csv_writer = csv.writer(out, delimiter='\\t')\n for sample_name in sample_names:\n bool = df[sample_col] == sample_name\n df_sample = df[bool]\n gene_ids = df_sample[gene_id_col]\n gene_string = ','.join(gene_ids)\n csv_writer.writerow([sample_name, gene_string])\n\n\nif __name__ == '__main__':\n import sys\n program, input_file, output_file, sample_col, gene_id_col = sys.argv\n get_apriori_input(input_file, output_file, sample_col, gene_id_col)\n",
"step-5": "#!/usr/bin/env python3\nimport pandas as pd\nimport csv\ndef get_apriori_input(input_file,output_file,sample_col=\"Sample\",gene_id_col=\"Gene_ID\"):\n df=pd.read_csv(input_file,sep=\"\\t\")\n sample_names=df[sample_col].unique()\n with open(output_file,\"w\") as out:\n csv_writer=csv.writer(out,delimiter=\"\\t\")\n for sample_name in sample_names:\n bool=df[sample_col]==sample_name\n df_sample=df[bool]\n gene_ids=df_sample[gene_id_col]\n gene_string=\",\".join(gene_ids)\n csv_writer.writerow([sample_name,gene_string])\n\n\nif __name__ == \"__main__\":\n import sys\n program,input_file,output_file,sample_col,gene_id_col=sys.argv\n get_apriori_input(input_file,output_file,sample_col,gene_id_col)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#a list of functions/Classes to be imported when a user imports * from swarmpose
__all__ = ['Swarmpose']
|
normal
|
{
"blob_id": "e375501e6b815530e61af9181d4cade83d4588ca",
"index": 8762,
"step-1": "<mask token>\n",
"step-2": "__all__ = ['Swarmpose']\n",
"step-3": "#a list of functions/Classes to be inported when a user imports * from swarmpose\n__all__ = ['Swarmpose']",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import json
# I couldn't solve it completely.
#Parking lots that are covered, in the sports center.
from pprint import pprint
with open('Aparcamientos.json') as data_file:
data = json.load(data_file)
for x in data['docs']:
if x['TIPOLOGIA'] == 'Cubierto':
print(x['NOMBRE'])
elif x['TIPOLOGIA'] == 'Pabellón de deportes':
print(x['NOMBRE'])
print(x['TIPOLOGIA'])
|
normal
|
{
"blob_id": "d111f93144a1d2790470365d0ca31bcea17713d7",
"index": 8766,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('Aparcamientos.json') as data_file:\n data = json.load(data_file)\nfor x in data['docs']:\n if x['TIPOLOGIA'] == 'Cubierto':\n print(x['NOMBRE'])\n elif x['TIPOLOGIA'] == 'Pabellón de deportes':\n print(x['NOMBRE'])\n print(x['TIPOLOGIA'])\n",
"step-3": "import json\nfrom pprint import pprint\nwith open('Aparcamientos.json') as data_file:\n data = json.load(data_file)\nfor x in data['docs']:\n if x['TIPOLOGIA'] == 'Cubierto':\n print(x['NOMBRE'])\n elif x['TIPOLOGIA'] == 'Pabellón de deportes':\n print(x['NOMBRE'])\n print(x['TIPOLOGIA'])\n",
"step-4": "import json\n# No llego a solucionarlo entero.\n#Aparcamientos que estan cubiertos en el centro de deportes .\nfrom pprint import pprint\n\nwith open('Aparcamientos.json') as data_file: \n data = json.load(data_file)\nfor x in data['docs']:\n\tif x['TIPOLOGIA'] == 'Cubierto':\n\t\tprint(x['NOMBRE'])\n\telif x['TIPOLOGIA'] == 'Pabellón de deportes':\n\t\tprint(x['NOMBRE'])\n\t\tprint(x['TIPOLOGIA'])\n\n\n\n\t\t\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import streamlit as st
from streamlit.components.v1 import components
from streamlit.report_thread import get_report_ctx
from util.session import *
from multipage import MultiPage
from pages import register
def app(page):
if not login_status():
title_container = st.empty()
remail_input_container = st.empty()
rpw_input_container = st.empty()
rregister_button_container = st.empty()
# title_container.write("Register")
email = remail_input_container.text_input("Email ")
password = rpw_input_container.text_input("Password ", type="password")
rregister_button = rregister_button_container.button('Register')
if rregister_button:
title_container.empty()
remail_input_container.empty()
rpw_input_container.empty()
rregister_button_container.empty()
login()
page.app()
st.experimental_rerun()
|
normal
|
{
"blob_id": "41cfd558824b6561114a48a694b1e6e6a7cb8c05",
"index": 7,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef app(page):\n if not login_status():\n title_container = st.empty()\n remail_input_container = st.empty()\n rpw_input_container = st.empty()\n rregister_button_container = st.empty()\n email = remail_input_container.text_input('Email ')\n password = rpw_input_container.text_input('Password ', type='password')\n rregister_button = rregister_button_container.button('Register')\n if rregister_button:\n title_container.empty()\n remail_input_container.empty()\n rpw_input_container.empty()\n rregister_button_container.empty()\n login()\n page.app()\n st.experimental_rerun()\n",
"step-3": "import streamlit as st\nfrom streamlit.components.v1 import components\nfrom streamlit.report_thread import get_report_ctx\nfrom util.session import *\nfrom multipage import MultiPage\nfrom pages import register\n\n\ndef app(page):\n if not login_status():\n title_container = st.empty()\n remail_input_container = st.empty()\n rpw_input_container = st.empty()\n rregister_button_container = st.empty()\n email = remail_input_container.text_input('Email ')\n password = rpw_input_container.text_input('Password ', type='password')\n rregister_button = rregister_button_container.button('Register')\n if rregister_button:\n title_container.empty()\n remail_input_container.empty()\n rpw_input_container.empty()\n rregister_button_container.empty()\n login()\n page.app()\n st.experimental_rerun()\n",
"step-4": "import streamlit as st\nfrom streamlit.components.v1 import components\nfrom streamlit.report_thread import get_report_ctx\nfrom util.session import *\nfrom multipage import MultiPage\nfrom pages import register\n\ndef app(page):\n if not login_status():\n title_container = st.empty()\n remail_input_container = st.empty()\n rpw_input_container = st.empty()\n rregister_button_container = st.empty()\n\n # title_container.write(\"Register\")\n email = remail_input_container.text_input(\"Email \")\n password = rpw_input_container.text_input(\"Password \", type=\"password\")\n rregister_button = rregister_button_container.button('Register')\n\n if rregister_button:\n title_container.empty()\n remail_input_container.empty()\n rpw_input_container.empty()\n rregister_button_container.empty()\n login()\n page.app()\n st.experimental_rerun()",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
__author__ = 'Jager'
from equipment import Equipment
class Weapon (Equipment):
def __init__(self, name, power):
super(Weapon, self).__init__(name)
self.power = power
@staticmethod
def fromJSON(jsonstr):
obj = Equipment.fromJSON(jsonstr)
return Weapon(obj["name"], obj["power"])
def __str__(self):
return "{}: Power({})".format(self.name, self.power)
|
normal
|
{
"blob_id": "276d7ac493ddcb327dbce279d9f4bc8a74c98245",
"index": 5749,
"step-1": "<mask token>\n\n\nclass Weapon(Equipment):\n\n def __init__(self, name, power):\n super(Weapon, self).__init__(name)\n self.power = power\n <mask token>\n\n def __str__(self):\n return '{}: Power({})'.format(self.name, self.power)\n",
"step-2": "<mask token>\n\n\nclass Weapon(Equipment):\n\n def __init__(self, name, power):\n super(Weapon, self).__init__(name)\n self.power = power\n\n @staticmethod\n def fromJSON(jsonstr):\n obj = Equipment.fromJSON(jsonstr)\n return Weapon(obj['name'], obj['power'])\n\n def __str__(self):\n return '{}: Power({})'.format(self.name, self.power)\n",
"step-3": "__author__ = 'Jager'\n<mask token>\n\n\nclass Weapon(Equipment):\n\n def __init__(self, name, power):\n super(Weapon, self).__init__(name)\n self.power = power\n\n @staticmethod\n def fromJSON(jsonstr):\n obj = Equipment.fromJSON(jsonstr)\n return Weapon(obj['name'], obj['power'])\n\n def __str__(self):\n return '{}: Power({})'.format(self.name, self.power)\n",
"step-4": "__author__ = 'Jager'\nfrom equipment import Equipment\n\n\nclass Weapon(Equipment):\n\n def __init__(self, name, power):\n super(Weapon, self).__init__(name)\n self.power = power\n\n @staticmethod\n def fromJSON(jsonstr):\n obj = Equipment.fromJSON(jsonstr)\n return Weapon(obj['name'], obj['power'])\n\n def __str__(self):\n return '{}: Power({})'.format(self.name, self.power)\n",
"step-5": "__author__ = 'Jager'\nfrom equipment import Equipment\n\n\nclass Weapon (Equipment):\n def __init__(self, name, power):\n super(Weapon, self).__init__(name)\n self.power = power\n\n @staticmethod\n def fromJSON(jsonstr):\n obj = Equipment.fromJSON(jsonstr)\n return Weapon(obj[\"name\"], obj[\"power\"])\n\n def __str__(self):\n return \"{}: Power({})\".format(self.name, self.power)",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys,os,traceback
from PIL import Image
class ResizeImageBuilder:
    def __init__(self):
        # print(self.__class__)
        self.baseImage = None  # ensure the attribute exists before setOriginImagePath() is called
def setOriginImagePath(self, filePath):
try:
img = Image.open(filePath)
# img = img.convert('RGB')
# size = 32, 32
# img.thumbnail(size)
print('origin image mode:', img.mode)
img = img.convert('RGB')
print('target image mode:', img.mode)
# img.show()
self.baseImage = img
return None
        except BaseException:
            return str(filePath + " open error: " + traceback.format_exc())
def createImageWithOriginImage(self, img, imageSize):
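        # Note: Image.ANTIALIAS is a legacy alias of Image.LANCZOS and was
        # removed in Pillow 10; on current Pillow use Image.LANCZOS instead.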
return img.resize((imageSize, imageSize),Image.ANTIALIAS)
def saveImageWithPath(self, img, savePath):
img.save(savePath)
def createImage(self, savePath, imageSize):
if self.baseImage == None:
print('error: self.baseImage == None, please call setOriginImagePath() before createImage()')
return
try:
newimg = self.createImageWithOriginImage(self.baseImage, imageSize)
self.saveImageWithPath(newimg, savePath)
# print('done')
        except BaseException:
            return 'createImage error: ' + traceback.format_exc()
def main():
# builder = ResizeImageBuilder()
# builder.setOriginImagePath(originImagePath)
# builder.createImage(path1, size1)
# builder.createImage(path2, size2)
pass
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "47119f46cdbbb7306aef8237d4f56f0f10690ae4",
"index": 9245,
"step-1": "<mask token>\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n <mask token>\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\ndef main():\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys, os, traceback\nfrom PIL import Image\n\n\nclass ResizeImageBuilder:\n\n def __init__(self):\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n self.baseImage = img\n return None\n except (BaseException, e):\n return str(filePath + ' open error: ' + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize), Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print(\n 'error: self.baseImage == None, please call setOriginImagePath() before createImage()'\n )\n return\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n except (BaseException, e):\n return 'createImage error: ' + traceback.format_exc(e)\n\n\ndef main():\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport sys,os,traceback\nfrom PIL import Image\n\nclass ResizeImageBuilder:\n def __init__(self):\n # print(self.__class__)\n pass\n\n def setOriginImagePath(self, filePath):\n try:\n img = Image.open(filePath)\n # img = img.convert('RGB')\n # size = 32, 32\n # img.thumbnail(size)\n print('origin image mode:', img.mode)\n img = img.convert('RGB')\n print('target image mode:', img.mode)\n # img.show()\n self.baseImage = img\n return None\n except (BaseException,e):\n return str(filePath + \" open error: \" + traceback.format_exc(e))\n\n def createImageWithOriginImage(self, img, imageSize):\n return img.resize((imageSize, imageSize),Image.ANTIALIAS)\n\n def saveImageWithPath(self, img, savePath):\n img.save(savePath)\n\n def createImage(self, savePath, imageSize):\n if self.baseImage == None:\n print('error: self.baseImage == None, please call setOriginImagePath() before createImage()')\n return\n\n try:\n newimg = self.createImageWithOriginImage(self.baseImage, imageSize)\n self.saveImageWithPath(newimg, savePath)\n # print('done')\n except (BaseException,e):\n return 'createImage error: ' + traceback.format_exc(e)\n\ndef main():\n # builder = ResizeImageBuilder()\n # builder.setOriginImagePath(originImagePath)\n # builder.createImage(path1, size1)\n # builder.createImage(path2, size2)\n pass\n\nif __name__ == '__main__':\n main()",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
from string import Template
import os
#-----template objects-----
#for putting a template inside an ifdef guard
TIfGuard = Template("""if(${condition})
${innerbody}
endif()\n""")
#For minimum cmake version and project name
TProjectSettings = Template("""cmake_minimum_required (VERSION ${MinCmakeVer})
project(${Name})
set_property(GLOBAL PROPERTY USE_FOLDERS ${UseFolders})
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)\n""")
#for including a definition
TDefinition = Template("add_definitions(-D${definition})")
#include directories
TIncludeDirectory = Template('include_directories("${dir}")')
#for globbing source files in a dir
TSourceGlob = Template('FILE(GLOB ${source_id} "${dir}/*.c*")')
#for globbing header files in a dir
THeaderGlob = Template('FILE(GLOB ${header_id} "${dir}/*.h*")')
#template for source group (so they appear in VS filters etc.
TSourceGroup = Template('source_group("${folder}" FILES $${${files}})\n')
#for outputting an executable
TExecutable = Template("add_executable(${project} $${SOURCES} $${HEADERS})\n")
#for outputting a shared library
TSharedLib = Template("add_library(${project} SHARED $${SOURCES} $${HEADERS})\n")
#for outputting a static library
TStaticLib = Template("add_library(${project} STATIC $${SOURCES} $${HEADERS})\n")
#for outputting a collection of code files to an object file
TObjectLib = Template("add_library(${project} OBJECT $${SOURCES})")
#template for appending a cmake variable to another cmake variable
TAppendVariable = Template("set( ${var} $${${var}} $${${appendedval}})\n")
#template for appending a python variable to a cmake variable
TAppendPythonVariable = Template("set( ${var} $${${var}} ${appendedval})\n")
#template for setting cmake variable
TMakeVariable = Template('set (${var} ${value})\n')
#template for adding a link directory
TLinkDirectory = Template('link_directories("${dir}")')
#template for targeting link libs
TTargetLinkLibs = Template("""if(NOT LIBS STREQUAL "")
target_link_libraries(${name} $${LIBS})
endif()
""")
#for linking a framework on the mac
TLinkFramework = Template("""find_library(${framework}_LIB ${framework})
MARK_AS_ADVANCED(${framework}_LIB)
set(LIBS $${LIBS} $${${framework}_LIB})""")
#for linking a system library
TLinkSystemLib = Template("""find_package(${framework} REQUIRED)
include_directories($${${framework_upper}_INCLUDE_DIRS})
set(LIBS $${LIBS} $${${framework_upper}_LIBRARIES})""")
#for linking objects into this module
TLinkObject = Template("set(LIBS $${LIBS} $$<TARGET_OBJECTS:${object}>)")
#template for exectuable output
TExecutableOutput = Template('set(EXECUTABLE_OUTPUT_PATH "${dir}")\n')
#template for exectuable output
TRuntimeOutput = Template('set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${dir}")\n')
#template for library output
TLibraryoutput = Template('set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${dir}")\nset(LIBRARY_OUTPUT_PATH "${dir}")\n')
#template for including a submodule
TSubmoduleInclude = Template('add_subdirectory(${dir})')
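#Note on escaping: string.Template expands ${name} and turns "$$" into a literal "$",
#so e.g. TAppendVariable.substitute(dict(var="SOURCES", appendedval="foo_src"))
#renders 'set( SOURCES ${SOURCES} ${foo_src})' - the $${...} forms above survive
#substitution as literal ${...} for CMake itself to expand ("foo_src" is just an example).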
#-----Helper Functions----
def WriteToFile(f, output, condition = False, conditionID = ""):
f.write(output if not condition else WrapInGuard(conditionID, output))
def InsertEnvVariable(s):
return Template(s).substitute(os.environ)
def ContainsEnvVariable(s):
return ("$" in s)
#removes all characters that may cause issues with cmake
#such as ${} characters for environment variables
def Strip(s):
chars = "${}"
for i in range(0,len(chars)):
s=s.replace(chars[i],"")
return s
#-----Write Functions-----
#Puts innerbody into TIfGuard template with the given condition
#then returns the string
def WrapInGuard(condition, innerbody):
return TIfGuard.substitute(dict(condition=condition, innerbody=innerbody))
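#e.g. WrapInGuard("WIN32", 'add_definitions(-DWIN32)\n') returns an
#"if(WIN32) ... endif()" block with the body on the indented middle line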
def WriteProjectSettings(f, section):
#defaults
if "UseFolders" not in section.data: section.data["UseFolders"] = "OFF"
#output
output = TProjectSettings.substitute(section.data)
f.write(output)
#writes required CMAKE variables to the file
def WriteRequiredVariables(f):
#all required variables go here to initialise
variables = [
dict(var="INCLUDES", value='""'),
dict(var="SOURCES", value='""'),
dict(var="LIBS", value='""')
]
#write them to file
for v in variables:
f.write(TMakeVariable.substitute(v))
#definitions such as #defines
def WriteDefinitions(f, sections):
#first write the one which is not platform specific
for s in sections:
defs = s.data[":"]
#gather definitions to be output
output = ""
for d in defs:
output += TDefinition.substitute(dict(definition=d)) + "\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
#project include directories
def WriteIncludeDirectories(f, rootDir, sections):
#first write the one which is not platform specific
for s in sections:
dirs = s.data[":"]
#gather definitions to be output
output = ""
for d in dirs:
localDir = d if d.startswith("/") else "/"+d
headerID = Strip(localDir.replace('/','_'))
#insert any environment variables
if ContainsEnvVariable(d):
d = InsertEnvVariable(d)
else:
d = rootDir + localDir
#add include directory
output = TIncludeDirectory.substitute(dict(dir=d)) + "\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
#glob all header files
output = THeaderGlob.substitute(dict(dir=d, header_id=headerID)) + "\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
#append to HEADERS variable
output = TAppendVariable.substitute(dict(var="HEADERS", appendedval=headerID))
WriteToFile(f,output, s.HasCondition(), s.condition)
#make source group so they appear in filters
localDir = Strip(localDir.replace('/','\\\\'))
output = TSourceGroup.substitute(dict(folder="Header Files" + localDir, files=headerID))
WriteToFile(f,output, s.HasCondition(), s.condition)
#project source directories
def WriteSourceDirectories(f, rootDir, sections):
#first write the one which is not platform specific
for s in sections:
dirs = s.data[":"]
output = ""
for d in dirs:
localDir = d if d.startswith("/") else "/"+d
sourceID = Strip(localDir.replace('/','_'))
#insert any environment variables
if ContainsEnvVariable(d):
d = InsertEnvVariable(d)
else:
d = rootDir + localDir
#glob all source files
output = TSourceGlob.substitute(dict(dir=d, source_id=sourceID)) + "\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
#append globbed source files to SOURCES cmake variable
output = TAppendVariable.substitute(dict(var="SOURCES", appendedval=sourceID))
WriteToFile(f,output, s.HasCondition(), s.condition)
#make source group so they appear in filters
localDir = Strip(localDir.replace('/','\\\\'))
output = TSourceGroup.substitute(dict(folder="Source Files" + localDir, files=sourceID))
WriteToFile(f,output, s.HasCondition(), s.condition)
#includes local library directories
def WriteProjectLibDirectories(f, rootDir, sections):
#first write the one which is not platform specific
for s in sections:
dirs = s.data[":"]
output = ""
for d in dirs:
#insert any environment variables
if ContainsEnvVariable(d):
d = InsertEnvVariable(d)
else:
d = d if d.startswith('/') else "/"+d
d = rootDir + d
#include lib directory
output = TLinkDirectory.substitute(dict(dir=d)) + "\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
#adds all libs to the LIBS cmake var
def WriteLinkLibs(f, rootDir, sections):
#first write the one which is not platform specific
for s in sections:
libs = s.data[":"]
output = ""
for l in libs:
if "-framework" in l:
frameworkName = l.replace("-framework ", "")
frameworkName = frameworkName.strip()
output = TLinkFramework.substitute(dict(framework=frameworkName)) +"\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
elif "-system" in l:
systemLibName = l.replace("-system ", "")
systemLibName = systemLibName.strip()
output = TLinkSystemLib.substitute(dict(framework=systemLibName,framework_upper=systemLibName.upper())) +"\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
elif "-object" in l:
objectLibName = l.replace("-object ", "")
objectLibName = objectLibName.strip()
output = TLinkObject.substitute(dict(object=objectLibName)) +"\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
else:
#add to LIBS cmake var
output = TAppendPythonVariable.substitute(dict(var="LIBS", appendedval=l))
WriteToFile(f,output, s.HasCondition(), s.condition)
#Writes the cmake runtime/lib etc. outputs
def WriteOutputs(f, rootDir, sections):
for s in sections:
if "Executable" in s.data:
runtime = s.data["Executable"]
#insert any environment variables
if ContainsEnvVariable(runtime):
runtime = InsertEnvVariable(runtime)
else:
runtime = runtime if runtime.startswith('/') else "/"+runtime
runtime = rootDir + runtime
output = TRuntimeOutput.substitute(dict(dir=runtime))
WriteToFile(f,output, s.HasCondition(), s.condition)
if "Runtime" in s.data:
runtime = s.data["Runtime"]
#insert any environment variables
if ContainsEnvVariable(runtime):
runtime = InsertEnvVariable(runtime)
else:
runtime = runtime if runtime.startswith('/') else "/"+runtime
runtime = rootDir + runtime
output = TExecutableOutput.substitute(dict(dir=runtime))
WriteToFile(f,output, s.HasCondition(), s.condition)
if "Libs" in s.data:
print("LIBS OUTPUT BEING SET")
statics = s.data["Libs"]
#insert any environment variables
if ContainsEnvVariable(statics):
statics = InsertEnvVariable(statics)
else:
statics = statics if statics.startswith('/') else "/"+statics
statics = rootDir + statics
output = TLibraryoutput.substitute(dict(dir=statics))
WriteToFile(f,output, s.HasCondition(), s.condition)
#Writes the module output section of the CmakeLists file
def WriteModuleOutput(f, rootDir, m):
name = m.settings.data["Name"] #name of lib/exe
t = m.settings.data["Type"] #build type (lib/exe)
if "exe" in t:
f.write(TExecutable.substitute(dict(project=name)))
f.write(TTargetLinkLibs.substitute(dict(name=name)))
elif "shared" in t:
f.write(TSharedLib.substitute(dict(project=name)))
f.write(TTargetLinkLibs.substitute(dict(name=name)))
elif "static" in t:
f.write(TStaticLib.substitute(dict(project=name)))
f.write(TTargetLinkLibs.substitute(dict(name=name)))
elif "object" in t:
f.write(TObjectLib.substitute(dict(project=name)))
f.write(TTargetLinkLibs.substitute(dict(name=name)))
return None
#writes the include for a submodule
def WriteSubmoduleIncludes(f, rootDir, sections):
for s in sections:
submods = s.data[":"]
for sm in submods:
sm = sm if sm.startswith('/') else "/"+sm
output = TSubmoduleInclude.substitute(dict(dir=rootDir+sm)) + "\n"
WriteToFile(f,output, s.HasCondition(), s.condition)
|
normal
|
{
"blob_id": "8cba57e3552e0072720fe42fa1949534f29d71b5",
"index": 1562,
"step-1": "<mask token>\n\n\ndef WriteToFile(f, output, condition=False, conditionID=''):\n f.write(output if not condition else WrapInGuard(conditionID, output))\n\n\n<mask token>\n\n\ndef WrapInGuard(condition, innerbody):\n return TIfGuard.substitute(dict(condition=condition, innerbody=innerbody))\n\n\n<mask token>\n\n\ndef WriteRequiredVariables(f):\n variables = [dict(var='INCLUDES', value='\"\"'), dict(var='SOURCES',\n value='\"\"'), dict(var='LIBS', value='\"\"')]\n for v in variables:\n f.write(TMakeVariable.substitute(v))\n\n\ndef WriteDefinitions(f, sections):\n for s in sections:\n defs = s.data[':']\n output = ''\n for d in defs:\n output += TDefinition.substitute(dict(definition=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\n<mask token>\n\n\ndef WriteProjectLibDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = d if d.startswith('/') else '/' + d\n d = rootDir + d\n output = TLinkDirectory.substitute(dict(dir=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteLinkLibs(f, rootDir, sections):\n for s in sections:\n libs = s.data[':']\n output = ''\n for l in libs:\n if '-framework' in l:\n frameworkName = l.replace('-framework ', '')\n frameworkName = frameworkName.strip()\n output = TLinkFramework.substitute(dict(framework=\n frameworkName)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n elif '-system' in l:\n systemLibName = l.replace('-system ', '')\n systemLibName = systemLibName.strip()\n output = TLinkSystemLib.substitute(dict(framework=\n systemLibName, framework_upper=systemLibName.upper())\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n elif '-object' in l:\n objectLibName = l.replace('-object ', '')\n objectLibName = objectLibName.strip()\n output = TLinkObject.substitute(dict(object=objectLibName)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n else:\n output = TAppendPythonVariable.substitute(dict(var='LIBS',\n appendedval=l))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteOutputs(f, rootDir, sections):\n for s in sections:\n if 'Executable' in s.data:\n runtime = s.data['Executable']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TRuntimeOutput.substitute(dict(dir=runtime))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Runtime' in s.data:\n runtime = s.data['Runtime']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TExecutableOutput.substitute(dict(dir=runtime))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Libs' in s.data:\n print('LIBS OUTPUT BEING SET')\n statics = s.data['Libs']\n if ContainsEnvVariable(statics):\n statics = InsertEnvVariable(statics)\n else:\n statics = statics if statics.startswith('/') else '/' + statics\n statics = rootDir + statics\n output = TLibraryoutput.substitute(dict(dir=statics))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteModuleOutput(f, rootDir, m):\n name = m.settings.data['Name']\n t = m.settings.data['Type']\n if 'exe' in t:\n f.write(TExecutable.substitute(dict(project=name)))\n 
f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'shared' in t:\n f.write(TSharedLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'static' in t:\n f.write(TStaticLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'object' in t:\n f.write(TObjectLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef WriteToFile(f, output, condition=False, conditionID=''):\n f.write(output if not condition else WrapInGuard(conditionID, output))\n\n\n<mask token>\n\n\ndef Strip(s):\n chars = '${}'\n for i in range(0, len(chars)):\n s = s.replace(chars[i], '')\n return s\n\n\ndef WrapInGuard(condition, innerbody):\n return TIfGuard.substitute(dict(condition=condition, innerbody=innerbody))\n\n\ndef WriteProjectSettings(f, section):\n if 'UseFolders' not in section.data:\n section.data['UseFolders'] = 'OFF'\n output = TProjectSettings.substitute(section.data)\n f.write(output)\n\n\ndef WriteRequiredVariables(f):\n variables = [dict(var='INCLUDES', value='\"\"'), dict(var='SOURCES',\n value='\"\"'), dict(var='LIBS', value='\"\"')]\n for v in variables:\n f.write(TMakeVariable.substitute(v))\n\n\ndef WriteDefinitions(f, sections):\n for s in sections:\n defs = s.data[':']\n output = ''\n for d in defs:\n output += TDefinition.substitute(dict(definition=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteIncludeDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n localDir = d if d.startswith('/') else '/' + d\n headerID = Strip(localDir.replace('/', '_'))\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = rootDir + localDir\n output = TIncludeDirectory.substitute(dict(dir=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = THeaderGlob.substitute(dict(dir=d, header_id=headerID)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = TAppendVariable.substitute(dict(var='HEADERS',\n appendedval=headerID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n localDir = Strip(localDir.replace('/', '\\\\\\\\'))\n output = TSourceGroup.substitute(dict(folder='Header Files' +\n localDir, files=headerID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteSourceDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n localDir = d if d.startswith('/') else '/' + d\n sourceID = Strip(localDir.replace('/', '_'))\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = rootDir + localDir\n output = TSourceGlob.substitute(dict(dir=d, source_id=sourceID)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = TAppendVariable.substitute(dict(var='SOURCES',\n appendedval=sourceID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n localDir = Strip(localDir.replace('/', '\\\\\\\\'))\n output = TSourceGroup.substitute(dict(folder='Source Files' +\n localDir, files=sourceID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteProjectLibDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = d if d.startswith('/') else '/' + d\n d = rootDir + d\n output = TLinkDirectory.substitute(dict(dir=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteLinkLibs(f, rootDir, sections):\n for s in sections:\n libs = s.data[':']\n output = ''\n for l in libs:\n if '-framework' in l:\n frameworkName = l.replace('-framework ', '')\n frameworkName = frameworkName.strip()\n output = TLinkFramework.substitute(dict(framework=\n frameworkName)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n elif '-system' in l:\n systemLibName = l.replace('-system ', '')\n 
systemLibName = systemLibName.strip()\n output = TLinkSystemLib.substitute(dict(framework=\n systemLibName, framework_upper=systemLibName.upper())\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n elif '-object' in l:\n objectLibName = l.replace('-object ', '')\n objectLibName = objectLibName.strip()\n output = TLinkObject.substitute(dict(object=objectLibName)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n else:\n output = TAppendPythonVariable.substitute(dict(var='LIBS',\n appendedval=l))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteOutputs(f, rootDir, sections):\n for s in sections:\n if 'Executable' in s.data:\n runtime = s.data['Executable']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TRuntimeOutput.substitute(dict(dir=runtime))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Runtime' in s.data:\n runtime = s.data['Runtime']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TExecutableOutput.substitute(dict(dir=runtime))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Libs' in s.data:\n print('LIBS OUTPUT BEING SET')\n statics = s.data['Libs']\n if ContainsEnvVariable(statics):\n statics = InsertEnvVariable(statics)\n else:\n statics = statics if statics.startswith('/') else '/' + statics\n statics = rootDir + statics\n output = TLibraryoutput.substitute(dict(dir=statics))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteModuleOutput(f, rootDir, m):\n name = m.settings.data['Name']\n t = m.settings.data['Type']\n if 'exe' in t:\n f.write(TExecutable.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'shared' in t:\n f.write(TSharedLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'static' in t:\n f.write(TStaticLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'object' in t:\n f.write(TObjectLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef WriteToFile(f, output, condition=False, conditionID=''):\n f.write(output if not condition else WrapInGuard(conditionID, output))\n\n\ndef InsertEnvVariable(s):\n return Template(s).substitute(os.environ)\n\n\ndef ContainsEnvVariable(s):\n return '$' in s\n\n\ndef Strip(s):\n chars = '${}'\n for i in range(0, len(chars)):\n s = s.replace(chars[i], '')\n return s\n\n\ndef WrapInGuard(condition, innerbody):\n return TIfGuard.substitute(dict(condition=condition, innerbody=innerbody))\n\n\ndef WriteProjectSettings(f, section):\n if 'UseFolders' not in section.data:\n section.data['UseFolders'] = 'OFF'\n output = TProjectSettings.substitute(section.data)\n f.write(output)\n\n\ndef WriteRequiredVariables(f):\n variables = [dict(var='INCLUDES', value='\"\"'), dict(var='SOURCES',\n value='\"\"'), dict(var='LIBS', value='\"\"')]\n for v in variables:\n f.write(TMakeVariable.substitute(v))\n\n\ndef WriteDefinitions(f, sections):\n for s in sections:\n defs = s.data[':']\n output = ''\n for d in defs:\n output += TDefinition.substitute(dict(definition=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteIncludeDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n localDir = d if d.startswith('/') else '/' + d\n headerID = Strip(localDir.replace('/', '_'))\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = rootDir + localDir\n output = TIncludeDirectory.substitute(dict(dir=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = THeaderGlob.substitute(dict(dir=d, header_id=headerID)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = TAppendVariable.substitute(dict(var='HEADERS',\n appendedval=headerID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n localDir = Strip(localDir.replace('/', '\\\\\\\\'))\n output = TSourceGroup.substitute(dict(folder='Header Files' +\n localDir, files=headerID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteSourceDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n localDir = d if d.startswith('/') else '/' + d\n sourceID = Strip(localDir.replace('/', '_'))\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = rootDir + localDir\n output = TSourceGlob.substitute(dict(dir=d, source_id=sourceID)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = TAppendVariable.substitute(dict(var='SOURCES',\n appendedval=sourceID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n localDir = Strip(localDir.replace('/', '\\\\\\\\'))\n output = TSourceGroup.substitute(dict(folder='Source Files' +\n localDir, files=sourceID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteProjectLibDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = d if d.startswith('/') else '/' + d\n d = rootDir + d\n output = TLinkDirectory.substitute(dict(dir=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteLinkLibs(f, rootDir, sections):\n for s in sections:\n libs = s.data[':']\n output = ''\n for l in libs:\n if '-framework' in l:\n frameworkName = l.replace('-framework ', '')\n frameworkName = frameworkName.strip()\n output = TLinkFramework.substitute(dict(framework=\n frameworkName)) + '\\n'\n WriteToFile(f, 
output, s.HasCondition(), s.condition)\n elif '-system' in l:\n systemLibName = l.replace('-system ', '')\n systemLibName = systemLibName.strip()\n output = TLinkSystemLib.substitute(dict(framework=\n systemLibName, framework_upper=systemLibName.upper())\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n elif '-object' in l:\n objectLibName = l.replace('-object ', '')\n objectLibName = objectLibName.strip()\n output = TLinkObject.substitute(dict(object=objectLibName)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n else:\n output = TAppendPythonVariable.substitute(dict(var='LIBS',\n appendedval=l))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteOutputs(f, rootDir, sections):\n for s in sections:\n if 'Executable' in s.data:\n runtime = s.data['Executable']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TRuntimeOutput.substitute(dict(dir=runtime))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Runtime' in s.data:\n runtime = s.data['Runtime']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TExecutableOutput.substitute(dict(dir=runtime))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Libs' in s.data:\n print('LIBS OUTPUT BEING SET')\n statics = s.data['Libs']\n if ContainsEnvVariable(statics):\n statics = InsertEnvVariable(statics)\n else:\n statics = statics if statics.startswith('/') else '/' + statics\n statics = rootDir + statics\n output = TLibraryoutput.substitute(dict(dir=statics))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteModuleOutput(f, rootDir, m):\n name = m.settings.data['Name']\n t = m.settings.data['Type']\n if 'exe' in t:\n f.write(TExecutable.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'shared' in t:\n f.write(TSharedLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'static' in t:\n f.write(TStaticLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'object' in t:\n f.write(TObjectLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n return None\n\n\ndef WriteSubmoduleIncludes(f, rootDir, sections):\n for s in sections:\n submods = s.data[':']\n for sm in submods:\n sm = sm if sm.startswith('/') else '/' + sm\n output = TSubmoduleInclude.substitute(dict(dir=rootDir + sm)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n",
"step-4": "<mask token>\nTIfGuard = Template(\"\"\"if(${condition})\n${innerbody}\nendif()\n\"\"\")\nTProjectSettings = Template(\n \"\"\"cmake_minimum_required (VERSION ${MinCmakeVer})\nproject(${Name})\nset_property(GLOBAL PROPERTY USE_FOLDERS ${UseFolders})\nset(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)\n\"\"\"\n )\nTDefinition = Template('add_definitions(-D${definition})')\nTIncludeDirectory = Template('include_directories(\"${dir}\")')\nTSourceGlob = Template('FILE(GLOB ${source_id} \"${dir}/*.c*\")')\nTHeaderGlob = Template('FILE(GLOB ${header_id} \"${dir}/*.h*\")')\nTSourceGroup = Template('source_group(\"${folder}\" FILES $${${files}})\\n')\nTExecutable = Template('add_executable(${project} $${SOURCES} $${HEADERS})\\n')\nTSharedLib = Template(\n 'add_library(${project} SHARED $${SOURCES} $${HEADERS})\\n')\nTStaticLib = Template(\n 'add_library(${project} STATIC $${SOURCES} $${HEADERS})\\n')\nTObjectLib = Template('add_library(${project} OBJECT $${SOURCES}')\nTAppendVariable = Template('set( ${var} $${${var}} $${${appendedval}})\\n')\nTAppendPythonVariable = Template('set( ${var} $${${var}} ${appendedval})\\n')\nTMakeVariable = Template('set (${var} ${value})\\n')\nTLinkDirectory = Template('link_directories(\"${dir}\")')\nTTargetLinkLibs = Template(\n \"\"\"if(NOT LIBS STREQUAL \"\")\ntarget_link_libraries(${name} $${LIBS})\nendif()\n\"\"\"\n )\nTLinkFramework = Template(\n \"\"\"find_library(${framework}_LIB ${framework})\nMARK_AS_ADVANCED(${framework}_LIB)\nset(LIBS $${LIBS} $${${framework}_LIB})\"\"\"\n )\nTLinkSystemLib = Template(\n \"\"\"find_package(${framework} REQUIRED)\ninclude_directories($${${framework_upper}_INCLUDE_DIRS})\nset(LIBS $${LIBS} $${${framework_upper}_LIBRARIES})\"\"\"\n )\nTLinkObject = Template('set(LIBS $${LIBS} $<TARGET_OBJECTS>:${object})')\nTExecutableOutput = Template('set(EXECUTABLE_OUTPUT_PATH \"${dir}\")\\n')\nTRuntimeOutput = Template('set(CMAKE_RUNTIME_OUTPUT_DIRECTORY \"${dir}\")\\n')\nTLibraryoutput = Template(\n \"\"\"set(CMAKE_LIBRARY_OUTPUT_DIRECTORY \"${dir}\")\nset(LIBRARY_OUTPUT_PATH \"${dir}\")\n\"\"\"\n )\nTSubmoduleInclude = Template('add_subdirectory(${dir})')\n\n\ndef WriteToFile(f, output, condition=False, conditionID=''):\n f.write(output if not condition else WrapInGuard(conditionID, output))\n\n\ndef InsertEnvVariable(s):\n return Template(s).substitute(os.environ)\n\n\ndef ContainsEnvVariable(s):\n return '$' in s\n\n\ndef Strip(s):\n chars = '${}'\n for i in range(0, len(chars)):\n s = s.replace(chars[i], '')\n return s\n\n\ndef WrapInGuard(condition, innerbody):\n return TIfGuard.substitute(dict(condition=condition, innerbody=innerbody))\n\n\ndef WriteProjectSettings(f, section):\n if 'UseFolders' not in section.data:\n section.data['UseFolders'] = 'OFF'\n output = TProjectSettings.substitute(section.data)\n f.write(output)\n\n\ndef WriteRequiredVariables(f):\n variables = [dict(var='INCLUDES', value='\"\"'), dict(var='SOURCES',\n value='\"\"'), dict(var='LIBS', value='\"\"')]\n for v in variables:\n f.write(TMakeVariable.substitute(v))\n\n\ndef WriteDefinitions(f, sections):\n for s in sections:\n defs = s.data[':']\n output = ''\n for d in defs:\n output += TDefinition.substitute(dict(definition=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteIncludeDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n localDir = d if d.startswith('/') else '/' + d\n headerID = Strip(localDir.replace('/', '_'))\n if ContainsEnvVariable(d):\n d = 
InsertEnvVariable(d)\n else:\n d = rootDir + localDir\n output = TIncludeDirectory.substitute(dict(dir=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = THeaderGlob.substitute(dict(dir=d, header_id=headerID)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = TAppendVariable.substitute(dict(var='HEADERS',\n appendedval=headerID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n localDir = Strip(localDir.replace('/', '\\\\\\\\'))\n output = TSourceGroup.substitute(dict(folder='Header Files' +\n localDir, files=headerID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteSourceDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n localDir = d if d.startswith('/') else '/' + d\n sourceID = Strip(localDir.replace('/', '_'))\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = rootDir + localDir\n output = TSourceGlob.substitute(dict(dir=d, source_id=sourceID)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n output = TAppendVariable.substitute(dict(var='SOURCES',\n appendedval=sourceID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n localDir = Strip(localDir.replace('/', '\\\\\\\\'))\n output = TSourceGroup.substitute(dict(folder='Source Files' +\n localDir, files=sourceID))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteProjectLibDirectories(f, rootDir, sections):\n for s in sections:\n dirs = s.data[':']\n output = ''\n for d in dirs:\n if ContainsEnvVariable(d):\n d = InsertEnvVariable(d)\n else:\n d = d if d.startswith('/') else '/' + d\n d = rootDir + d\n output = TLinkDirectory.substitute(dict(dir=d)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteLinkLibs(f, rootDir, sections):\n for s in sections:\n libs = s.data[':']\n output = ''\n for l in libs:\n if '-framework' in l:\n frameworkName = l.replace('-framework ', '')\n frameworkName = frameworkName.strip()\n output = TLinkFramework.substitute(dict(framework=\n frameworkName)) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n elif '-system' in l:\n systemLibName = l.replace('-system ', '')\n systemLibName = systemLibName.strip()\n output = TLinkSystemLib.substitute(dict(framework=\n systemLibName, framework_upper=systemLibName.upper())\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n elif '-object' in l:\n objectLibName = l.replace('-object ', '')\n objectLibName = objectLibName.strip()\n output = TLinkObject.substitute(dict(object=objectLibName)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n else:\n output = TAppendPythonVariable.substitute(dict(var='LIBS',\n appendedval=l))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteOutputs(f, rootDir, sections):\n for s in sections:\n if 'Executable' in s.data:\n runtime = s.data['Executable']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TRuntimeOutput.substitute(dict(dir=runtime))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Runtime' in s.data:\n runtime = s.data['Runtime']\n if ContainsEnvVariable(runtime):\n runtime = InsertEnvVariable(runtime)\n else:\n runtime = runtime if runtime.startswith('/') else '/' + runtime\n runtime = rootDir + runtime\n output = TExecutableOutput.substitute(dict(dir=runtime))\n 
WriteToFile(f, output, s.HasCondition(), s.condition)\n if 'Libs' in s.data:\n print('LIBS OUTPUT BEING SET')\n statics = s.data['Libs']\n if ContainsEnvVariable(statics):\n statics = InsertEnvVariable(statics)\n else:\n statics = statics if statics.startswith('/') else '/' + statics\n statics = rootDir + statics\n output = TLibraryoutput.substitute(dict(dir=statics))\n WriteToFile(f, output, s.HasCondition(), s.condition)\n\n\ndef WriteModuleOutput(f, rootDir, m):\n name = m.settings.data['Name']\n t = m.settings.data['Type']\n if 'exe' in t:\n f.write(TExecutable.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'shared' in t:\n f.write(TSharedLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'static' in t:\n f.write(TStaticLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n elif 'object' in t:\n f.write(TObjectLib.substitute(dict(project=name)))\n f.write(TTargetLinkLibs.substitute(dict(name=name)))\n return None\n\n\ndef WriteSubmoduleIncludes(f, rootDir, sections):\n for s in sections:\n submods = s.data[':']\n for sm in submods:\n sm = sm if sm.startswith('/') else '/' + sm\n output = TSubmoduleInclude.substitute(dict(dir=rootDir + sm)\n ) + '\\n'\n WriteToFile(f, output, s.HasCondition(), s.condition)\n",
"step-5": "from string import Template\nimport os\n\n#-----template objects-----\n\n#for putting a template inside an ifdef guard\nTIfGuard = Template(\"\"\"if(${condition})\n${innerbody}\nendif()\\n\"\"\")\n\n#For minimum cmake version and project name\nTProjectSettings = Template(\"\"\"cmake_minimum_required (VERSION ${MinCmakeVer})\nproject(${Name})\nset_property(GLOBAL PROPERTY USE_FOLDERS ${UseFolders})\nset(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)\\n\"\"\")\n\n\n#for including a definition\nTDefinition = Template(\"add_definitions(-D${definition})\")\n\n#include directories\nTIncludeDirectory = Template('include_directories(\"${dir}\")')\n\n#for globbing source files in a dir\nTSourceGlob = Template('FILE(GLOB ${source_id} \"${dir}/*.c*\")')\n\n#for globbing header files in a dir\nTHeaderGlob = Template('FILE(GLOB ${header_id} \"${dir}/*.h*\")')\n\n#template for source group (so they appear in VS filters etc.\nTSourceGroup = Template('source_group(\"${folder}\" FILES $${${files}})\\n')\n\n#for outputting an executable\nTExecutable = Template(\"add_executable(${project} $${SOURCES} $${HEADERS})\\n\")\n\n#for outputting a shared library\nTSharedLib = Template(\"add_library(${project} SHARED $${SOURCES} $${HEADERS})\\n\")\n\n#for outputting a static library\nTStaticLib = Template(\"add_library(${project} STATIC $${SOURCES} $${HEADERS})\\n\")\n\n#for outputting a collection of code files to an object file\nTObjectLib = Template(\"add_library(${project} OBJECT $${SOURCES}\")\n\n#template for appending a cmake variable to another cmake variable\nTAppendVariable = Template(\"set( ${var} $${${var}} $${${appendedval}})\\n\")\n\n#template for appending a python variable to a cmake variable\nTAppendPythonVariable = Template(\"set( ${var} $${${var}} ${appendedval})\\n\")\n\n#template for setting cmake variable\nTMakeVariable = Template('set (${var} ${value})\\n')\n\n#template for adding a link directory\nTLinkDirectory = Template('link_directories(\"${dir}\")')\n\n#template for targeting link libs\nTTargetLinkLibs = Template(\"\"\"if(NOT LIBS STREQUAL \"\")\ntarget_link_libraries(${name} $${LIBS})\nendif()\n\"\"\")\n\n#for linking a framework on the mac\nTLinkFramework = Template(\"\"\"find_library(${framework}_LIB ${framework})\nMARK_AS_ADVANCED(${framework}_LIB)\nset(LIBS $${LIBS} $${${framework}_LIB})\"\"\")\n\n#for linking a system library\nTLinkSystemLib = Template(\"\"\"find_package(${framework} REQUIRED)\ninclude_directories($${${framework_upper}_INCLUDE_DIRS})\nset(LIBS $${LIBS} $${${framework_upper}_LIBRARIES})\"\"\")\n\n#for linking objects into this module\nTLinkObject = Template(\"set(LIBS $${LIBS} $<TARGET_OBJECTS>:${object})\")\n\n#template for exectuable output\nTExecutableOutput = Template('set(EXECUTABLE_OUTPUT_PATH \"${dir}\")\\n')\n\n#template for exectuable output\nTRuntimeOutput = Template('set(CMAKE_RUNTIME_OUTPUT_DIRECTORY \"${dir}\")\\n')\n\n#template for library output\nTLibraryoutput = Template('set(CMAKE_LIBRARY_OUTPUT_DIRECTORY \"${dir}\")\\nset(LIBRARY_OUTPUT_PATH \"${dir}\")\\n')\n\n#template for including a submodule\nTSubmoduleInclude = Template('add_subdirectory(${dir})')\n\n#-----Helper Functions----\ndef WriteToFile(f, output, condition = False, conditionID = \"\"):\n\tf.write(output if not condition else WrapInGuard(conditionID, output))\n\ndef InsertEnvVariable(s):\n\treturn Template(s).substitute(os.environ)\n\ndef ContainsEnvVariable(s):\n\treturn (\"$\" in s)\n\n#removes all characters that may cause issues with cmake\n#such as ${} characters for 
environment variables\ndef Strip(s):\n\tchars = \"${}\"\n\tfor i in range(0,len(chars)):\n\t\ts=s.replace(chars[i],\"\")\n\treturn s\n\n#-----Write Functions-----\n#Puts innerbody into TIfGuard template with the given condition\n#then returns the string\ndef WrapInGuard(condition, innerbody):\n\treturn TIfGuard.substitute(dict(condition=condition, innerbody=innerbody))\n\t\ndef WriteProjectSettings(f, section):\n\t#defaults\n\tif \"UseFolders\" not in section.data: section.data[\"UseFolders\"] = \"OFF\"\n\t\n\t#output\n\toutput = TProjectSettings.substitute(section.data)\n\tf.write(output)\n\t\n#writes required CMAKE variables to the file\ndef WriteRequiredVariables(f):\n\t#all required variables go here to initialise\n\tvariables = [\n\t\tdict(var=\"INCLUDES\", value='\"\"'), \n\t\tdict(var=\"SOURCES\", value='\"\"'), \n\t\tdict(var=\"LIBS\", value='\"\"') \n\t\t]\n\t\n\t#write them to file\t\n\tfor v in variables:\n\t\tf.write(TMakeVariable.substitute(v))\n\t\n#definitions such as #defines \t\ndef WriteDefinitions(f, sections):\n\t#first write the one which is not platform specific\n\tfor s in sections:\n\t\tdefs = s.data[\":\"]\n\t\t\n\t\t#gather definitions to be output\n\t\toutput = \"\"\n\t\tfor d in defs:\n\t\t\toutput += TDefinition.substitute(dict(definition=d)) + \"\\n\"\n\t\t\n\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\n#project include directories\ndef WriteIncludeDirectories(f, rootDir, sections):\n\t#first write the one which is not platform specific\n\tfor s in sections:\n\t\tdirs = s.data[\":\"]\n\t\t\n\t\t#gather definitions to be output\n\t\toutput = \"\"\n\t\tfor d in dirs:\n\t\t\tlocalDir = d if d.startswith(\"/\") else \"/\"+d\n\t\t\theaderID = Strip(localDir.replace('/','_'))\n\t\t\t\n\t\t\t#insert any environment variables\n\t\t\tif ContainsEnvVariable(d):\n\t\t\t\td = InsertEnvVariable(d)\n\t\t\telse:\n\t\t\t\td = rootDir + localDir\n\t\t\t\t\n\t\t\t#add include directory\n\t\t\toutput = TIncludeDirectory.substitute(dict(dir=d)) + \"\\n\"\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\t#glob all header files\n\t\t\toutput = THeaderGlob.substitute(dict(dir=d, header_id=headerID)) + \"\\n\"\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\t#append to HEADERS variable\n\t\t\toutput = TAppendVariable.substitute(dict(var=\"HEADERS\", appendedval=headerID))\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\t#make source group so they appear in filters\n\t\t\tlocalDir = Strip(localDir.replace('/','\\\\\\\\'))\n\t\t\toutput = TSourceGroup.substitute(dict(folder=\"Header Files\" + localDir, files=headerID))\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\n#project source directories\ndef WriteSourceDirectories(f, rootDir, sections):\n\t#first write the one which is not platform specific\n\tfor s in sections:\n\t\tdirs = s.data[\":\"]\n\n\t\toutput = \"\"\n\t\tfor d in dirs:\n\t\t\tlocalDir = d if d.startswith(\"/\") else \"/\"+d\n\t\t\tsourceID = Strip(localDir.replace('/','_'))\n\t\t\t\n\t\t\t#insert any environment variables\n\t\t\tif ContainsEnvVariable(d):\n\t\t\t\td = InsertEnvVariable(d)\n\t\t\telse:\n\t\t\t\td = rootDir + localDir\n\t\t\t\t\n\t\t\t#glob all source files\n\t\t\toutput = TSourceGlob.substitute(dict(dir=d, source_id=sourceID)) + \"\\n\"\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\t#append globbed source files to SOURCES cmake variable\n\t\t\toutput = TAppendVariable.substitute(dict(var=\"SOURCES\", 
appendedval=sourceID))\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\t#make source group so they appear in filters\n\t\t\tlocalDir = Strip(localDir.replace('/','\\\\\\\\'))\n\t\t\toutput = TSourceGroup.substitute(dict(folder=\"Source Files\" + localDir, files=sourceID))\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\n#includes local library directories \ndef WriteProjectLibDirectories(f, rootDir, sections):\n\t#first write the one which is not platform specific\n\tfor s in sections:\n\t\tdirs = s.data[\":\"]\n\n\t\toutput = \"\"\n\t\tfor d in dirs:\n\t\t\t#insert any environment variables\n\t\t\tif ContainsEnvVariable(d):\n\t\t\t\td = InsertEnvVariable(d)\n\t\t\telse:\n\t\t\t\td = d if d.startswith('/') else \"/\"+d\n\t\t\t\td = rootDir + d\n\t\t\t\t\n\t\t\t#include lib directory\n\t\t\toutput = TLinkDirectory.substitute(dict(dir=d)) + \"\\n\"\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\n#adds all libs to the LIBS cmake var\ndef WriteLinkLibs(f, rootDir, sections):\n\t#first write the one which is not platform specific\n\tfor s in sections:\n\t\tlibs = s.data[\":\"]\n\n\t\toutput = \"\"\n\t\tfor l in libs:\n\t\t\tif \"-framework\" in l:\n\t\t\t\tframeworkName = l.replace(\"-framework \", \"\")\n\t\t\t\tframeworkName = frameworkName.strip()\n\t\t\t\t\n\t\t\t\toutput = TLinkFramework.substitute(dict(framework=frameworkName)) +\"\\n\"\n\t\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\t\n\t\t\telif \"-system\" in l:\n\t\t\t\tsystemLibName = l.replace(\"-system \", \"\")\n\t\t\t\tsystemLibName = systemLibName.strip()\n\t\t\t\t\n\t\t\t\toutput = TLinkSystemLib.substitute(dict(framework=systemLibName,framework_upper=systemLibName.upper())) +\"\\n\"\n\t\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\telif \"-object\" in l:\n\t\t\t\tobjectLibName = l.replace(\"-object \", \"\")\n\t\t\t\tobjectLibName = objectLibName.strip()\n\t\t\t\t\n\t\t\t\toutput = TLinkObject.substitute(dict(object=objectLibName)) +\"\\n\"\n\t\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\telse:\n\t\t\t\t#add to LIBS cmake var\n\t\t\t\toutput = TAppendPythonVariable.substitute(dict(var=\"LIBS\", appendedval=l))\n\t\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\t\n\t\t\t\n#Writes the cmake runtime/lib etc. 
outputs\ndef WriteOutputs(f, rootDir, sections):\n\tfor s in sections:\n\t\tif \"Executable\" in s.data:\n\t\t\truntime = s.data[\"Executable\"]\n\t\t\t#insert any environment variables\n\t\t\tif ContainsEnvVariable(runtime):\n\t\t\t\truntime = InsertEnvVariable(runtime)\n\t\t\telse:\n\t\t\t\truntime = runtime if runtime.startswith('/') else \"/\"+runtime\n\t\t\t\truntime = rootDir + runtime\n\t\t\toutput = TRuntimeOutput.substitute(dict(dir=runtime))\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\tif \"Runtime\" in s.data:\n\t\t\truntime = s.data[\"Runtime\"]\n\t\t\t#insert any environment variables\n\t\t\tif ContainsEnvVariable(runtime):\n\t\t\t\truntime = InsertEnvVariable(runtime)\n\t\t\telse:\n\t\t\t\truntime = runtime if runtime.startswith('/') else \"/\"+runtime\n\t\t\t\truntime = rootDir + runtime\n\t\t\toutput = TExecutableOutput.substitute(dict(dir=runtime))\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\tif \"Libs\" in s.data:\n\t\t\tprint(\"LIBS OUTPUT BEING SET\")\n\t\t\tstatics = s.data[\"Libs\"]\n\t\t\t#insert any environment variables\n\t\t\tif ContainsEnvVariable(statics):\n\t\t\t\tstatics = InsertEnvVariable(statics)\n\t\t\telse:\n\t\t\t\tstatics = statics if statics.startswith('/') else \"/\"+statics\n\t\t\t\tstatics = rootDir + statics\n\t\t\toutput = TLibraryoutput.substitute(dict(dir=statics))\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)\n\t\t\t\n\t\t\t\n#Writes the module output section of the CmakeLists file\ndef WriteModuleOutput(f, rootDir, m):\n\tname = m.settings.data[\"Name\"]\t#name of lib/exe\n\tt = m.settings.data[\"Type\"]\t#build type (lib/exe)\n\tif \"exe\" in t:\n\t\tf.write(TExecutable.substitute(dict(project=name)))\n\t\tf.write(TTargetLinkLibs.substitute(dict(name=name)))\n\telif \"shared\" in t:\n\t\tf.write(TSharedLib.substitute(dict(project=name)))\n\t\tf.write(TTargetLinkLibs.substitute(dict(name=name)))\n\telif \"static\" in t:\n\t\tf.write(TStaticLib.substitute(dict(project=name)))\n\t\tf.write(TTargetLinkLibs.substitute(dict(name=name)))\n\telif \"object\" in t:\n\t\tf.write(TObjectLib.substitute(dict(project=name)))\n\t\tf.write(TTargetLinkLibs.substitute(dict(name=name)))\n\treturn None\n\t\n\n#writes the include for a submodule\ndef WriteSubmoduleIncludes(f, rootDir, sections):\n\tfor s in sections:\n\t\tsubmods = s.data[\":\"]\n\t\t\n\t\tfor sm in submods:\n\t\t\tsm = sm if sm.startswith('/') else \"/\"+sm\n\t\t\t\n\t\t\toutput = TSubmoduleInclude.substitute(dict(dir=rootDir+sm)) + \"\\n\"\n\t\t\tWriteToFile(f,output, s.HasCondition(), s.condition)",
"step-ids": [
8,
12,
15,
16,
18
]
}
|
[
8,
12,
15,
16,
18
] |
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from interface_app.models import TestTask, TestCase
from interface_app.extend.task_run import run_cases
import os
import json
from interface_app.apps import TASK_PATH, RUN_TASK_FILE
"""
Interface task views: return HTML pages.
"""
# Get the task list
def task_manage(request):
testtasks = TestTask.objects.all()
if request.method == "GET":
return render(request, "task_manage.html", {
"type": "list",
"testtasks": testtasks,
})
else:
return HttpResponse("404")
# Create a task
def add_task(request):
if request.method == "GET":
return render(request, "add_task.html", {
"type": "add",
})
else:
return HttpResponse("404")
# Run a task
def run_task(request, tid):
if request.method == "GET":
task_obj = TestTask.objects.get(id=tid)
cases_list = task_obj.cases.split(",")
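        # the stored id string ends with a comma, so drop the trailing empty element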
cases_list.pop(-1)
        task_obj.status = 1  # update the task status
task_obj.save()
print(cases_list)
        # run_cases()  # runner function
all_cases_dict = {}
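        # flatten each selected TestCase into a plain dict keyed by its id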
for case_id in cases_list:
case_obj = TestCase.objects.get(id=case_id)
case_dict = {
"url": case_obj.url,
"method": case_obj.req_method,
"type_": case_obj.req_type,
"header": case_obj.req_header,
"parameter": case_obj.req_parameter,
"assert_": case_obj.resp_assert
}
all_cases_dict[case_obj.id] = case_dict
print(all_cases_dict)
cases_str = json.dumps(all_cases_dict)
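        # hand the cases to the external runner through a JSON file on disk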
cases_data_file = TASK_PATH + "cases_data.json"
print(cases_data_file)
with open(cases_data_file, "w+") as f:
f.write(cases_str)
        # run the tests
os.system("python3 " + RUN_TASK_FILE)
return HttpResponseRedirect("/interface/task_manage")
else:
return HttpResponse("404")
# How do we run these cases? -- a unit test framework + data-driven testing
# unittest + ddt
|
normal
|
{
"blob_id": "8be70543a7aa177d9ad48fb736228b1ffba5df16",
"index": 6179,
"step-1": "<mask token>\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-2": "<mask token>\n\n\ndef add_task(request):\n if request.method == 'GET':\n return render(request, 'add_task.html', {'type': 'add'})\n else:\n return HttpResponse('404')\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-3": "<mask token>\n\n\ndef task_manage(request):\n testtasks = TestTask.objects.all()\n if request.method == 'GET':\n return render(request, 'task_manage.html', {'type': 'list',\n 'testtasks': testtasks})\n else:\n return HttpResponse('404')\n\n\ndef add_task(request):\n if request.method == 'GET':\n return render(request, 'add_task.html', {'type': 'add'})\n else:\n return HttpResponse('404')\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom interface_app.models import TestTask, TestCase\nfrom interface_app.extend.task_run import run_cases\nimport os\nimport json\nfrom interface_app.apps import TASK_PATH, RUN_TASK_FILE\n<mask token>\n\n\ndef task_manage(request):\n testtasks = TestTask.objects.all()\n if request.method == 'GET':\n return render(request, 'task_manage.html', {'type': 'list',\n 'testtasks': testtasks})\n else:\n return HttpResponse('404')\n\n\ndef add_task(request):\n if request.method == 'GET':\n return render(request, 'add_task.html', {'type': 'add'})\n else:\n return HttpResponse('404')\n\n\ndef run_task(request, tid):\n if request.method == 'GET':\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(',')\n cases_list.pop(-1)\n task_obj.status = 1\n task_obj.save()\n print(cases_list)\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {'url': case_obj.url, 'method': case_obj.req_method,\n 'type_': case_obj.req_type, 'header': case_obj.req_header,\n 'parameter': case_obj.req_parameter, 'assert_': case_obj.\n resp_assert}\n all_cases_dict[case_obj.id] = case_dict\n print(all_cases_dict)\n cases_str = json.dumps(all_cases_dict)\n cases_data_file = TASK_PATH + 'cases_data.json'\n print(cases_data_file)\n with open(cases_data_file, 'w+') as f:\n f.write(cases_str)\n os.system('python3 ' + RUN_TASK_FILE)\n return HttpResponseRedirect('/interface/task_manage')\n else:\n return HttpResponse('404')\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom interface_app.models import TestTask, TestCase\nfrom interface_app.extend.task_run import run_cases\nimport os \nimport json\nfrom interface_app.apps import TASK_PATH, RUN_TASK_FILE\n\n\n\"\"\"\n说明:接口任务文件,返回HTML页面\n\"\"\"\n\n# 获取任务列表\ndef task_manage(request):\n testtasks = TestTask.objects.all()\n \n if request.method == \"GET\":\n return render(request, \"task_manage.html\", {\n \"type\": \"list\",\n \"testtasks\": testtasks,\n })\n else:\n return HttpResponse(\"404\")\n\n\n# 创建任务\ndef add_task(request):\n if request.method == \"GET\":\n return render(request, \"add_task.html\", {\n \"type\": \"add\",\n })\n else:\n return HttpResponse(\"404\")\n\n\n# 运行任务\ndef run_task(request, tid):\n if request.method == \"GET\":\n task_obj = TestTask.objects.get(id=tid)\n cases_list = task_obj.cases.split(\",\")\n cases_list.pop(-1)\n\n task_obj.status = 1 # 修改状态\n task_obj.save()\n\n \n print(cases_list)\n # run_cases() #运行函数\n all_cases_dict = {}\n for case_id in cases_list:\n case_obj = TestCase.objects.get(id=case_id)\n case_dict = {\n \"url\": case_obj.url,\n \"method\": case_obj.req_method,\n \"type_\": case_obj.req_type,\n \"header\": case_obj.req_header,\n \"parameter\": case_obj.req_parameter,\n \"assert_\": case_obj.resp_assert\n } \n all_cases_dict[case_obj.id] = case_dict\n\n print(all_cases_dict)\n\n cases_str = json.dumps(all_cases_dict)\n\n cases_data_file = TASK_PATH + \"cases_data.json\"\n print(cases_data_file)\n\n with open(cases_data_file, \"w+\") as f:\n f.write(cases_str)\n\n # 运行测试\n os.system(\"python3 \" + RUN_TASK_FILE)\n \n return HttpResponseRedirect(\"/interface/task_manage\")\n else:\n return HttpResponse(\"404\")\n\n\n# 如何去运行这些用例?--单元测试框架 + 数据驱动\n\n# unittest + ddt\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import os
#defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":50}
defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":"POP_SIZE"}
conditions = [{},{"K":10}, {"N":100, "MUT_RATE":.01}, {"MUT_RATE":.005}, {"MUT_RATE": .1}, {"POP_SIZE":20}, {"POP_SIZE":2000}, {"SELECTION":1}, {"SELECTION":1, "FILTER_LENGTH":1000}, {"CHANGE_RATE":500}, {"CHANGE_RATE":500, "CHANGE_TYPE":1}]
seed = 0
for condition in conditions:
print(condition)
command = ["./nk_oee -MODES_RESOLUTION 10 -SEED", seed]
dir_name = []
for var in defaults:
if var not in condition:
condition[var] = defaults[var]
for var in condition:
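        # resolve aliases: a value that names another key (e.g. "FILTER_LENGTH": "POP_SIZE") takes that key's value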
while condition[var] in condition:
condition[var] = condition[condition[var]]
command.append("-"+var)
dir_name.append("".join(var.split("_"))) # Underscores in variable names will screw up parsing later
val = str(condition[var])
command.append(val)
dir_name.append(val)
str_dir_name = "_".join(dir_name)
if not os.path.exists(str_dir_name):
os.mkdir(str_dir_name)
for i in range(30):
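        # 30 replicates per condition; replicates with an existing command file are skipped, so batches can be resumed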
if os.path.exists(str_dir_name+"/"+str(i)+"/command.sh"):
continue
seed += 1
command[1] = str(seed)
print(command)
os.mkdir(str_dir_name+"/"+str(i))
with open(str_dir_name+"/"+str(i)+"/command.sh", "w") as infile:
infile.write(" ".join(command))
|
normal
|
{
"blob_id": "a826f33361ec59824f3c4a83d01e94c6b307b0a9",
"index": 9144,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-3": "<mask token>\ndefaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,\n 'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':\n 5000, 'FILTER_LENGTH': 'POP_SIZE'}\nconditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE': \n 0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {\n 'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {\n 'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]\nseed = 0\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-4": "import os\ndefaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,\n 'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':\n 5000, 'FILTER_LENGTH': 'POP_SIZE'}\nconditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE': \n 0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {\n 'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {\n 'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]\nseed = 0\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-5": "import os\n\n\n#defaults = {\"N\":20, \"K\":3, \"POP_SIZE\":200, \"MUT_RATE\":.05, \"TOURNAMENT_SIZE\":2, \"SELECTION\":0, \"CHANGE_RATE\":100000, \"MAX_GENS\": 5000, \"FILTER_LENGTH\":50}\ndefaults = {\"N\":20, \"K\":3, \"POP_SIZE\":200, \"MUT_RATE\":.05, \"TOURNAMENT_SIZE\":2, \"SELECTION\":0, \"CHANGE_RATE\":100000, \"MAX_GENS\": 5000, \"FILTER_LENGTH\":\"POP_SIZE\"}\nconditions = [{},{\"K\":10}, {\"N\":100, \"MUT_RATE\":.01}, {\"MUT_RATE\":.005}, {\"MUT_RATE\": .1}, {\"POP_SIZE\":20}, {\"POP_SIZE\":2000}, {\"SELECTION\":1}, {\"SELECTION\":1, \"FILTER_LENGTH\":1000}, {\"CHANGE_RATE\":500}, {\"CHANGE_RATE\":500, \"CHANGE_TYPE\":1}]\n\nseed = 0\n\nfor condition in conditions:\n print(condition)\n command = [\"./nk_oee -MODES_RESOLUTION 10 -SEED\", seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n\n command.append(\"-\"+var)\n dir_name.append(\"\".join(var.split(\"_\"))) # Underscores in variable names will screw up parsing later\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n\n \n str_dir_name = \"_\".join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n \n for i in range(30):\n if os.path.exists(str_dir_name+\"/\"+str(i)+\"/command.sh\"):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name+\"/\"+str(i))\n with open(str_dir_name+\"/\"+str(i)+\"/command.sh\", \"w\") as infile:\n infile.write(\" \".join(command))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
a = 1
b = a
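# b now holds the same value as a; rebinding a below does not change b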
print(a)
print(b)
a = 2
print(a)
print(b)
# By convention, all-uppercase names denote constants
USER_NAME = "常量"
print(USER_NAME)
print(USER_NAME)
|
normal
|
{
"blob_id": "1cc9a7bbe1bda06ce76fa8ec1cdc17c7b2fde73b",
"index": 4051,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(a)\nprint(b)\n<mask token>\nprint(a)\nprint(b)\n<mask token>\nprint(USER_NAME)\nprint(USER_NAME)\n",
"step-3": "a = 1\nb = a\nprint(a)\nprint(b)\na = 2\nprint(a)\nprint(b)\nUSER_NAME = '常量'\nprint(USER_NAME)\nprint(USER_NAME)\n",
"step-4": "\na = 1\nb = a\nprint(a)\nprint(b)\n\na = 2\nprint(a)\nprint(b)\n\n# 全部大写字符代表常量\n\nUSER_NAME = \"常量\"\nprint(USER_NAME)\n\nprint(USER_NAME)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import pickle
import select
import socket
import sys
from threading import Thread
from typing import Dict, Tuple
import pygame
from pygame.locals import *
import c
from models import *
class Game:
    location: list[float] = [c.WIDTH / 2, c.HEIGHT / 2]  # list takes a single type parameter
    velocity: list[float] = [0, 0]
current_player: Player = None
other_players: Dict[str, Tuple[Player, Tuple[int, int]]] = {}
connection: socket.socket
font: pygame.font.Font
def __init__(self):
pygame.init()
self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.screen = pygame.display.set_mode((c.WIDTH, c.HEIGHT))
pygame.display.set_caption('Socket Game')
self.clock = pygame.time.Clock()
self.screen.fill('white')
self.font = pygame.font.SysFont(None, c.FONT_SIZE)
def start(self):
self.connect_to_server()
while True:
self.game_loop()
def connect_to_server(self):
self.connection.connect((c.HOST, c.PORT))
def listen_to_server(self):
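        # timeout=0 makes select() non-blocking, so polling never stalls the frame loop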
ins, outs, ex = select.select([self.connection], [], [], 0)
for inm in ins:
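            # assumes each recv() delivers exactly one whole pickled Event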
received_data = inm.recv(c.BUFFSIZE)
event: Event = pickle.loads(received_data)
print("<<<", event)
if isinstance(event, CurrentPlayerEvent):
pygame.display.set_caption(f'Socket Game - {event.player.nickname}')
self.current_player = event.player
elif isinstance(event, PlayerDidMoveEvent):
self.update_player(event.player, event.location)
elif isinstance(event, PlayerJoinedEvent):
self.update_player(event.player)
def update_player(self, player: Player, location=(c.WIDTH / 2, c.HEIGHT / 2)):
self.other_players[player.nickname] = (player, location)
def update_server(self):
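        # push our current position to the server every frame as a pickled event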
if self.current_player is not None:
self.connection.send(pickle.dumps(PlayerDidMoveEvent(self.current_player, (
self.location[0], self.location[1],
))))
def game_loop(self):
self.listen_to_server()
self.event_handling()
self.update_location()
self.render()
self.update_server()
self.clock.tick(60)
def update_location(self):
oldx, oldy = self.location
vx, vy = self.velocity
newx, newy = oldx + vx, oldy + vy
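        # clamp so the player square stays fully inside the window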
if newx > c.WIDTH - c.PLAYER_SIZE:
newx = c.WIDTH - c.PLAYER_SIZE
if newx < 0:
newx = 0
if newy > c.HEIGHT - c.PLAYER_SIZE:
newy = c.HEIGHT - c.PLAYER_SIZE
if newy < 0:
newy = 0
self.location = [newx, newy]
def render_player(self, player: Player, location: Tuple[int, int]):
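        # draw the player's colored square with the nickname label just above it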
x, y = location
img = self.font.render(player.nickname, True, player.color)
pygame.draw.rect(self.screen, player.color, (x, y, c.PLAYER_SIZE, c.PLAYER_SIZE))
self.screen.blit(img, (x, y - img.get_height()))
def render(self):
self.screen.fill((255, 255, 255))
if self.current_player is not None:
self.render_player(self.current_player, (self.location[0], self.location[1]))
for nickname, (player, location) in self.other_players.items():
self.render_player(player, location)
pygame.display.flip()
def event_handling(self):
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
if event.type == KEYDOWN:
if event.key == K_LEFT: self.velocity[0] = -c.MOVEMENT_SPEED
if event.key == K_RIGHT: self.velocity[0] = c.MOVEMENT_SPEED
if event.key == K_UP: self.velocity[1] = -c.MOVEMENT_SPEED
if event.key == K_DOWN: self.velocity[1] = c.MOVEMENT_SPEED
if event.type == KEYUP:
if event.key == K_LEFT: self.velocity[0] = 0
if event.key == K_RIGHT: self.velocity[0] = 0
if event.key == K_UP: self.velocity[1] = 0
if event.key == K_DOWN: self.velocity[1] = 0
if __name__ == "__main__":
s = Game()
s.start()
|
normal
|
{
"blob_id": "418798369578e80ecbf82da802b23dc6ca922569",
"index": 7107,
"step-1": "<mask token>\n\n\nclass Game:\n location: list[int, int] = [c.WIDTH / 2, c.HEIGHT / 2]\n velocity: list[int, int] = [0, 0]\n current_player: Player = None\n other_players: Dict[str, Tuple[Player, Tuple[int, int]]] = {}\n connection: socket.socket\n font: pygame.font.Font\n <mask token>\n <mask token>\n\n def connect_to_server(self):\n self.connection.connect((c.HOST, c.PORT))\n\n def listen_to_server(self):\n ins, outs, ex = select.select([self.connection], [], [], 0)\n for inm in ins:\n received_data = inm.recv(c.BUFFSIZE)\n event: Event = pickle.loads(received_data)\n print('<<<', event)\n if isinstance(event, CurrentPlayerEvent):\n pygame.display.set_caption(\n f'Socket Game - {event.player.nickname}')\n self.current_player = event.player\n elif isinstance(event, PlayerDidMoveEvent):\n self.update_player(event.player, event.location)\n elif isinstance(event, PlayerJoinedEvent):\n self.update_player(event.player)\n\n def update_player(self, player: Player, location=(c.WIDTH / 2, c.HEIGHT /\n 2)):\n self.other_players[player.nickname] = player, location\n\n def update_server(self):\n if self.current_player is not None:\n self.connection.send(pickle.dumps(PlayerDidMoveEvent(self.\n current_player, (self.location[0], self.location[1]))))\n\n def game_loop(self):\n self.listen_to_server()\n self.event_handling()\n self.update_location()\n self.render()\n self.update_server()\n self.clock.tick(60)\n\n def update_location(self):\n oldx, oldy = self.location\n vx, vy = self.velocity\n newx, newy = oldx + vx, oldy + vy\n if newx > c.WIDTH - c.PLAYER_SIZE:\n newx = c.WIDTH - c.PLAYER_SIZE\n if newx < 0:\n newx = 0\n if newy > c.HEIGHT - c.PLAYER_SIZE:\n newy = c.HEIGHT - c.PLAYER_SIZE\n if newy < 0:\n newy = 0\n self.location = [newx, newy]\n\n def render_player(self, player: Player, location: Tuple[int, int]):\n x, y = location\n img = self.font.render(player.nickname, True, player.color)\n pygame.draw.rect(self.screen, player.color, (x, y, c.PLAYER_SIZE, c\n .PLAYER_SIZE))\n self.screen.blit(img, (x, y - img.get_height()))\n\n def render(self):\n self.screen.fill((255, 255, 255))\n if self.current_player is not None:\n self.render_player(self.current_player, (self.location[0], self\n .location[1]))\n for nickname, (player, location) in self.other_players.items():\n self.render_player(player, location)\n pygame.display.flip()\n\n def event_handling(self):\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n if event.type == KEYDOWN:\n if event.key == K_LEFT:\n self.velocity[0] = -c.MOVEMENT_SPEED\n if event.key == K_RIGHT:\n self.velocity[0] = c.MOVEMENT_SPEED\n if event.key == K_UP:\n self.velocity[1] = -c.MOVEMENT_SPEED\n if event.key == K_DOWN:\n self.velocity[1] = c.MOVEMENT_SPEED\n if event.type == KEYUP:\n if event.key == K_LEFT:\n self.velocity[0] = 0\n if event.key == K_RIGHT:\n self.velocity[0] = 0\n if event.key == K_UP:\n self.velocity[1] = 0\n if event.key == K_DOWN:\n self.velocity[1] = 0\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Game:\n location: list[int, int] = [c.WIDTH / 2, c.HEIGHT / 2]\n velocity: list[int, int] = [0, 0]\n current_player: Player = None\n other_players: Dict[str, Tuple[Player, Tuple[int, int]]] = {}\n connection: socket.socket\n font: pygame.font.Font\n\n def __init__(self):\n pygame.init()\n self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.screen = pygame.display.set_mode((c.WIDTH, c.HEIGHT))\n pygame.display.set_caption('Socket Game')\n self.clock = pygame.time.Clock()\n self.screen.fill('white')\n self.font = pygame.font.SysFont(None, c.FONT_SIZE)\n <mask token>\n\n def connect_to_server(self):\n self.connection.connect((c.HOST, c.PORT))\n\n def listen_to_server(self):\n ins, outs, ex = select.select([self.connection], [], [], 0)\n for inm in ins:\n received_data = inm.recv(c.BUFFSIZE)\n event: Event = pickle.loads(received_data)\n print('<<<', event)\n if isinstance(event, CurrentPlayerEvent):\n pygame.display.set_caption(\n f'Socket Game - {event.player.nickname}')\n self.current_player = event.player\n elif isinstance(event, PlayerDidMoveEvent):\n self.update_player(event.player, event.location)\n elif isinstance(event, PlayerJoinedEvent):\n self.update_player(event.player)\n\n def update_player(self, player: Player, location=(c.WIDTH / 2, c.HEIGHT /\n 2)):\n self.other_players[player.nickname] = player, location\n\n def update_server(self):\n if self.current_player is not None:\n self.connection.send(pickle.dumps(PlayerDidMoveEvent(self.\n current_player, (self.location[0], self.location[1]))))\n\n def game_loop(self):\n self.listen_to_server()\n self.event_handling()\n self.update_location()\n self.render()\n self.update_server()\n self.clock.tick(60)\n\n def update_location(self):\n oldx, oldy = self.location\n vx, vy = self.velocity\n newx, newy = oldx + vx, oldy + vy\n if newx > c.WIDTH - c.PLAYER_SIZE:\n newx = c.WIDTH - c.PLAYER_SIZE\n if newx < 0:\n newx = 0\n if newy > c.HEIGHT - c.PLAYER_SIZE:\n newy = c.HEIGHT - c.PLAYER_SIZE\n if newy < 0:\n newy = 0\n self.location = [newx, newy]\n\n def render_player(self, player: Player, location: Tuple[int, int]):\n x, y = location\n img = self.font.render(player.nickname, True, player.color)\n pygame.draw.rect(self.screen, player.color, (x, y, c.PLAYER_SIZE, c\n .PLAYER_SIZE))\n self.screen.blit(img, (x, y - img.get_height()))\n\n def render(self):\n self.screen.fill((255, 255, 255))\n if self.current_player is not None:\n self.render_player(self.current_player, (self.location[0], self\n .location[1]))\n for nickname, (player, location) in self.other_players.items():\n self.render_player(player, location)\n pygame.display.flip()\n\n def event_handling(self):\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n if event.type == KEYDOWN:\n if event.key == K_LEFT:\n self.velocity[0] = -c.MOVEMENT_SPEED\n if event.key == K_RIGHT:\n self.velocity[0] = c.MOVEMENT_SPEED\n if event.key == K_UP:\n self.velocity[1] = -c.MOVEMENT_SPEED\n if event.key == K_DOWN:\n self.velocity[1] = c.MOVEMENT_SPEED\n if event.type == KEYUP:\n if event.key == K_LEFT:\n self.velocity[0] = 0\n if event.key == K_RIGHT:\n self.velocity[0] = 0\n if event.key == K_UP:\n self.velocity[1] = 0\n if event.key == K_DOWN:\n self.velocity[1] = 0\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Game:\n location: list[int, int] = [c.WIDTH / 2, c.HEIGHT / 2]\n velocity: list[int, int] = [0, 0]\n current_player: Player = None\n other_players: Dict[str, Tuple[Player, Tuple[int, int]]] = {}\n connection: socket.socket\n font: pygame.font.Font\n\n def __init__(self):\n pygame.init()\n self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.screen = pygame.display.set_mode((c.WIDTH, c.HEIGHT))\n pygame.display.set_caption('Socket Game')\n self.clock = pygame.time.Clock()\n self.screen.fill('white')\n self.font = pygame.font.SysFont(None, c.FONT_SIZE)\n\n def start(self):\n self.connect_to_server()\n while True:\n self.game_loop()\n\n def connect_to_server(self):\n self.connection.connect((c.HOST, c.PORT))\n\n def listen_to_server(self):\n ins, outs, ex = select.select([self.connection], [], [], 0)\n for inm in ins:\n received_data = inm.recv(c.BUFFSIZE)\n event: Event = pickle.loads(received_data)\n print('<<<', event)\n if isinstance(event, CurrentPlayerEvent):\n pygame.display.set_caption(\n f'Socket Game - {event.player.nickname}')\n self.current_player = event.player\n elif isinstance(event, PlayerDidMoveEvent):\n self.update_player(event.player, event.location)\n elif isinstance(event, PlayerJoinedEvent):\n self.update_player(event.player)\n\n def update_player(self, player: Player, location=(c.WIDTH / 2, c.HEIGHT /\n 2)):\n self.other_players[player.nickname] = player, location\n\n def update_server(self):\n if self.current_player is not None:\n self.connection.send(pickle.dumps(PlayerDidMoveEvent(self.\n current_player, (self.location[0], self.location[1]))))\n\n def game_loop(self):\n self.listen_to_server()\n self.event_handling()\n self.update_location()\n self.render()\n self.update_server()\n self.clock.tick(60)\n\n def update_location(self):\n oldx, oldy = self.location\n vx, vy = self.velocity\n newx, newy = oldx + vx, oldy + vy\n if newx > c.WIDTH - c.PLAYER_SIZE:\n newx = c.WIDTH - c.PLAYER_SIZE\n if newx < 0:\n newx = 0\n if newy > c.HEIGHT - c.PLAYER_SIZE:\n newy = c.HEIGHT - c.PLAYER_SIZE\n if newy < 0:\n newy = 0\n self.location = [newx, newy]\n\n def render_player(self, player: Player, location: Tuple[int, int]):\n x, y = location\n img = self.font.render(player.nickname, True, player.color)\n pygame.draw.rect(self.screen, player.color, (x, y, c.PLAYER_SIZE, c\n .PLAYER_SIZE))\n self.screen.blit(img, (x, y - img.get_height()))\n\n def render(self):\n self.screen.fill((255, 255, 255))\n if self.current_player is not None:\n self.render_player(self.current_player, (self.location[0], self\n .location[1]))\n for nickname, (player, location) in self.other_players.items():\n self.render_player(player, location)\n pygame.display.flip()\n\n def event_handling(self):\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n if event.type == KEYDOWN:\n if event.key == K_LEFT:\n self.velocity[0] = -c.MOVEMENT_SPEED\n if event.key == K_RIGHT:\n self.velocity[0] = c.MOVEMENT_SPEED\n if event.key == K_UP:\n self.velocity[1] = -c.MOVEMENT_SPEED\n if event.key == K_DOWN:\n self.velocity[1] = c.MOVEMENT_SPEED\n if event.type == KEYUP:\n if event.key == K_LEFT:\n self.velocity[0] = 0\n if event.key == K_RIGHT:\n self.velocity[0] = 0\n if event.key == K_UP:\n self.velocity[1] = 0\n if event.key == K_DOWN:\n self.velocity[1] = 0\n\n\n<mask token>\n",
"step-4": "import pickle\nimport select\nimport socket\nimport sys\nfrom threading import Thread\nfrom typing import Dict, Tuple\nimport pygame\nfrom pygame.locals import *\nimport c\nfrom models import *\n\n\nclass Game:\n location: list[int, int] = [c.WIDTH / 2, c.HEIGHT / 2]\n velocity: list[int, int] = [0, 0]\n current_player: Player = None\n other_players: Dict[str, Tuple[Player, Tuple[int, int]]] = {}\n connection: socket.socket\n font: pygame.font.Font\n\n def __init__(self):\n pygame.init()\n self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.screen = pygame.display.set_mode((c.WIDTH, c.HEIGHT))\n pygame.display.set_caption('Socket Game')\n self.clock = pygame.time.Clock()\n self.screen.fill('white')\n self.font = pygame.font.SysFont(None, c.FONT_SIZE)\n\n def start(self):\n self.connect_to_server()\n while True:\n self.game_loop()\n\n def connect_to_server(self):\n self.connection.connect((c.HOST, c.PORT))\n\n def listen_to_server(self):\n ins, outs, ex = select.select([self.connection], [], [], 0)\n for inm in ins:\n received_data = inm.recv(c.BUFFSIZE)\n event: Event = pickle.loads(received_data)\n print('<<<', event)\n if isinstance(event, CurrentPlayerEvent):\n pygame.display.set_caption(\n f'Socket Game - {event.player.nickname}')\n self.current_player = event.player\n elif isinstance(event, PlayerDidMoveEvent):\n self.update_player(event.player, event.location)\n elif isinstance(event, PlayerJoinedEvent):\n self.update_player(event.player)\n\n def update_player(self, player: Player, location=(c.WIDTH / 2, c.HEIGHT /\n 2)):\n self.other_players[player.nickname] = player, location\n\n def update_server(self):\n if self.current_player is not None:\n self.connection.send(pickle.dumps(PlayerDidMoveEvent(self.\n current_player, (self.location[0], self.location[1]))))\n\n def game_loop(self):\n self.listen_to_server()\n self.event_handling()\n self.update_location()\n self.render()\n self.update_server()\n self.clock.tick(60)\n\n def update_location(self):\n oldx, oldy = self.location\n vx, vy = self.velocity\n newx, newy = oldx + vx, oldy + vy\n if newx > c.WIDTH - c.PLAYER_SIZE:\n newx = c.WIDTH - c.PLAYER_SIZE\n if newx < 0:\n newx = 0\n if newy > c.HEIGHT - c.PLAYER_SIZE:\n newy = c.HEIGHT - c.PLAYER_SIZE\n if newy < 0:\n newy = 0\n self.location = [newx, newy]\n\n def render_player(self, player: Player, location: Tuple[int, int]):\n x, y = location\n img = self.font.render(player.nickname, True, player.color)\n pygame.draw.rect(self.screen, player.color, (x, y, c.PLAYER_SIZE, c\n .PLAYER_SIZE))\n self.screen.blit(img, (x, y - img.get_height()))\n\n def render(self):\n self.screen.fill((255, 255, 255))\n if self.current_player is not None:\n self.render_player(self.current_player, (self.location[0], self\n .location[1]))\n for nickname, (player, location) in self.other_players.items():\n self.render_player(player, location)\n pygame.display.flip()\n\n def event_handling(self):\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n if event.type == KEYDOWN:\n if event.key == K_LEFT:\n self.velocity[0] = -c.MOVEMENT_SPEED\n if event.key == K_RIGHT:\n self.velocity[0] = c.MOVEMENT_SPEED\n if event.key == K_UP:\n self.velocity[1] = -c.MOVEMENT_SPEED\n if event.key == K_DOWN:\n self.velocity[1] = c.MOVEMENT_SPEED\n if event.type == KEYUP:\n if event.key == K_LEFT:\n self.velocity[0] = 0\n if event.key == K_RIGHT:\n self.velocity[0] = 0\n if event.key == K_UP:\n self.velocity[1] = 0\n if event.key == K_DOWN:\n 
self.velocity[1] = 0\n\n\nif __name__ == '__main__':\n s = Game()\n s.start()\n",
"step-5": "import pickle\nimport select\nimport socket\nimport sys\nfrom threading import Thread\nfrom typing import Dict, Tuple\n\nimport pygame\nfrom pygame.locals import *\n\nimport c\nfrom models import *\n\n\nclass Game:\n location: list[int, int] = [c.WIDTH / 2, c.HEIGHT / 2]\n velocity: list[int, int] = [0, 0]\n current_player: Player = None\n other_players: Dict[str, Tuple[Player, Tuple[int, int]]] = {}\n connection: socket.socket\n font: pygame.font.Font\n\n def __init__(self):\n pygame.init()\n self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.screen = pygame.display.set_mode((c.WIDTH, c.HEIGHT))\n pygame.display.set_caption('Socket Game')\n self.clock = pygame.time.Clock()\n self.screen.fill('white')\n self.font = pygame.font.SysFont(None, c.FONT_SIZE)\n\n def start(self):\n self.connect_to_server()\n while True:\n self.game_loop()\n\n def connect_to_server(self):\n self.connection.connect((c.HOST, c.PORT))\n\n def listen_to_server(self):\n ins, outs, ex = select.select([self.connection], [], [], 0)\n for inm in ins:\n received_data = inm.recv(c.BUFFSIZE)\n event: Event = pickle.loads(received_data)\n print(\"<<<\", event)\n if isinstance(event, CurrentPlayerEvent):\n pygame.display.set_caption(f'Socket Game - {event.player.nickname}')\n self.current_player = event.player\n elif isinstance(event, PlayerDidMoveEvent):\n self.update_player(event.player, event.location)\n elif isinstance(event, PlayerJoinedEvent):\n self.update_player(event.player)\n\n def update_player(self, player: Player, location=(c.WIDTH / 2, c.HEIGHT / 2)):\n self.other_players[player.nickname] = (player, location)\n\n def update_server(self):\n if self.current_player is not None:\n self.connection.send(pickle.dumps(PlayerDidMoveEvent(self.current_player, (\n self.location[0], self.location[1],\n ))))\n\n def game_loop(self):\n self.listen_to_server()\n self.event_handling()\n self.update_location()\n self.render()\n self.update_server()\n self.clock.tick(60)\n\n def update_location(self):\n oldx, oldy = self.location\n vx, vy = self.velocity\n newx, newy = oldx + vx, oldy + vy\n if newx > c.WIDTH - c.PLAYER_SIZE:\n newx = c.WIDTH - c.PLAYER_SIZE\n if newx < 0:\n newx = 0\n\n if newy > c.HEIGHT - c.PLAYER_SIZE:\n newy = c.HEIGHT - c.PLAYER_SIZE\n if newy < 0:\n newy = 0\n\n self.location = [newx, newy]\n\n def render_player(self, player: Player, location: Tuple[int, int]):\n x, y = location\n img = self.font.render(player.nickname, True, player.color)\n pygame.draw.rect(self.screen, player.color, (x, y, c.PLAYER_SIZE, c.PLAYER_SIZE))\n self.screen.blit(img, (x, y - img.get_height()))\n\n def render(self):\n self.screen.fill((255, 255, 255))\n if self.current_player is not None:\n self.render_player(self.current_player, (self.location[0], self.location[1]))\n for nickname, (player, location) in self.other_players.items():\n self.render_player(player, location)\n\n pygame.display.flip()\n\n def event_handling(self):\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n if event.type == KEYDOWN:\n if event.key == K_LEFT: self.velocity[0] = -c.MOVEMENT_SPEED\n if event.key == K_RIGHT: self.velocity[0] = c.MOVEMENT_SPEED\n if event.key == K_UP: self.velocity[1] = -c.MOVEMENT_SPEED\n if event.key == K_DOWN: self.velocity[1] = c.MOVEMENT_SPEED\n if event.type == KEYUP:\n if event.key == K_LEFT: self.velocity[0] = 0\n if event.key == K_RIGHT: self.velocity[0] = 0\n if event.key == K_UP: self.velocity[1] = 0\n if event.key == K_DOWN: self.velocity[1] = 
0\n\n\nif __name__ == \"__main__\":\n s = Game()\n s.start()\n",
"step-ids": [
10,
11,
12,
14,
15
]
}
|
[
10,
11,
12,
14,
15
] |
# vim:sw=4 ts=4 et:
# Copyright (c) 2015 Torchbox Ltd.
# [email protected] 2017-12-07
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely. This software is provided 'as-is', without any express or implied
# warranty.
#
from django import forms
from .utils import render_markdown
from .widgets import MarkdownTextarea
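# Wagtail 2.0 renamed wagtail.wagtailcore to wagtail.core; fall back to the pre-2.0 path.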
try:
from wagtail.core.blocks import TextBlock
except ImportError:
from wagtail.wagtailcore.blocks import TextBlock
class MarkdownBlock(TextBlock):
def __init__(self, required=True, help_text=None, **kwargs):
self.field = forms.CharField(
required=required, help_text=help_text, widget=MarkdownTextarea()
)
super(MarkdownBlock, self).__init__(**kwargs)
def render_basic(self, value, context=None):
return render_markdown(value, context)
|
normal
|
{
"blob_id": "6f271e6cfb03977d52c50562c3c394b962c9af83",
"index": 7538,
"step-1": "<mask token>\n\n\nclass MarkdownBlock(TextBlock):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MarkdownBlock(TextBlock):\n\n def __init__(self, required=True, help_text=None, **kwargs):\n self.field = forms.CharField(required=required, help_text=help_text,\n widget=MarkdownTextarea())\n super(MarkdownBlock, self).__init__(**kwargs)\n\n def render_basic(self, value, context=None):\n return render_markdown(value, context)\n",
"step-3": "<mask token>\ntry:\n from wagtail.core.blocks import TextBlock\nexcept ImportError:\n from wagtail.wagtailcore.blocks import TextBlock\n\n\nclass MarkdownBlock(TextBlock):\n\n def __init__(self, required=True, help_text=None, **kwargs):\n self.field = forms.CharField(required=required, help_text=help_text,\n widget=MarkdownTextarea())\n super(MarkdownBlock, self).__init__(**kwargs)\n\n def render_basic(self, value, context=None):\n return render_markdown(value, context)\n",
"step-4": "from django import forms\nfrom .utils import render_markdown\nfrom .widgets import MarkdownTextarea\ntry:\n from wagtail.core.blocks import TextBlock\nexcept ImportError:\n from wagtail.wagtailcore.blocks import TextBlock\n\n\nclass MarkdownBlock(TextBlock):\n\n def __init__(self, required=True, help_text=None, **kwargs):\n self.field = forms.CharField(required=required, help_text=help_text,\n widget=MarkdownTextarea())\n super(MarkdownBlock, self).__init__(**kwargs)\n\n def render_basic(self, value, context=None):\n return render_markdown(value, context)\n",
"step-5": "# vim:sw=4 ts=4 et:\n# Copyright (c) 2015 Torchbox Ltd.\n# [email protected] 2017-12-07\n#\n# Permission is granted to anyone to use this software for any purpose,\n# including commercial applications, and to alter it and redistribute it\n# freely. This software is provided 'as-is', without any express or implied\n# warranty.\n#\nfrom django import forms\n\nfrom .utils import render_markdown\nfrom .widgets import MarkdownTextarea\n\ntry:\n from wagtail.core.blocks import TextBlock\nexcept ImportError:\n from wagtail.wagtailcore.blocks import TextBlock\n\n\nclass MarkdownBlock(TextBlock):\n def __init__(self, required=True, help_text=None, **kwargs):\n self.field = forms.CharField(\n required=required, help_text=help_text, widget=MarkdownTextarea()\n )\n super(MarkdownBlock, self).__init__(**kwargs)\n\n def render_basic(self, value, context=None):\n return render_markdown(value, context)\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
from Adafruit_LSM9DS0 import Adafruit_LSM9DS0
import math
imu = Adafruit_LSM9DS0()
pi = 3.14159265358979323846  # defined locally for speed
r2d = 57.2957795  # 1 radian in degrees
loop = 0.05  # loop period (assumed seconds)
tuning = 0.98  # constant for tuning the complementary filter
# Converting accelerometer readings to degrees.
# Placeholder readings; replace with real accelerometer values from imu.
ax = 0.0  # x-axis acceleration
ay = 0.0  # y-axis acceleration
az = 1.0  # z-axis acceleration (non-zero so zAngle below cannot divide by zero)
xAngle = math.atan(ax / math.sqrt(ay ** 2 + az ** 2)) * r2d  # atan is in radians; r2d converts to degrees
yAngle = math.atan(ay / math.sqrt(ax ** 2 + az ** 2)) * r2d
zAngle = math.atan(math.sqrt(ax ** 2 + ay ** 2) / az) * r2d
|
normal
|
{
"blob_id": "973a58013160cbc71ca46f570bde61eaff87f6a7",
"index": 7489,
"step-1": "from Adafruit_LSM9DS0 import Adafruit_LSM9DS0\nimport math\n\nimu = Adafruit_LSM9DS0()\n\npi = 3.14159265358979323846 # Written here to increase performance/ speed\nr2d = 57.2957795 # 1 radian in degrees\nloop = 0.05 #\ntuning = 0.98 # Constant for tuning Complimentary filter\n\n# Converting accelerometer readings to degrees\nax = #x\nay = #y\naz = #z\n\n xAngle = math.atan( ax / ( math.sqrt( ay**2 + az**2 )))\n yAngle = math.atan( ay / ( math.sqrt( ax**2 + az**2 )))\n zAngle = math.atan( sqrt( ax**2 + ay**2 ) / az)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import sys
import os
def main():
parser = argparse.ArgumentParser(
description='Create the symbol specifying the location of test fixtures.')
parser.add_argument('--fixtures_location_file', type=str, required=True)
parser.add_argument('--fixtures_location', type=str, required=True)
args = parser.parse_args()
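    # Emit a one-line C++ translation unit that hard-codes the fixtures path.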
with open(args.fixtures_location_file, 'w') as file:
file.write('namespace flutter {namespace testing {const char* GetFixturesPath() {return "%s";}}}'
% args.fixtures_location)
if __name__ == '__main__':
sys.exit(main())
|
normal
|
{
"blob_id": "d5c6582547df540ffc9c73d10a3405ec97487bba",
"index": 4513,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Create the symbol specifying the location of test fixtures.')\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n args = parser.parse_args()\n with open(args.fixtures_location_file, 'w') as file:\n file.write(\n 'namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Create the symbol specifying the location of test fixtures.')\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n args = parser.parse_args()\n with open(args.fixtures_location_file, 'w') as file:\n file.write(\n 'namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-4": "import argparse\nimport subprocess\nimport sys\nimport os\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Create the symbol specifying the location of test fixtures.')\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n args = parser.parse_args()\n with open(args.fixtures_location_file, 'w') as file:\n file.write(\n 'namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-5": "#!/usr/bin/env python\n# Copyright 2013 The Flutter Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\nimport argparse\nimport subprocess\nimport sys\nimport os\n\n\ndef main():\n parser = argparse.ArgumentParser(\n description='Create the symbol specifying the location of test fixtures.')\n\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n\n args = parser.parse_args()\n\n with open(args.fixtures_location_file, 'w') as file:\n file.write('namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import Context
from books.models import book, Author
def index(request):
    book_list = book.objects.all()
    c = Context({"book_list": book_list})
    return render_to_response("index.html", c)
|
normal
|
{
"blob_id": "441d224c37e0eae531c17db0e903b3344c570516",
"index": 9867,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef index(request):\n book_list = book.objects.all()\n c = Context({'book_list': book_list})\n return render_to_response('index.html', c)\n",
"step-3": "from django.http import HttpResponse\nfrom django.shortcuts import render_to_response\nfrom django.template import Context\nfrom books.models import book, Author\n\n\ndef index(request):\n book_list = book.objects.all()\n c = Context({'book_list': book_list})\n return render_to_response('index.html', c)\n",
"step-4": "from django.http import HttpResponse\nfrom django.shortcuts import render_to_response\nfrom django.template import Context\nfrom books.models import book,Author\ndef index(request):\n book_list=book.objects.all()\n c=Context({\"book_list\":book_list})\n return render_to_response(\"index.html\",c)\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# pylint: disable=W0621,C0114,C0116,W0212,W0613
import io
import textwrap
from typing import cast, Any, Dict
import toml
import pytest
from dae.testing import convert_to_tab_separated
from dae.configuration.gpf_config_parser import GPFConfigParser
from dae.configuration.schemas.person_sets import person_set_collections_schema
from dae.pedigrees.loader import FamiliesLoader
from dae.person_sets import PersonSetCollection
from impala_storage.schema1.impala_variants import ImpalaVariants
@pytest.fixture
def families_fixture():
ped_content = io.StringIO(convert_to_tab_separated(
"""
familyId personId dadId momId sex status role
f1 mom1 0 0 2 1 mom
f1 dad1 0 0 1 1 dad
f1 prb1 dad1 mom1 1 2 prb
f1 sib1 dad1 mom1 2 2 sib
f1 sib2 dad1 mom1 2 2 sib
f2 grmom2 0 0 2 0 maternal_grandmother
f2 grdad2 0 0 1 0 maternal_grandfather
f2 mom2 grdad2 grmom2 2 1 mom
f2 dad2 0 0 1 1 dad
f2 prb2 dad2 mom2 1 2 prb
f2 sib2_3 dad2 mom2 2 2 sib
"""))
families = FamiliesLoader(ped_content).load()
assert families is not None
return families
def get_person_set_collections_config(content: str):
return GPFConfigParser.process_config(
cast(Dict[str, Any], toml.loads(content)),
{"person_set_collections": person_set_collections_schema},
).person_set_collections
@pytest.fixture
def status_collection(families_fixture):
content = textwrap.dedent(
"""
[person_set_collections]
selected_person_set_collections = ["status"]
status.id = "status"
status.name = "Affected Status"
status.sources = [{ from = "pedigree", source = "status" }]
status.domain = [
{
id = "affected",
name = "Affected",
values = ["affected"],
color = "#aabbcc"
},
{
id = "unaffected",
name = "Unaffected",
values = ["unaffected"],
color = "#ffffff"
},
]
status.default = {id = "unknown",name = "Unknown",color = "#aaaaaa"}
""")
config = get_person_set_collections_config(content)
collection = PersonSetCollection.from_families(
config.status, families_fixture)
return collection
def test_status_person_set_collection(status_collection):
assert status_collection is not None
psc = status_collection
assert len(psc.person_sets) == 3
assert len(psc.person_sets["unknown"].persons) == 2
assert len(psc.person_sets["affected"].persons) == 5
assert len(psc.person_sets["unaffected"].persons) == 4
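# Per the assertions below: build_person_set_collection_query returns () when
# every person set is selected (no filtering needed), otherwise an
# (include, exclude) pair of pedigree matchers, switching to the exclude form
# once the default ("unknown") set is part of the selection.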
def test_status_person_set_collection_all_selected(
status_collection):
query = ImpalaVariants.build_person_set_collection_query(
status_collection,
("status", {"affected", "unaffected", "unknown"})
)
assert query == ()
def test_status_person_set_collection_some_selected_no_default(
status_collection):
query = ImpalaVariants.build_person_set_collection_query(
status_collection,
("status", {"affected"})
)
assert query == ([{"status": "affected"}], [])
def test_status_person_set_collection_some_selected_and_default(
status_collection):
query = ImpalaVariants.build_person_set_collection_query(
status_collection,
("status", {"affected", "unknown"})
)
assert query == ([], [{"status": "unaffected"}])
@pytest.fixture
def status_sex_collection(families_fixture):
config = get_person_set_collections_config(textwrap.dedent("""
[person_set_collections]
selected_person_set_collections = ["status_sex"]
status_sex.id = "status_sex"
status_sex.name = "Affected Status and Sex"
status_sex.sources = [
{ from = "pedigree", source = "status" },
{ from = "pedigree", source = "sex" },
]
status_sex.domain = [
{ id = "affected_male", name = "Affected Male",
values = ["affected", "M"], color = "#ffffff" },
{ id = "affected_female", name = "Affected Female",
values = ["affected", "F"], color = "#ffffff" },
{ id = "unaffected_male", name = "Unaffected Male",
values = ["unaffected", "M"], color = "#ffffff" },
{ id = "unaffected_female", name = "Unaffected Female",
values = ["unaffected", "F"], color = "#ffffff" },
]
status_sex.default = { id="other", name="Other", color="#aaaaaa"}
"""))
return PersonSetCollection.from_families(
config.status_sex, families_fixture
)
def test_status_sex_person_set_collection_all_selected(
status_sex_collection):
query = ImpalaVariants.build_person_set_collection_query(
status_sex_collection,
("status_sex", {
"affected_male", "affected_female",
"unaffected_male", "unaffected_female",
"other"})
)
assert query == ()
def test_status_sex_person_set_collection_some_selected_no_default(
status_sex_collection):
query = ImpalaVariants.build_person_set_collection_query(
status_sex_collection,
("status_sex", {
"affected_male", "affected_female"})
)
assert query == (
[
{"sex": "F", "status": "affected"},
{"sex": "M", "status": "affected"},
], [])
query = ImpalaVariants.build_person_set_collection_query(
status_sex_collection,
("status_sex", {
"unaffected_male", "unaffected_female"})
)
assert query == (
[
{"sex": "F", "status": "unaffected"},
{"sex": "M", "status": "unaffected"}
], [])
query = ImpalaVariants.build_person_set_collection_query(
status_sex_collection,
("status_sex", {
"affected_male", "unaffected_female"})
)
assert query == ([
{"sex": "M", "status": "affected"},
{"sex": "F", "status": "unaffected"},
], [])
def test_status_sex_person_set_collection_some_selected_with_default(
status_sex_collection):
query = ImpalaVariants.build_person_set_collection_query(
status_sex_collection,
("status_sex", {
"affected_male", "affected_female", "other"})
)
assert query == ([], [
{"sex": "F", "status": "unaffected"},
{"sex": "M", "status": "unaffected"},
])
query = ImpalaVariants.build_person_set_collection_query(
status_sex_collection,
("status_sex", {
"unaffected_male", "unaffected_female", "other"}))
assert query == ([], [
{"sex": "F", "status": "affected"},
{"sex": "M", "status": "affected"},
])
query = ImpalaVariants.build_person_set_collection_query(
status_sex_collection,
("status_sex", {
"affected_male", "unaffected_female", "other"})
)
assert query == ([], [
{"sex": "F", "status": "affected"},
{"sex": "M", "status": "unaffected"},
])
|
normal
|
{
"blob_id": "6c8f690e1b43d459535238e24cccc8aa118e2d57",
"index": 3038,
"step-1": "<mask token>\n\n\[email protected]\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\n<mask token>\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\n<mask token>\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\[email protected]\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\n<mask token>\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 
'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n",
"step-2": "<mask token>\n\n\[email protected]\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\n<mask token>\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\n<mask token>\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\[email protected]\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\n<mask token>\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female'}))\n assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female'}))\n assert query == 
([{'sex': 'M', 'status': 'affected'}, {'sex': 'F',\n 'status': 'unaffected'}], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n",
"step-3": "<mask token>\n\n\[email protected]\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\[email protected]\ndef status_collection(families_fixture):\n content = textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status\"]\n status.id = \"status\"\n status.name = \"Affected Status\"\n status.sources = [{ from = \"pedigree\", source = \"status\" }]\n status.domain = [\n {\n id = \"affected\",\n name = \"Affected\",\n values = [\"affected\"],\n color = \"#aabbcc\"\n },\n {\n id = \"unaffected\",\n name = \"Unaffected\",\n values = [\"unaffected\"],\n color = \"#ffffff\"\n },\n ]\n status.default = {id = \"unknown\",name = \"Unknown\",color = \"#aaaaaa\"}\n\n \"\"\"\n )\n config = get_person_set_collections_config(content)\n collection = PersonSetCollection.from_families(config.status,\n families_fixture)\n return collection\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\ndef test_status_person_set_collection_some_selected_no_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected'}))\n assert query == ([{'status': 'affected'}], [])\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\[email protected]\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n 
values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\n<mask token>\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female'}))\n assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F',\n 'status': 'unaffected'}], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n",
"step-4": "<mask token>\n\n\[email protected]\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\[email protected]\ndef status_collection(families_fixture):\n content = textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status\"]\n status.id = \"status\"\n status.name = \"Affected Status\"\n status.sources = [{ from = \"pedigree\", source = \"status\" }]\n status.domain = [\n {\n id = \"affected\",\n name = \"Affected\",\n values = [\"affected\"],\n color = \"#aabbcc\"\n },\n {\n id = \"unaffected\",\n name = \"Unaffected\",\n values = [\"unaffected\"],\n color = \"#ffffff\"\n },\n ]\n status.default = {id = \"unknown\",name = \"Unknown\",color = \"#aaaaaa\"}\n\n \"\"\"\n )\n config = get_person_set_collections_config(content)\n collection = PersonSetCollection.from_families(config.status,\n families_fixture)\n return collection\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\ndef test_status_person_set_collection_some_selected_no_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected'}))\n assert query == ([{'status': 'affected'}], [])\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\[email protected]\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n 
values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\ndef test_status_sex_person_set_collection_all_selected(status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'unaffected_male', 'unaffected_female', 'other'}))\n assert query == ()\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female'}))\n assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F',\n 'status': 'unaffected'}], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n",
"step-5": "# pylint: disable=W0621,C0114,C0116,W0212,W0613\nimport io\nimport textwrap\nfrom typing import cast, Any, Dict\n\nimport toml\nimport pytest\n\nfrom dae.testing import convert_to_tab_separated\nfrom dae.configuration.gpf_config_parser import GPFConfigParser\nfrom dae.configuration.schemas.person_sets import person_set_collections_schema\nfrom dae.pedigrees.loader import FamiliesLoader\nfrom dae.person_sets import PersonSetCollection\n\nfrom impala_storage.schema1.impala_variants import ImpalaVariants\n\n\[email protected]\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(\n cast(Dict[str, Any], toml.loads(content)),\n {\"person_set_collections\": person_set_collections_schema},\n ).person_set_collections\n\n\[email protected]\ndef status_collection(families_fixture):\n content = textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status\"]\n status.id = \"status\"\n status.name = \"Affected Status\"\n status.sources = [{ from = \"pedigree\", source = \"status\" }]\n status.domain = [\n {\n id = \"affected\",\n name = \"Affected\",\n values = [\"affected\"],\n color = \"#aabbcc\"\n },\n {\n id = \"unaffected\",\n name = \"Unaffected\",\n values = [\"unaffected\"],\n color = \"#ffffff\"\n },\n ]\n status.default = {id = \"unknown\",name = \"Unknown\",color = \"#aaaaaa\"}\n\n \"\"\")\n\n config = get_person_set_collections_config(content)\n\n collection = PersonSetCollection.from_families(\n config.status, families_fixture)\n return collection\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets[\"unknown\"].persons) == 2\n assert len(psc.person_sets[\"affected\"].persons) == 5\n assert len(psc.person_sets[\"unaffected\"].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(\n status_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_collection,\n (\"status\", {\"affected\", \"unaffected\", \"unknown\"})\n )\n\n assert query == ()\n\n\ndef test_status_person_set_collection_some_selected_no_default(\n status_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_collection,\n (\"status\", {\"affected\"})\n )\n\n assert query == ([{\"status\": \"affected\"}], [])\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_collection,\n (\"status\", {\"affected\", \"unknown\"})\n )\n\n assert query == ([], [{\"status\": \"unaffected\"}])\n\n\[email protected]\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status 
and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"))\n\n return PersonSetCollection.from_families(\n config.status_sex, families_fixture\n )\n\n\ndef test_status_sex_person_set_collection_all_selected(\n status_sex_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"affected_female\",\n \"unaffected_male\", \"unaffected_female\",\n \"other\"})\n )\n\n assert query == ()\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"affected_female\"})\n )\n\n assert query == (\n [\n {\"sex\": \"F\", \"status\": \"affected\"},\n {\"sex\": \"M\", \"status\": \"affected\"},\n ], [])\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"unaffected_male\", \"unaffected_female\"})\n )\n\n assert query == (\n [\n {\"sex\": \"F\", \"status\": \"unaffected\"},\n {\"sex\": \"M\", \"status\": \"unaffected\"}\n ], [])\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"unaffected_female\"})\n )\n\n assert query == ([\n {\"sex\": \"M\", \"status\": \"affected\"},\n {\"sex\": \"F\", \"status\": \"unaffected\"},\n ], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"affected_female\", \"other\"})\n )\n\n assert query == ([], [\n {\"sex\": \"F\", \"status\": \"unaffected\"},\n {\"sex\": \"M\", \"status\": \"unaffected\"},\n ])\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"unaffected_male\", \"unaffected_female\", \"other\"}))\n\n assert query == ([], [\n {\"sex\": \"F\", \"status\": \"affected\"},\n {\"sex\": \"M\", \"status\": \"affected\"},\n ])\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"unaffected_female\", \"other\"})\n )\n\n assert query == ([], [\n {\"sex\": \"F\", \"status\": \"affected\"},\n {\"sex\": \"M\", \"status\": \"unaffected\"},\n ])\n",
"step-ids": [
7,
8,
10,
11,
13
]
}
|
[
7,
8,
10,
11,
13
] |
from sqlalchemy import (Column, Integer, Float, String, ForeignKey)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
from .meta import Base, BaseModel
class Stock(Base, BaseModel):
__tablename__ = 'stock'
name = Column(String(255), nullable=False)
starting_price = Column(Float, nullable=False)
current_price = Column(Float, nullable=False)
max_price = Column(Float, nullable=True)
min_price = Column(Float, nullable=True)
starting_stock = Column(Integer, nullable=True)
current_stock = Column(Integer, nullable=True)
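    # Bidirectional Stock <-> StockType link; back_populates pairs this
    # relationship with StockType.stocks below.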
stock_type_id = Column(UUID(as_uuid=True), ForeignKey('stock_type.id'))
    stock_type = relationship('StockType', back_populates='stocks')
user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
user = relationship('User')
def __json__(self, _):
return {
"id": self.id,
"name": self.name,
"starting_price": self.starting_price,
"current_price": self.current_price,
"max_price": self.max_price,
"min_price": self.min_price,
"starting_stock": self.starting_stock,
"current_stock": self.current_stock
}
class StockType(Base, BaseModel):
__tablename__ = 'stock_type'
name = Column(String(255), nullable=False)
    stocks = relationship('Stock', back_populates='stock_type')
user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
user = relationship('User')
def __json__(self, _):
return {
"id": self.id,
"name": self.name
}
|
normal
|
{
"blob_id": "7251d32918b16166e9b7c9613726e6dc51d6fea4",
"index": 3834,
"step-1": "<mask token>\n\n\nclass StockType(Base, BaseModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-2": "<mask token>\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-3": "<mask token>\n\n\nclass Stock(Base, BaseModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name, 'starting_price': self.\n starting_price, 'current_price': self.current_price,\n 'max_price': self.max_price, 'min_price': self.min_price,\n 'starting_stock': self.starting_stock, 'current_stock': self.\n current_stock}\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-4": "<mask token>\n\n\nclass Stock(Base, BaseModel):\n __tablename__ = 'stock'\n name = Column(String(255), nullable=False)\n starting_price = Column(Float, nullable=False)\n current_price = Column(Float, nullable=False)\n max_price = Column(Float, nullable=True)\n min_price = Column(Float, nullable=True)\n starting_stock = Column(Integer, nullable=True)\n current_stock = Column(Integer, nullable=True)\n stock_type_id = Column(UUID(as_uuid=True), ForeignKey('stock_type.id'))\n stock_type = relationship('StockType', back_ref='stocks')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name, 'starting_price': self.\n starting_price, 'current_price': self.current_price,\n 'max_price': self.max_price, 'min_price': self.min_price,\n 'starting_stock': self.starting_stock, 'current_stock': self.\n current_stock}\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-5": "from sqlalchemy import (Column, Integer, Float, String, ForeignKey)\nfrom sqlalchemy.dialects.postgresql import UUID\nfrom sqlalchemy.orm import relationship\n\nfrom .meta import Base, BaseModel\n\n\nclass Stock(Base, BaseModel):\n __tablename__ = 'stock'\n\n name = Column(String(255), nullable=False)\n starting_price = Column(Float, nullable=False)\n current_price = Column(Float, nullable=False)\n max_price = Column(Float, nullable=True)\n min_price = Column(Float, nullable=True)\n starting_stock = Column(Integer, nullable=True)\n current_stock = Column(Integer, nullable=True)\n\n stock_type_id = Column(UUID(as_uuid=True), ForeignKey('stock_type.id'))\n stock_type = relationship('StockType', back_ref='stocks')\n\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {\n \"id\": self.id,\n \"name\": self.name,\n \"starting_price\": self.starting_price,\n \"current_price\": self.current_price,\n \"max_price\": self.max_price,\n \"min_price\": self.min_price,\n \"starting_stock\": self.starting_stock,\n \"current_stock\": self.current_stock\n }\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {\n \"id\": self.id,\n \"name\": self.name\n }\n",
"step-ids": [
2,
3,
5,
6,
8
]
}
|
[
2,
3,
5,
6,
8
] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'ghou'
from datetime import datetime
bGameValid = True
dAskUserInfo = {}
gAccMode = 0
#============UserSyncResource2.py===================
#============Whitelist test switch for client resource hot updates======
#============Read from the config table config.xml=======================
#============Hot-update content above this version number is visible to the whitelist only=======
gWhiteTestResourceVersion = None
#============Hot-update filtering for review builds======================
#============Read from the config table config.xml=======================
#============Hot-update content equal to this version number is hidden from everyone=============
gInvalidClientVersion = None  # invalid client version number
|
normal
|
{
"blob_id": "2e075c3ee6b245b1ffd0bb8c4e205199f794da76",
"index": 5725,
"step-1": "<mask token>\n",
"step-2": "__author__ = 'ghou'\n<mask token>\nbGameValid = True\ndAskUserInfo = {}\ngAccMode = 0\ngWhiteTestResourceVersion = None\ngInvalidClientVersion = None\n",
"step-3": "__author__ = 'ghou'\nfrom datetime import datetime\nbGameValid = True\ndAskUserInfo = {}\ngAccMode = 0\ngWhiteTestResourceVersion = None\ngInvalidClientVersion = None\n",
"step-4": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n\n__author__ = 'ghou'\n\nfrom datetime import datetime\n\nbGameValid = True\ndAskUserInfo = {}\ngAccMode = 0\n\n\n\n#============UserSyncResource2.py===================\n\n#============前端资源热更白名单测试功能================\n#============去读配置表config.xml==================\n#============大于配置标号的热更内容只有白名单可见=======\ngWhiteTestResourceVersion = None\n\n#============评审版本热更过滤========================\n#============去读配置表config.xml==================\n#============等于配置标号的热更内容都不可见=============\ngInvalidClientVersion = None # 非法的客户端版本号",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from pyplasm import *
import random as r
def gen_windows(plan_grid, n, m, window_model):
return STRUCT([
T([1,2])([j,i])(
gen_cube_windows(plan_grid, window_model)(i, j, n, m))
for i in range(n)
for j in range(m)
if plan_grid[i][j]])
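# For one occupied grid cell, emit a window on every exterior face: a face is
# exterior when it lies on the plan border or the neighbouring cell is empty.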
def gen_cube_windows(plan_grid, window_model):
w = window_model
hpcs = [CUBE(0.00001)]
def gen_cube0(i, j, n, m):
if j+1 == m or not plan_grid[i][j+1]:
hpcs.append(T([1, 2])([1, .5])(MAP([S2, S1, S3])(w)))
if j-1 < 0 or not plan_grid[i][j-1]:
hpcs.append(T(2)(.5)(MAP([S2, S1, S3])(w)))
if i+1 == n or not plan_grid[i+1][j]:
hpcs.append(T([1, 2])([.5, 1])(w))
if i-1 < 0 or not plan_grid[i-1][j]:
hpcs.append(T(1)(.5)(w))
return STRUCT(hpcs)
return gen_cube0
def gen_body(plan_grid, n, m):
c = CUBE(1)
return STRUCT([
T([1,2])([j,i])(c)
for i in range(n)
for j in range(m)
if plan_grid[i][j]])
def gen_house(
box,
plan_grid,
door_model,
window_model,
roof_model):
n = len(plan_grid)
m = len(plan_grid[0])
body = STRUCT([
gen_body(plan_grid, n, m),
T(3)(1),
roof_model])
    # materialise the scale factors: under Python 3, map() returns a one-shot
    # iterator, which the two uses below would otherwise exhaust
    l2s_scale = [x / y for x, y in zip(SIZE([1, 2, 3])(body), box)]
    s2l_scale = [1 / elem for elem in l2s_scale]
scaled_win = S([1,2,3])(l2s_scale)(window_model)
windows = gen_windows(plan_grid, n, m, scaled_win)
house = STRUCT([body, windows])
return TEXTURE(['wood.jpg',True, True, 300,300, r.random()*3.1415, .1,.1, 0,0])(
S([1,2,3])(s2l_scale)(house))
def l_shaped_house(box):
grid = [
[False, False, True],
[True, True, True]]
roof = MKPOL([
[
[ 2, 0, 0],
[2.5, 0, .5],
[ 3, 0, 0],
[ 3, 2, 0],
[ 0, 2, 0],
[ 0, 1.5, .5],
[ 0, 1, 0],
[ 2, 1, 0],
[2.5, 1.5, .5]
],
[
[3,2,1],
[9,2,3,4],
[5,6,9,4],
[7,6,5],
[7,8,9,6],
[9,8,1,2]
],
[1]])
window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))
return gen_house(box, grid, None, window, roof)
def q_shaped_house(box):
grid = [
[True, True, True],
[True, True, True],
[True, False, False]]
roof = MKPOL([
[
[0,0,0], #1
[3,0,0], #2
[3,2,0], #3
[1,2,0], #4
[1,3,0], #5
[.5,3,.5], #6
[0,3,0], #7
[.5,.5,.5], #8
[2.5,.5,.5], #9
[2.5,1.5,.5], #10
[.5,1.5,.5] #11
],
[
[1,8,6,7],
[1,2,9,8],
[2,3,10,9],
[10,3,4,11],
[4,5,6,11],
[6,5,7],
[8,9,10,11]
],
[1]])
window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))
return gen_house(box, grid, None, window, roof)
def rectangular_house(box):
grid = [
[True, True],
[True, True],
[True, True]]
roof = MKPOL([
[
[0,0,0], #1
[1,0,1], #2
[2,0,0], #3
[2,3,0], #4
[1,3,1], #5
[0,3,0] #6
],
[
[1,2,5,6],
[2,3,4,5],
[1,3,2],
[5,4,6]
],
[1]])
window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))
return gen_house(box, grid, None, window, roof)
def squared_house(box):
grid = [
[True, True, True],
[True, True, True],
[True, True, True]]
roof = MKPOL([
[
[0,0,0], #1
[3,0,0], #2
[3,3,0], #3
[0,3,0], #4
[1.5,1.5,1] #5
],
[
[5,1,2],
[5,2,3],
[5,3,4],
[5,4,1]
],
[1]])
window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))
return gen_house(box, grid, None, window, roof)
if __name__ == '__main__':
VIEW(squared_house([15, 15, 8]))
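    # Hedged follow-up (an addition, not in the original): the generators
    # compose with ordinary pyplasm operators, e.g. two house types placed
    # side by side; the box sizes here are arbitrary.
    VIEW(STRUCT([
        l_shaped_house([12, 12, 7]),
        T(1)(14)(rectangular_house([8, 12, 7]))]))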
|
normal
|
{
"blob_id": "cb48a1601798f72f9cf3759d3c13969bc824a0f6",
"index": 707,
"step-1": "<mask token>\n\n\ndef gen_windows(plan_grid, n, m, window_model):\n return STRUCT([T([1, 2])([j, i])(gen_cube_windows(plan_grid,\n window_model)(i, j, n, m)) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\n<mask token>\n\n\ndef gen_body(plan_grid, n, m):\n c = CUBE(1)\n return STRUCT([T([1, 2])([j, i])(c) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\n<mask token>\n\n\ndef q_shaped_house(box):\n grid = [[True, True, True], [True, True, True], [True, False, False]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 2, 0], [1, 2, 0], [1, 3, 0], [\n 0.5, 3, 0.5], [0, 3, 0], [0.5, 0.5, 0.5], [2.5, 0.5, 0.5], [2.5, \n 1.5, 0.5], [0.5, 1.5, 0.5]], [[1, 8, 6, 7], [1, 2, 9, 8], [2, 3, 10,\n 9], [10, 3, 4, 11], [4, 5, 6, 11], [6, 5, 7], [8, 9, 10, 11]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef rectangular_house(box):\n grid = [[True, True], [True, True], [True, True]]\n roof = MKPOL([[[0, 0, 0], [1, 0, 1], [2, 0, 0], [2, 3, 0], [1, 3, 1], [\n 0, 3, 0]], [[1, 2, 5, 6], [2, 3, 4, 5], [1, 3, 2], [5, 4, 6]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef squared_house(box):\n grid = [[True, True, True], [True, True, True], [True, True, True]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 3, 0], [0, 3, 0], [1.5, 1.5, 1\n ]], [[5, 1, 2], [5, 2, 3], [5, 3, 4], [5, 4, 1]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef gen_windows(plan_grid, n, m, window_model):\n return STRUCT([T([1, 2])([j, i])(gen_cube_windows(plan_grid,\n window_model)(i, j, n, m)) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\n<mask token>\n\n\ndef gen_body(plan_grid, n, m):\n c = CUBE(1)\n return STRUCT([T([1, 2])([j, i])(c) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\ndef gen_house(box, plan_grid, door_model, window_model, roof_model):\n n = len(plan_grid)\n m = len(plan_grid[0])\n body = STRUCT([gen_body(plan_grid, n, m), T(3)(1), roof_model])\n l2s_scale = map(lambda x, y: x / y, SIZE([1, 2, 3])(body), box)\n s2l_scale = [(1 / elem) for elem in l2s_scale]\n scaled_win = S([1, 2, 3])(l2s_scale)(window_model)\n windows = gen_windows(plan_grid, n, m, scaled_win)\n house = STRUCT([body, windows])\n return TEXTURE(['wood.jpg', True, True, 300, 300, r.random() * 3.1415, \n 0.1, 0.1, 0, 0])(S([1, 2, 3])(s2l_scale)(house))\n\n\ndef l_shaped_house(box):\n grid = [[False, False, True], [True, True, True]]\n roof = MKPOL([[[2, 0, 0], [2.5, 0, 0.5], [3, 0, 0], [3, 2, 0], [0, 2, 0\n ], [0, 1.5, 0.5], [0, 1, 0], [2, 1, 0], [2.5, 1.5, 0.5]], [[3, 2, 1\n ], [9, 2, 3, 4], [5, 6, 9, 4], [7, 6, 5], [7, 8, 9, 6], [9, 8, 1, 2\n ]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef q_shaped_house(box):\n grid = [[True, True, True], [True, True, True], [True, False, False]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 2, 0], [1, 2, 0], [1, 3, 0], [\n 0.5, 3, 0.5], [0, 3, 0], [0.5, 0.5, 0.5], [2.5, 0.5, 0.5], [2.5, \n 1.5, 0.5], [0.5, 1.5, 0.5]], [[1, 8, 6, 7], [1, 2, 9, 8], [2, 3, 10,\n 9], [10, 3, 4, 11], [4, 5, 6, 11], [6, 5, 7], [8, 9, 10, 11]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef rectangular_house(box):\n grid = [[True, True], [True, True], [True, True]]\n roof = MKPOL([[[0, 0, 0], [1, 0, 1], [2, 0, 0], [2, 3, 0], [1, 3, 1], [\n 0, 3, 0]], [[1, 2, 5, 6], [2, 3, 4, 5], [1, 3, 2], [5, 4, 6]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef squared_house(box):\n grid = [[True, True, True], [True, True, True], [True, True, True]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 3, 0], [0, 3, 0], [1.5, 1.5, 1\n ]], [[5, 1, 2], [5, 2, 3], [5, 3, 4], [5, 4, 1]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef gen_windows(plan_grid, n, m, window_model):\n return STRUCT([T([1, 2])([j, i])(gen_cube_windows(plan_grid,\n window_model)(i, j, n, m)) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\ndef gen_cube_windows(plan_grid, window_model):\n w = window_model\n hpcs = [CUBE(1e-05)]\n\n def gen_cube0(i, j, n, m):\n if j + 1 == m or not plan_grid[i][j + 1]:\n hpcs.append(T([1, 2])([1, 0.5])(MAP([S2, S1, S3])(w)))\n if j - 1 < 0 or not plan_grid[i][j - 1]:\n hpcs.append(T(2)(0.5)(MAP([S2, S1, S3])(w)))\n if i + 1 == n or not plan_grid[i + 1][j]:\n hpcs.append(T([1, 2])([0.5, 1])(w))\n if i - 1 < 0 or not plan_grid[i - 1][j]:\n hpcs.append(T(1)(0.5)(w))\n return STRUCT(hpcs)\n return gen_cube0\n\n\ndef gen_body(plan_grid, n, m):\n c = CUBE(1)\n return STRUCT([T([1, 2])([j, i])(c) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\ndef gen_house(box, plan_grid, door_model, window_model, roof_model):\n n = len(plan_grid)\n m = len(plan_grid[0])\n body = STRUCT([gen_body(plan_grid, n, m), T(3)(1), roof_model])\n l2s_scale = map(lambda x, y: x / y, SIZE([1, 2, 3])(body), box)\n s2l_scale = [(1 / elem) for elem in l2s_scale]\n scaled_win = S([1, 2, 3])(l2s_scale)(window_model)\n windows = gen_windows(plan_grid, n, m, scaled_win)\n house = STRUCT([body, windows])\n return TEXTURE(['wood.jpg', True, True, 300, 300, r.random() * 3.1415, \n 0.1, 0.1, 0, 0])(S([1, 2, 3])(s2l_scale)(house))\n\n\ndef l_shaped_house(box):\n grid = [[False, False, True], [True, True, True]]\n roof = MKPOL([[[2, 0, 0], [2.5, 0, 0.5], [3, 0, 0], [3, 2, 0], [0, 2, 0\n ], [0, 1.5, 0.5], [0, 1, 0], [2, 1, 0], [2.5, 1.5, 0.5]], [[3, 2, 1\n ], [9, 2, 3, 4], [5, 6, 9, 4], [7, 6, 5], [7, 8, 9, 6], [9, 8, 1, 2\n ]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef q_shaped_house(box):\n grid = [[True, True, True], [True, True, True], [True, False, False]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 2, 0], [1, 2, 0], [1, 3, 0], [\n 0.5, 3, 0.5], [0, 3, 0], [0.5, 0.5, 0.5], [2.5, 0.5, 0.5], [2.5, \n 1.5, 0.5], [0.5, 1.5, 0.5]], [[1, 8, 6, 7], [1, 2, 9, 8], [2, 3, 10,\n 9], [10, 3, 4, 11], [4, 5, 6, 11], [6, 5, 7], [8, 9, 10, 11]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef rectangular_house(box):\n grid = [[True, True], [True, True], [True, True]]\n roof = MKPOL([[[0, 0, 0], [1, 0, 1], [2, 0, 0], [2, 3, 0], [1, 3, 1], [\n 0, 3, 0]], [[1, 2, 5, 6], [2, 3, 4, 5], [1, 3, 2], [5, 4, 6]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef squared_house(box):\n grid = [[True, True, True], [True, True, True], [True, True, True]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 3, 0], [0, 3, 0], [1.5, 1.5, 1\n ]], [[5, 1, 2], [5, 2, 3], [5, 3, 4], [5, 4, 1]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef gen_windows(plan_grid, n, m, window_model):\n return STRUCT([T([1, 2])([j, i])(gen_cube_windows(plan_grid,\n window_model)(i, j, n, m)) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\ndef gen_cube_windows(plan_grid, window_model):\n w = window_model\n hpcs = [CUBE(1e-05)]\n\n def gen_cube0(i, j, n, m):\n if j + 1 == m or not plan_grid[i][j + 1]:\n hpcs.append(T([1, 2])([1, 0.5])(MAP([S2, S1, S3])(w)))\n if j - 1 < 0 or not plan_grid[i][j - 1]:\n hpcs.append(T(2)(0.5)(MAP([S2, S1, S3])(w)))\n if i + 1 == n or not plan_grid[i + 1][j]:\n hpcs.append(T([1, 2])([0.5, 1])(w))\n if i - 1 < 0 or not plan_grid[i - 1][j]:\n hpcs.append(T(1)(0.5)(w))\n return STRUCT(hpcs)\n return gen_cube0\n\n\ndef gen_body(plan_grid, n, m):\n c = CUBE(1)\n return STRUCT([T([1, 2])([j, i])(c) for i in range(n) for j in range(m) if\n plan_grid[i][j]])\n\n\ndef gen_house(box, plan_grid, door_model, window_model, roof_model):\n n = len(plan_grid)\n m = len(plan_grid[0])\n body = STRUCT([gen_body(plan_grid, n, m), T(3)(1), roof_model])\n l2s_scale = map(lambda x, y: x / y, SIZE([1, 2, 3])(body), box)\n s2l_scale = [(1 / elem) for elem in l2s_scale]\n scaled_win = S([1, 2, 3])(l2s_scale)(window_model)\n windows = gen_windows(plan_grid, n, m, scaled_win)\n house = STRUCT([body, windows])\n return TEXTURE(['wood.jpg', True, True, 300, 300, r.random() * 3.1415, \n 0.1, 0.1, 0, 0])(S([1, 2, 3])(s2l_scale)(house))\n\n\ndef l_shaped_house(box):\n grid = [[False, False, True], [True, True, True]]\n roof = MKPOL([[[2, 0, 0], [2.5, 0, 0.5], [3, 0, 0], [3, 2, 0], [0, 2, 0\n ], [0, 1.5, 0.5], [0, 1, 0], [2, 1, 0], [2.5, 1.5, 0.5]], [[3, 2, 1\n ], [9, 2, 3, 4], [5, 6, 9, 4], [7, 6, 5], [7, 8, 9, 6], [9, 8, 1, 2\n ]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef q_shaped_house(box):\n grid = [[True, True, True], [True, True, True], [True, False, False]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 2, 0], [1, 2, 0], [1, 3, 0], [\n 0.5, 3, 0.5], [0, 3, 0], [0.5, 0.5, 0.5], [2.5, 0.5, 0.5], [2.5, \n 1.5, 0.5], [0.5, 1.5, 0.5]], [[1, 8, 6, 7], [1, 2, 9, 8], [2, 3, 10,\n 9], [10, 3, 4, 11], [4, 5, 6, 11], [6, 5, 7], [8, 9, 10, 11]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef rectangular_house(box):\n grid = [[True, True], [True, True], [True, True]]\n roof = MKPOL([[[0, 0, 0], [1, 0, 1], [2, 0, 0], [2, 3, 0], [1, 3, 1], [\n 0, 3, 0]], [[1, 2, 5, 6], [2, 3, 4, 5], [1, 3, 2], [5, 4, 6]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef squared_house(box):\n grid = [[True, True, True], [True, True, True], [True, True, True]]\n roof = MKPOL([[[0, 0, 0], [3, 0, 0], [3, 3, 0], [0, 3, 0], [1.5, 1.5, 1\n ]], [[5, 1, 2], [5, 2, 3], [5, 3, 4], [5, 4, 1]], [1]])\n window = T([1, 2, 3])([-0.75, -0.1, 1.2])(CUBOID([1.5, 0.2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\nif __name__ == '__main__':\n VIEW(squared_house([15, 15, 8]))\n",
"step-5": "from pyplasm import *\nimport random as r\n\ndef gen_windows(plan_grid, n, m, window_model):\n return STRUCT([\n T([1,2])([j,i])(\n gen_cube_windows(plan_grid, window_model)(i, j, n, m))\n for i in range(n) \n for j in range(m) \n if plan_grid[i][j]])\n\ndef gen_cube_windows(plan_grid, window_model):\n w = window_model\n hpcs = [CUBE(0.00001)]\n \n def gen_cube0(i, j, n, m):\n if j+1 == m or not plan_grid[i][j+1]:\n hpcs.append(T([1, 2])([1, .5])(MAP([S2, S1, S3])(w)))\n \n if j-1 < 0 or not plan_grid[i][j-1]:\n hpcs.append(T(2)(.5)(MAP([S2, S1, S3])(w)))\n \n if i+1 == n or not plan_grid[i+1][j]:\n hpcs.append(T([1, 2])([.5, 1])(w))\n \n if i-1 < 0 or not plan_grid[i-1][j]:\n hpcs.append(T(1)(.5)(w))\n \n return STRUCT(hpcs)\n \n return gen_cube0\n \n\ndef gen_body(plan_grid, n, m):\n c = CUBE(1)\n return STRUCT([\n T([1,2])([j,i])(c)\n for i in range(n) \n for j in range(m) \n if plan_grid[i][j]])\n\n\ndef gen_house(\n box,\n plan_grid,\n door_model,\n window_model,\n roof_model):\n \n n = len(plan_grid)\n m = len(plan_grid[0])\n \n body = STRUCT([\n gen_body(plan_grid, n, m),\n T(3)(1),\n roof_model])\n \n l2s_scale = map(lambda x,y: x/y, SIZE([1,2,3])(body), box)\n s2l_scale = [1/elem for elem in l2s_scale]\n \n scaled_win = S([1,2,3])(l2s_scale)(window_model)\n \n windows = gen_windows(plan_grid, n, m, scaled_win)\n \n house = STRUCT([body, windows])\n \n return TEXTURE(['wood.jpg',True, True, 300,300, r.random()*3.1415, .1,.1, 0,0])(\n S([1,2,3])(s2l_scale)(house))\n\n\ndef l_shaped_house(box):\n \n grid = [\n [False, False, True],\n [True, True, True]]\n \n roof = MKPOL([\n [\n [ 2, 0, 0],\n [2.5, 0, .5],\n [ 3, 0, 0],\n [ 3, 2, 0],\n [ 0, 2, 0],\n [ 0, 1.5, .5],\n [ 0, 1, 0],\n [ 2, 1, 0],\n [2.5, 1.5, .5]\n ],\n [\n [3,2,1],\n [9,2,3,4],\n [5,6,9,4],\n [7,6,5],\n [7,8,9,6],\n [9,8,1,2]\n ],\n [1]])\n \n window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))\n return gen_house(box, grid, None, window, roof)\n \ndef q_shaped_house(box):\n\n grid = [\n [True, True, True],\n [True, True, True],\n [True, False, False]]\n roof = MKPOL([\n [\n [0,0,0], #1\n [3,0,0], #2\n [3,2,0], #3\n [1,2,0], #4\n [1,3,0], #5\n [.5,3,.5], #6\n [0,3,0], #7\n [.5,.5,.5], #8\n [2.5,.5,.5], #9\n [2.5,1.5,.5], #10\n [.5,1.5,.5] #11\n ],\n [\n [1,8,6,7],\n [1,2,9,8],\n [2,3,10,9],\n [10,3,4,11],\n [4,5,6,11],\n [6,5,7],\n [8,9,10,11]\n ],\n [1]])\n \n window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef rectangular_house(box):\n\n grid = [\n [True, True],\n [True, True],\n [True, True]]\n roof = MKPOL([\n [\n [0,0,0], #1\n [1,0,1], #2\n [2,0,0], #3\n [2,3,0], #4\n [1,3,1], #5\n [0,3,0] #6\n ],\n [\n [1,2,5,6],\n [2,3,4,5],\n [1,3,2],\n [5,4,6]\n ],\n [1]])\n \n window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))\n return gen_house(box, grid, None, window, roof)\n\n\ndef squared_house(box):\n \n grid = [\n [True, True, True],\n [True, True, True],\n [True, True, True]]\n roof = MKPOL([\n [\n [0,0,0], #1\n [3,0,0], #2\n [3,3,0], #3\n [0,3,0], #4\n [1.5,1.5,1] #5\n ],\n [\n [5,1,2],\n [5,2,3],\n [5,3,4],\n [5,4,1]\n ],\n [1]])\n \n window = T([1,2,3])([-.75, -.1, 1.2])(CUBOID([1.5, .2, 2]))\n return gen_house(box, grid, None, window, roof)\n \n\nif __name__=='__main__':\n VIEW(squared_house([15, 15, 8]))\n\n",
"step-ids": [
5,
7,
8,
9,
11
]
}
|
[
5,
7,
8,
9,
11
] |
data_dir = "../data"
output_dir = './'
valid_id = dict()
for category in ("beauty", "fashion", "mobile"):
with open("%s/%s_data_info_val_competition.csv" % (data_dir, category), "r") as infile:
next(infile)
for line in infile:
curr_id = line.strip().split(',')[0]
valid_id[curr_id] = True
# This is the new output submission file containing 977987 rows
with open("submission_977.csv", "w") as outfile:
outfile.write("id,tagging\n")
# Please change the file below to your current submission filename containing 1174802 rows
# with open("submission-in.csv", "r") as infile:
with open("%s/submission_2103.csv" % output_dir, "r") as infile:
next(infile)
for line in infile:
curr_id = line.strip().split('_')[0]
if curr_id in valid_id:
outfile.write(line.strip() + '\n')
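# Sanity check (an addition, not part of the original script): the filtered
# file should hold one header line plus one row per whitelisted id kept --
# 977987 data rows per the comment above.
with open("submission_977.csv", "r") as check:
    print("rows written:", sum(1 for _ in check) - 1)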
|
normal
|
{
"blob_id": "82556291c456b9e43e4e589ea4a77d320430344b",
"index": 7478,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor category in ('beauty', 'fashion', 'mobile'):\n with open('%s/%s_data_info_val_competition.csv' % (data_dir, category), 'r'\n ) as infile:\n next(infile)\n for line in infile:\n curr_id = line.strip().split(',')[0]\n valid_id[curr_id] = True\nwith open('submission_977.csv', 'w') as outfile:\n outfile.write('id,tagging\\n')\n with open('%s/submission_2103.csv' % output_dir, 'r') as infile:\n next(infile)\n for line in infile:\n curr_id = line.strip().split('_')[0]\n if curr_id in valid_id:\n outfile.write(line.strip() + '\\n')\n",
"step-3": "data_dir = '../data'\noutput_dir = './'\nvalid_id = dict()\nfor category in ('beauty', 'fashion', 'mobile'):\n with open('%s/%s_data_info_val_competition.csv' % (data_dir, category), 'r'\n ) as infile:\n next(infile)\n for line in infile:\n curr_id = line.strip().split(',')[0]\n valid_id[curr_id] = True\nwith open('submission_977.csv', 'w') as outfile:\n outfile.write('id,tagging\\n')\n with open('%s/submission_2103.csv' % output_dir, 'r') as infile:\n next(infile)\n for line in infile:\n curr_id = line.strip().split('_')[0]\n if curr_id in valid_id:\n outfile.write(line.strip() + '\\n')\n",
"step-4": "data_dir = \"../data\"\noutput_dir = './'\nvalid_id = dict()\n\nfor category in (\"beauty\", \"fashion\", \"mobile\"):\n with open(\"%s/%s_data_info_val_competition.csv\" % (data_dir, category), \"r\") as infile:\n next(infile)\n for line in infile:\n curr_id = line.strip().split(',')[0]\n valid_id[curr_id] = True\n\n# This is the new output submission file containing 977987 rows\nwith open(\"submission_977.csv\", \"w\") as outfile:\n outfile.write(\"id,tagging\\n\")\n \n # Please change the file below to your current submission filename containing 1174802 rows\n # with open(\"submission-in.csv\", \"r\") as infile:\n with open(\"%s/submission_2103.csv\" % output_dir, \"r\") as infile:\n next(infile)\n for line in infile:\n curr_id = line.strip().split('_')[0]\n if curr_id in valid_id:\n outfile.write(line.strip() + '\\n')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sys
from bs4 import BeautifulSoup
def get_classes(html):
    """
    returns a list of (code, title) tuples, parsing through 'html'
    """
    elements = html.find_all("span", "code")    # <span class="code">...</span>
    titles = html.find_all("span", "title")     # <span class="title">...</span>
    classes = []
    for item, tit in zip(elements, titles):
        # non-breaking spaces from the page are normalised to plain spaces
        classes.append((item.text.replace('\xa0', ' '),
                        tit.text.replace('\xa0', ' ')))
    return classes
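# Minimal usage sketch (an addition; the HTML string is made up for
# illustration):
if __name__ == '__main__':
    sample = BeautifulSoup(
        '<span class="code">CS\xa0101</span>'
        '<span class="title">Intro\xa0to\xa0CS</span>', 'html.parser')
    print(get_classes(sample))  # -> [('CS 101', 'Intro to CS')]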
|
normal
|
{
"blob_id": "9bb8e0f732eac474dbc01c374f9c74178f65dc36",
"index": 3063,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_classes(html):\n \"\"\"\n returns a list of classes and titles, parsing through 'html'\n \"\"\"\n",
"step-3": "import sys\nfrom bs4 import BeautifulSoup\n\n\ndef get_classes(html):\n \"\"\"\n returns a list of classes and titles, parsing through 'html'\n \"\"\"\n",
"step-4": "import sys\nfrom bs4 import BeautifulSoup\n\n\ndef get_classes(html):\n \"\"\"\n returns a list of classes and titles, parsing through 'html'\n \"\"\"\n # elements = html.find_all(\"span\", \"code\")\n # titles = html.find_all(\"span\", \"title\")\n # classes = []\n # for i in range(len(elements)):\n # item = elements[i]\n # tit = titles[i]\n # classes += [(item.text.replace('\\xa0', ' '), tit.text.replace('\\xa0', ' '))]\n # return classes\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def randomizer(n, garrafa_vidro, lata_metal, copo_plastico, bola_papel,
maça_organico):
lixos = [garrafa_vidro, lata_metal, copo_plastico, bola_papel,
maça_organico]
return lixos[n]
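# Usage sketch (an addition; the English item strings are illustrative
# placeholders for the Portuguese parameters above):
if __name__ == '__main__':
    import random
    print(randomizer(random.randint(0, 4), 'glass bottle', 'metal can',
                     'plastic cup', 'paper ball', 'organic apple'))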
|
normal
|
{
"blob_id": "71a9c9b8f47dcfbecc154c44d5a72ddbd852145a",
"index": 328,
"step-1": "<mask token>\n",
"step-2": "def randomizer(n, garrafa_vidro, lata_metal, copo_plastico, bola_papel,\n maça_organico):\n lixos = [garrafa_vidro, lata_metal, copo_plastico, bola_papel,\n maça_organico]\n return lixos[n]\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from arcade.sprite_list.sprite_list import SpriteList
import GamePiece as gp
from Errors import *
class GameConfig:
WINDOW_TITLE = "MyPyTris"
SCREEN_WIDTH = 450
SCREEN_HEIGHT = 900
BLOCK_PX = 45 # 45px blocks on screen
SPRITE_PX = 64 # 64px sprite
BLOCK_SCALE = BLOCK_PX/SPRITE_PX # sprite scale ratio
class GameBoard:
""" Class to manage blocks on the game board """
def __init__(self, width: int, height: int):
# 2D list of blocks initialized to empty in the width and height of our game board
self.width = width
self.height = height
self.blocks = [[None for y in range(width)] for x in range(height)]
self.playerSprites = SpriteList()
self.groundSprites = SpriteList()
def draw(self):
self.playerSprites.draw()
self.groundSprites.draw()
    def canMoveBlock(self, x: int, y: int) -> bool:
        # blocks is row-major (blocks[y][x]), matching every other accessor here
        return self.blocks[y][x] is None
def canMoveGamePiece(self, gamePiece:gp.GamePiece, xTo:int, yTo:int) -> bool:
for yDiff, row in enumerate(gamePiece.blocks):
for xDiff, block in enumerate(row):
if block is None:
continue
newX = xTo + xDiff
newY = yTo + yDiff
if newX >= self.width or newX < 0:
return False
if newY < 0 or newY >= self.height:
return False
if self.blocks[newY][newX] is not None \
and self.blocks[newY][newX] not in gamePiece.allBlocks():
return False
return True
def moveGamePiece(self, gamePiece:gp.GamePiece, xTo:int, yTo:int):
if (not self.canMoveGamePiece(gamePiece, xTo, yTo)):
return False
# remove blocks from game board
for y, row in enumerate(gamePiece.blocks):
for x, block in enumerate(row):
if block is not None:
self.blocks[y + gamePiece.y][x + gamePiece.x] = None
# add blocks in new positions
for y, row in enumerate(gamePiece.blocks):
for x, block in enumerate(row):
if block is not None:
blockXDiff = block.x - gamePiece.x
blockYDiff = block.y - gamePiece.y
newBlockX = xTo + blockXDiff
newBlockY = yTo + blockYDiff
self.blocks[newBlockY][newBlockX] = block
block.moveTo(newBlockX, newBlockY)
gamePiece.x = xTo
gamePiece.y = yTo
def addBlock(self, aBlock: gp.Block):
"""adds a block to the game board"""
if self.blocks[aBlock.y][aBlock.x] != None:
raise MovementError('game board space not empty')
self.blocks[aBlock.y][aBlock.x] = aBlock
self.groundSprites.append(aBlock.sprite)
def addGamePiece(self, gamePiece:gp.GamePiece):
for y in range(gamePiece.size):
for x in range(gamePiece.size):
block = gamePiece.blocks[y][x]
if block is None:
continue
self.blocks[block.y][block.x] = block
self.playerSprites.append(block.sprite)
def moveBlock(self, aBlock: gp.Block, x: int, y: int):
self.blocks[aBlock.y][aBlock.x] = None
self.blocks[y][x] = aBlock
def removeBlock(self, aBlock: gp.Block):
""" remove a block from the game board """
        for y, row in enumerate(self.blocks):
            for x, block in enumerate(row):
if block is aBlock:
self.blocks[y][x] = None
self.playerSprites.remove(aBlock.sprite)
return
class GameManager:
def __init__(self) -> None:
pass
def start(self):
gameBoard = GameBoard(10, 20)
        # addGamePiece needs a piece; a default-constructed one is an
        # assumption here, since GamePiece's constructor lives in GamePiece.py
        gameBoard.addGamePiece(gp.GamePiece())
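# Indexing aside (an addition, independent of the classes above): the board
# is row-major, so a cell at column x, row y lives at blocks[y][x].
def _grid_demo(width: int = 10, height: int = 20):
    grid = [[None for _ in range(width)] for _ in range(height)]
    grid[height - 1][width - 1] = 'bottom-right corner'  # y first, then x
    return grid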
|
normal
|
{
"blob_id": "2d7431996bc8d1099c08fddc815b4706deb4f023",
"index": 4393,
"step-1": "<mask token>\n\n\nclass GameBoard:\n <mask token>\n <mask token>\n\n def draw(self):\n self.playerSprites.draw()\n self.groundSprites.draw()\n <mask token>\n <mask token>\n\n def moveGamePiece(self, gamePiece: gp.GamePiece, xTo: int, yTo: int):\n if not self.canMoveGamePiece(gamePiece, xTo, yTo):\n return False\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n self.blocks[y + gamePiece.y][x + gamePiece.x] = None\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n blockXDiff = block.x - gamePiece.x\n blockYDiff = block.y - gamePiece.y\n newBlockX = xTo + blockXDiff\n newBlockY = yTo + blockYDiff\n self.blocks[newBlockY][newBlockX] = block\n block.moveTo(newBlockX, newBlockY)\n gamePiece.x = xTo\n gamePiece.y = yTo\n <mask token>\n <mask token>\n <mask token>\n\n def removeBlock(self, aBlock: gp.Block):\n \"\"\" remove a block from the game board \"\"\"\n for y, row in iter(self.blocks):\n for x, block in iter(row):\n if block is aBlock:\n self.blocks[y][x] = None\n self.playerSprites.remove(aBlock.sprite)\n return\n\n\nclass GameManager:\n\n def __init__(self) ->None:\n pass\n\n def start(self):\n gameBoard = GameBoard(10, 20)\n gameBoard.addGamePiece()\n",
"step-2": "<mask token>\n\n\nclass GameBoard:\n <mask token>\n\n def __init__(self, width: int, height: int):\n self.width = width\n self.height = height\n self.blocks = [[None for y in range(width)] for x in range(height)]\n self.playerSprites = SpriteList()\n self.groundSprites = SpriteList()\n\n def draw(self):\n self.playerSprites.draw()\n self.groundSprites.draw()\n\n def canMoveBlock(self, x: int, y: int) ->bool:\n return self.blocks[x][y] is None\n <mask token>\n\n def moveGamePiece(self, gamePiece: gp.GamePiece, xTo: int, yTo: int):\n if not self.canMoveGamePiece(gamePiece, xTo, yTo):\n return False\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n self.blocks[y + gamePiece.y][x + gamePiece.x] = None\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n blockXDiff = block.x - gamePiece.x\n blockYDiff = block.y - gamePiece.y\n newBlockX = xTo + blockXDiff\n newBlockY = yTo + blockYDiff\n self.blocks[newBlockY][newBlockX] = block\n block.moveTo(newBlockX, newBlockY)\n gamePiece.x = xTo\n gamePiece.y = yTo\n\n def addBlock(self, aBlock: gp.Block):\n \"\"\"adds a block to the game board\"\"\"\n if self.blocks[aBlock.y][aBlock.x] != None:\n raise MovementError('game board space not empty')\n self.blocks[aBlock.y][aBlock.x] = aBlock\n self.groundSprites.append(aBlock.sprite)\n\n def addGamePiece(self, gamePiece: gp.GamePiece):\n for y in range(gamePiece.size):\n for x in range(gamePiece.size):\n block = gamePiece.blocks[y][x]\n if block is None:\n continue\n self.blocks[block.y][block.x] = block\n self.playerSprites.append(block.sprite)\n\n def moveBlock(self, aBlock: gp.Block, x: int, y: int):\n self.blocks[aBlock.y][aBlock.x] = None\n self.blocks[y][x] = aBlock\n\n def removeBlock(self, aBlock: gp.Block):\n \"\"\" remove a block from the game board \"\"\"\n for y, row in iter(self.blocks):\n for x, block in iter(row):\n if block is aBlock:\n self.blocks[y][x] = None\n self.playerSprites.remove(aBlock.sprite)\n return\n\n\nclass GameManager:\n\n def __init__(self) ->None:\n pass\n\n def start(self):\n gameBoard = GameBoard(10, 20)\n gameBoard.addGamePiece()\n",
"step-3": "<mask token>\n\n\nclass GameBoard:\n <mask token>\n\n def __init__(self, width: int, height: int):\n self.width = width\n self.height = height\n self.blocks = [[None for y in range(width)] for x in range(height)]\n self.playerSprites = SpriteList()\n self.groundSprites = SpriteList()\n\n def draw(self):\n self.playerSprites.draw()\n self.groundSprites.draw()\n\n def canMoveBlock(self, x: int, y: int) ->bool:\n return self.blocks[x][y] is None\n\n def canMoveGamePiece(self, gamePiece: gp.GamePiece, xTo: int, yTo: int\n ) ->bool:\n for yDiff, row in enumerate(gamePiece.blocks):\n for xDiff, block in enumerate(row):\n if block is None:\n continue\n newX = xTo + xDiff\n newY = yTo + yDiff\n if newX >= self.width or newX < 0:\n return False\n if newY < 0 or newY >= self.height:\n return False\n if self.blocks[newY][newX] is not None and self.blocks[newY][\n newX] not in gamePiece.allBlocks():\n return False\n return True\n\n def moveGamePiece(self, gamePiece: gp.GamePiece, xTo: int, yTo: int):\n if not self.canMoveGamePiece(gamePiece, xTo, yTo):\n return False\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n self.blocks[y + gamePiece.y][x + gamePiece.x] = None\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n blockXDiff = block.x - gamePiece.x\n blockYDiff = block.y - gamePiece.y\n newBlockX = xTo + blockXDiff\n newBlockY = yTo + blockYDiff\n self.blocks[newBlockY][newBlockX] = block\n block.moveTo(newBlockX, newBlockY)\n gamePiece.x = xTo\n gamePiece.y = yTo\n\n def addBlock(self, aBlock: gp.Block):\n \"\"\"adds a block to the game board\"\"\"\n if self.blocks[aBlock.y][aBlock.x] != None:\n raise MovementError('game board space not empty')\n self.blocks[aBlock.y][aBlock.x] = aBlock\n self.groundSprites.append(aBlock.sprite)\n\n def addGamePiece(self, gamePiece: gp.GamePiece):\n for y in range(gamePiece.size):\n for x in range(gamePiece.size):\n block = gamePiece.blocks[y][x]\n if block is None:\n continue\n self.blocks[block.y][block.x] = block\n self.playerSprites.append(block.sprite)\n\n def moveBlock(self, aBlock: gp.Block, x: int, y: int):\n self.blocks[aBlock.y][aBlock.x] = None\n self.blocks[y][x] = aBlock\n\n def removeBlock(self, aBlock: gp.Block):\n \"\"\" remove a block from the game board \"\"\"\n for y, row in iter(self.blocks):\n for x, block in iter(row):\n if block is aBlock:\n self.blocks[y][x] = None\n self.playerSprites.remove(aBlock.sprite)\n return\n\n\nclass GameManager:\n\n def __init__(self) ->None:\n pass\n\n def start(self):\n gameBoard = GameBoard(10, 20)\n gameBoard.addGamePiece()\n",
"step-4": "from arcade.sprite_list.sprite_list import SpriteList\nimport GamePiece as gp\nfrom Errors import *\n\n\nclass GameConfig:\n WINDOW_TITLE = 'MyPyTris'\n SCREEN_WIDTH = 450\n SCREEN_HEIGHT = 900\n BLOCK_PX = 45\n SPRITE_PX = 64\n BLOCK_SCALE = BLOCK_PX / SPRITE_PX\n\n\nclass GameBoard:\n \"\"\" Class to manage blocks on the game board \"\"\"\n\n def __init__(self, width: int, height: int):\n self.width = width\n self.height = height\n self.blocks = [[None for y in range(width)] for x in range(height)]\n self.playerSprites = SpriteList()\n self.groundSprites = SpriteList()\n\n def draw(self):\n self.playerSprites.draw()\n self.groundSprites.draw()\n\n def canMoveBlock(self, x: int, y: int) ->bool:\n return self.blocks[x][y] is None\n\n def canMoveGamePiece(self, gamePiece: gp.GamePiece, xTo: int, yTo: int\n ) ->bool:\n for yDiff, row in enumerate(gamePiece.blocks):\n for xDiff, block in enumerate(row):\n if block is None:\n continue\n newX = xTo + xDiff\n newY = yTo + yDiff\n if newX >= self.width or newX < 0:\n return False\n if newY < 0 or newY >= self.height:\n return False\n if self.blocks[newY][newX] is not None and self.blocks[newY][\n newX] not in gamePiece.allBlocks():\n return False\n return True\n\n def moveGamePiece(self, gamePiece: gp.GamePiece, xTo: int, yTo: int):\n if not self.canMoveGamePiece(gamePiece, xTo, yTo):\n return False\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n self.blocks[y + gamePiece.y][x + gamePiece.x] = None\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n blockXDiff = block.x - gamePiece.x\n blockYDiff = block.y - gamePiece.y\n newBlockX = xTo + blockXDiff\n newBlockY = yTo + blockYDiff\n self.blocks[newBlockY][newBlockX] = block\n block.moveTo(newBlockX, newBlockY)\n gamePiece.x = xTo\n gamePiece.y = yTo\n\n def addBlock(self, aBlock: gp.Block):\n \"\"\"adds a block to the game board\"\"\"\n if self.blocks[aBlock.y][aBlock.x] != None:\n raise MovementError('game board space not empty')\n self.blocks[aBlock.y][aBlock.x] = aBlock\n self.groundSprites.append(aBlock.sprite)\n\n def addGamePiece(self, gamePiece: gp.GamePiece):\n for y in range(gamePiece.size):\n for x in range(gamePiece.size):\n block = gamePiece.blocks[y][x]\n if block is None:\n continue\n self.blocks[block.y][block.x] = block\n self.playerSprites.append(block.sprite)\n\n def moveBlock(self, aBlock: gp.Block, x: int, y: int):\n self.blocks[aBlock.y][aBlock.x] = None\n self.blocks[y][x] = aBlock\n\n def removeBlock(self, aBlock: gp.Block):\n \"\"\" remove a block from the game board \"\"\"\n for y, row in iter(self.blocks):\n for x, block in iter(row):\n if block is aBlock:\n self.blocks[y][x] = None\n self.playerSprites.remove(aBlock.sprite)\n return\n\n\nclass GameManager:\n\n def __init__(self) ->None:\n pass\n\n def start(self):\n gameBoard = GameBoard(10, 20)\n gameBoard.addGamePiece()\n",
"step-5": "\nfrom arcade.sprite_list.sprite_list import SpriteList\nimport GamePiece as gp\nfrom Errors import *\n\nclass GameConfig:\n WINDOW_TITLE = \"MyPyTris\"\n SCREEN_WIDTH = 450\n SCREEN_HEIGHT = 900\n BLOCK_PX = 45 # 45px blocks on screen\n SPRITE_PX = 64 # 64px sprite\n BLOCK_SCALE = BLOCK_PX/SPRITE_PX # sprite scale ratio\n\nclass GameBoard:\n \"\"\" Class to manage blocks on the game board \"\"\"\n\n def __init__(self, width: int, height: int):\n # 2D list of blocks initialized to empty in the width and height of our game board\n self.width = width\n self.height = height\n self.blocks = [[None for y in range(width)] for x in range(height)]\n self.playerSprites = SpriteList()\n self.groundSprites = SpriteList()\n\n\n def draw(self):\n self.playerSprites.draw()\n self.groundSprites.draw()\n\n def canMoveBlock(self, x: int, y: int) -> bool:\n return self.blocks[x][y] is None\n\n def canMoveGamePiece(self, gamePiece:gp.GamePiece, xTo:int, yTo:int) -> bool:\n for yDiff, row in enumerate(gamePiece.blocks):\n for xDiff, block in enumerate(row):\n if block is None:\n continue\n newX = xTo + xDiff\n newY = yTo + yDiff\n if newX >= self.width or newX < 0:\n return False\n if newY < 0 or newY >= self.height:\n return False\n if self.blocks[newY][newX] is not None \\\n and self.blocks[newY][newX] not in gamePiece.allBlocks():\n return False\n return True\n\n def moveGamePiece(self, gamePiece:gp.GamePiece, xTo:int, yTo:int):\n if (not self.canMoveGamePiece(gamePiece, xTo, yTo)):\n return False\n\n # remove blocks from game board\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n self.blocks[y + gamePiece.y][x + gamePiece.x] = None\n\n # add blocks in new positions\n for y, row in enumerate(gamePiece.blocks):\n for x, block in enumerate(row):\n if block is not None:\n blockXDiff = block.x - gamePiece.x\n blockYDiff = block.y - gamePiece.y\n newBlockX = xTo + blockXDiff\n newBlockY = yTo + blockYDiff\n self.blocks[newBlockY][newBlockX] = block\n block.moveTo(newBlockX, newBlockY)\n\n gamePiece.x = xTo\n gamePiece.y = yTo\n \n\n def addBlock(self, aBlock: gp.Block):\n \"\"\"adds a block to the game board\"\"\"\n\n if self.blocks[aBlock.y][aBlock.x] != None:\n raise MovementError('game board space not empty')\n self.blocks[aBlock.y][aBlock.x] = aBlock\n self.groundSprites.append(aBlock.sprite)\n\n def addGamePiece(self, gamePiece:gp.GamePiece):\n for y in range(gamePiece.size):\n for x in range(gamePiece.size):\n block = gamePiece.blocks[y][x]\n if block is None:\n continue\n self.blocks[block.y][block.x] = block\n self.playerSprites.append(block.sprite)\n\n def moveBlock(self, aBlock: gp.Block, x: int, y: int):\n self.blocks[aBlock.y][aBlock.x] = None\n self.blocks[y][x] = aBlock\n\n def removeBlock(self, aBlock: gp.Block):\n \"\"\" remove a block from the game board \"\"\"\n \n for y, row in iter(self.blocks):\n for x, block in iter(row):\n if block is aBlock:\n self.blocks[y][x] = None\n self.playerSprites.remove(aBlock.sprite)\n return\n\n\nclass GameManager:\n\n def __init__(self) -> None:\n pass\n \n def start(self):\n gameBoard = GameBoard(10, 20)\n gameBoard.addGamePiece()",
"step-ids": [
7,
12,
13,
17,
18
]
}
|
[
7,
12,
13,
17,
18
] |
import numpy as np
import tensorflow as tf
import math
from .. import util
def debug_inference(inference, dummy, entropy, cross_entropy, expected_log_likelihood):
    dummy = tf.Print(dummy, [entropy], 'entropy: ')
    dummy = tf.Print(dummy, [cross_entropy], 'cross_entropy: ')
    dummy = tf.Print(dummy, [expected_log_likelihood], 'expected_log_likelihood: ')
#dummy = tf.Print(dummy, [inference.q_means_u], 'self.q_means_u: ')
#dummy = tf.Print(dummy, [inference.q_covars_u], 'self.q_covars_u: ')
#dummy = tf.Print(dummy, [inference.q_means_v], 'self.q_means_v: ')
#dummy = tf.Print(dummy, [inference.q_covars_v], 'self.q_covars_v: ')
return dummy
def matrix_conditions(session, inference):
for j in range(inference.num_latent):
k_j = inference.kern_f[j]
K_zz_f = k_j.kernel(inference.inducing_locations, inference.inducing_locations, jitter=True)
mat = K_zz_f.eval(session=session)
cond = np.linalg.cond(mat)
sigma = k_j.sigma.eval(session=session)
ls = k_j.length_scales.eval(session=session)
print('MATRIX CONDITION F('+str(j)+'): ', cond)
print('SIGMA F('+str(j)+'): ', sigma)
print('LENGTH_SCALES F('+str(j)+'): ', ls)
print(mat)
for j in range(inference.num_latent):
for i in range(inference.num_outputs):
k_ij = inference.kern_w[i][j]
K_zz_w = k_ij.kernel(inference.inducing_locations, inference.inducing_locations, jitter=True)
mat = K_zz_w.eval(session=session)
cond = np.linalg.cond(mat)
sigma = k_ij.sigma.eval(session=session)
ls = k_ij.length_scales.eval(session=session)
print('MATRIX CONDITION W('+str(i)+','+str(j)+'): ', cond)
print('SIGMA W('+str(i)+','+str(j)+'): ', sigma)
print('LENGTH_SCALES W('+str(i)+','+str(j)+'): ', ls)
print(mat)
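# Illustration (an addition, not part of the original module): the condition
# number printed above is lambda_max / lambda_min for these SPD kernels; a
# toy, nearly singular 2x2 kernel already sits around 2e3.
def _toy_condition():
    toy = np.array([[1.0, 0.999], [0.999, 1.0]])
    return np.linalg.cond(toy)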
|
normal
|
{
"blob_id": "4758d6efde21e3b5d91f107188f24b6ddf7cbbe4",
"index": 7935,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef debug_inference(inference, dummy, entropy, cross_entropy,\n expected_log_likelhood):\n dummy = tf.Print(dummy, [entropy], 'entropy: ')\n dummy = tf.Print(dummy, [cross_entropy], 'cross_entropy: ')\n dummy = tf.Print(dummy, [expected_log_likelhood],\n 'expected_log_likelhood: ')\n return dummy\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef debug_inference(inference, dummy, entropy, cross_entropy,\n expected_log_likelhood):\n dummy = tf.Print(dummy, [entropy], 'entropy: ')\n dummy = tf.Print(dummy, [cross_entropy], 'cross_entropy: ')\n dummy = tf.Print(dummy, [expected_log_likelhood],\n 'expected_log_likelhood: ')\n return dummy\n\n\ndef matrix_conditions(session, inference):\n for j in range(inference.num_latent):\n k_j = inference.kern_f[j]\n K_zz_f = k_j.kernel(inference.inducing_locations, inference.\n inducing_locations, jitter=True)\n mat = K_zz_f.eval(session=session)\n cond = np.linalg.cond(mat)\n sigma = k_j.sigma.eval(session=session)\n ls = k_j.length_scales.eval(session=session)\n print('MATRIX CONDITION F(' + str(j) + '): ', cond)\n print('SIGMA F(' + str(j) + '): ', sigma)\n print('LENGTH_SCALES F(' + str(j) + '): ', ls)\n print(mat)\n for j in range(inference.num_latent):\n for i in range(inference.num_outputs):\n k_ij = inference.kern_w[i][j]\n K_zz_w = k_ij.kernel(inference.inducing_locations, inference.\n inducing_locations, jitter=True)\n mat = K_zz_w.eval(session=session)\n cond = np.linalg.cond(mat)\n sigma = k_ij.sigma.eval(session=session)\n ls = k_ij.length_scales.eval(session=session)\n print('MATRIX CONDITION W(' + str(i) + ',' + str(j) + '): ', cond)\n print('SIGMA W(' + str(i) + ',' + str(j) + '): ', sigma)\n print('LENGTH_SCALES W(' + str(i) + ',' + str(j) + '): ', ls)\n print(mat)\n",
"step-4": "import numpy as np\nimport tensorflow as tf\nimport math\nfrom .. import util\n\n\ndef debug_inference(inference, dummy, entropy, cross_entropy,\n expected_log_likelhood):\n dummy = tf.Print(dummy, [entropy], 'entropy: ')\n dummy = tf.Print(dummy, [cross_entropy], 'cross_entropy: ')\n dummy = tf.Print(dummy, [expected_log_likelhood],\n 'expected_log_likelhood: ')\n return dummy\n\n\ndef matrix_conditions(session, inference):\n for j in range(inference.num_latent):\n k_j = inference.kern_f[j]\n K_zz_f = k_j.kernel(inference.inducing_locations, inference.\n inducing_locations, jitter=True)\n mat = K_zz_f.eval(session=session)\n cond = np.linalg.cond(mat)\n sigma = k_j.sigma.eval(session=session)\n ls = k_j.length_scales.eval(session=session)\n print('MATRIX CONDITION F(' + str(j) + '): ', cond)\n print('SIGMA F(' + str(j) + '): ', sigma)\n print('LENGTH_SCALES F(' + str(j) + '): ', ls)\n print(mat)\n for j in range(inference.num_latent):\n for i in range(inference.num_outputs):\n k_ij = inference.kern_w[i][j]\n K_zz_w = k_ij.kernel(inference.inducing_locations, inference.\n inducing_locations, jitter=True)\n mat = K_zz_w.eval(session=session)\n cond = np.linalg.cond(mat)\n sigma = k_ij.sigma.eval(session=session)\n ls = k_ij.length_scales.eval(session=session)\n print('MATRIX CONDITION W(' + str(i) + ',' + str(j) + '): ', cond)\n print('SIGMA W(' + str(i) + ',' + str(j) + '): ', sigma)\n print('LENGTH_SCALES W(' + str(i) + ',' + str(j) + '): ', ls)\n print(mat)\n",
"step-5": "import numpy as np\nimport tensorflow as tf\nimport math\nfrom .. import util\n\ndef debug_inference(inference, dummy, entropy, cross_entropy, expected_log_likelhood):\n dummy = tf.Print(dummy, [entropy], 'entropy: ')\n dummy = tf.Print(dummy, [cross_entropy], 'cross_entropy: ')\n dummy = tf.Print(dummy, [expected_log_likelhood], 'expected_log_likelhood: ')\n #dummy = tf.Print(dummy, [inference.q_means_u], 'self.q_means_u: ')\n #dummy = tf.Print(dummy, [inference.q_covars_u], 'self.q_covars_u: ')\n #dummy = tf.Print(dummy, [inference.q_means_v], 'self.q_means_v: ')\n #dummy = tf.Print(dummy, [inference.q_covars_v], 'self.q_covars_v: ')\n\n return dummy\n\ndef matrix_conditions(session, inference):\n for j in range(inference.num_latent):\n k_j = inference.kern_f[j]\n K_zz_f = k_j.kernel(inference.inducing_locations, inference.inducing_locations, jitter=True)\n mat = K_zz_f.eval(session=session)\n cond = np.linalg.cond(mat)\n sigma = k_j.sigma.eval(session=session)\n ls = k_j.length_scales.eval(session=session)\n print('MATRIX CONDITION F('+str(j)+'): ', cond)\n print('SIGMA F('+str(j)+'): ', sigma)\n print('LENGTH_SCALES F('+str(j)+'): ', ls)\n\n print(mat)\n\n for j in range(inference.num_latent):\n for i in range(inference.num_outputs):\n k_ij = inference.kern_w[i][j]\n K_zz_w = k_ij.kernel(inference.inducing_locations, inference.inducing_locations, jitter=True)\n mat = K_zz_w.eval(session=session)\n cond = np.linalg.cond(mat)\n sigma = k_ij.sigma.eval(session=session)\n ls = k_ij.length_scales.eval(session=session)\n print('MATRIX CONDITION W('+str(i)+','+str(j)+'): ', cond)\n print('SIGMA W('+str(i)+','+str(j)+'): ', sigma)\n print('LENGTH_SCALES W('+str(i)+','+str(j)+'): ', ls)\n print(mat)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#########################################################
# Author: Todd A. Reisel
# Date: 2/24/2003
# Class: StaticTemplateList
#########################################################
from BaseClasses.TemplateList import *
class StaticTemplateList(TemplateList):
    def __init__(self, viewMode=None):
        TemplateList.__init__(self, viewMode)
    def getList(self):
        return [["graphical", "interface.html"], ["ada", "interface.html"]]
    def getFeatureName(self):
        return "static"
|
normal
|
{
"blob_id": "7de3c0ab2e7c8ac00d37f1dfb5948027cfa7806c",
"index": 5084,
"step-1": "<mask token>\n\n\nclass StaticTemplateList(TemplateList):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass StaticTemplateList(TemplateList):\n\n def __init__(self, viewMode=None):\n TemplateList.__init__(self, viewMode)\n <mask token>\n\n def getFeatureName(self):\n return 'static'\n",
"step-3": "<mask token>\n\n\nclass StaticTemplateList(TemplateList):\n\n def __init__(self, viewMode=None):\n TemplateList.__init__(self, viewMode)\n\n def getList(self):\n return [['graphical', 'interface.html'], ['ada', 'interface.html']]\n\n def getFeatureName(self):\n return 'static'\n",
"step-4": "from BaseClasses.TemplateList import *\n\n\nclass StaticTemplateList(TemplateList):\n\n def __init__(self, viewMode=None):\n TemplateList.__init__(self, viewMode)\n\n def getList(self):\n return [['graphical', 'interface.html'], ['ada', 'interface.html']]\n\n def getFeatureName(self):\n return 'static'\n",
"step-5": "#########################################################\n# Author: Todd A. Reisel\n# Date: 2/24/2003\n# Class: StaticTemplateList\n#########################################################\n\nfrom BaseClasses.TemplateList import *;\n\nclass StaticTemplateList(TemplateList):\n def __init__(self, viewMode = None):\n TemplateList.__init__(self, viewMode);\n \n def getList(self):\n return [ [\"graphical\", \"interface.html\"], [\"ada\", \"interface.html\"] ];\n \n def getFeatureName(self):\n return \"static\";\n \n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# Data source: kaggle
# Data overview: zoo dataset -- predict an animal's class_type from its attributes
# Prediction task: multiclass (7 classes, one-hot encoded below)
# Applied ML model: deep multilayer perceptron
# Training set: the train_test_split default (75% of the rows)
# Validation set: the remaining 25%
# Test set: none -- collected data, so no separate test split is available
# Input data: every column except animal_name and class_type
# Hidden layers: 2
# Activation functions
#  - hidden layer 1: Relu
#  - hidden layer 2: Relu
#  - Output Layer: Softmax
# Loss function: categorical_crossentropy
# Optimizer: adam
# Tensorflow version: 2.0.0
# Python version: 3.7.4
import pandas as pd
from datetime import datetime
from sklearn.model_selection import train_test_split
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import to_categorical
np.random.seed(5)
match_dic={}
zoo_class = pd.read_csv('zoo.csv',sep=',',header=0)
zoo_class.columns = zoo_class.columns.str.replace(' ','_')
# identify all independent (feature) variables
input_data_header = list(zoo_class.columns.difference(["animal_name","class_type"]))
input_data_number = len(input_data_header)
label = zoo_class["class_type"]
start_time = datetime.now()
train_data, test_data, train_label, test_label = train_test_split(zoo_class[input_data_header],label)
train_label = to_categorical(train_label, num_classes=7)
test_label = to_categorical(test_label, num_classes=7)
# load the training and test sets
# x_train = x_train.reshape(60000, width * height).astype('float32') / 255.0
# x_test = x_test.reshape(10000, width * height).astype('float32') / 255.0
# build the model
model = Sequential()
model.add(Dense(64, input_dim=input_data_number, activation='relu'))
model.add(Dense(64, activation='relu'))
# model.add(Dense(6, activation='sigmoid'))
model.add(Dense(7, activation='softmax'))
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
# 4. train the model
hist = model.fit(train_data, train_label, epochs=20000, batch_size=64, validation_data=(test_data, test_label))
# hist = model.fit(train_data, train_label, epochs=1000, batch_size=64)
end_time = datetime.now()
# 5. inspect the training history
import matplotlib.pyplot as plt
fig, loss_ax = plt.subplots()
acc_ax = loss_ax.twinx()
loss_ax.plot(hist.history['loss'], 'y', label='train loss')
loss_ax.plot(hist.history['val_loss'], 'r', label='val loss')
# acc_ax.plot(hist.history['acc'], 'b', label='train acc')
acc_ax.plot(hist.history['accuracy'], 'b', label='train acc')
# acc_ax.plot(hist.history['val_acc'], 'g', label='val acc')
acc_ax.plot(hist.history['val_accuracy'],'g', label='val acc')
loss_ax.set_xlabel('epoch')
loss_ax.set_ylabel('loss')
acc_ax.set_ylabel('accuracy')
loss_ax.legend(loc='upper left')
acc_ax.legend(loc='lower left')
plt.show()
# 6. evaluate the model
loss_and_metrics = model.evaluate(test_data, test_label, batch_size=32)
print('loss_and_metrics : ' + str(loss_and_metrics))
scores = model.evaluate(test_data, test_label)
print("%s: %.2f%%"%(model.metrics_names[1],scores[1]*100))
|
normal
|
{
"blob_id": "bfa5739949c26758e3762fcff8347d23ad70f704",
"index": 6114,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.random.seed(5)\n<mask token>\nmodel.add(Dense(64, input_dim=input_data_number, activation='relu'))\nmodel.add(Dense(64, activation='relu'))\nmodel.add(Dense(7, activation='softmax'))\nmodel.compile(optimizer='adam', loss='categorical_crossentropy', metrics=[\n 'accuracy'])\n<mask token>\nloss_ax.plot(hist.history['loss'], 'y', label='train loss')\nloss_ax.plot(hist.history['val_loss'], 'r', label='val loss')\nacc_ax.plot(hist.history['accuracy'], 'b', label='train acc')\nacc_ax.plot(hist.history['val_accuracy'], 'g', label='val acc')\nloss_ax.set_xlabel('epoch')\nloss_ax.set_ylabel('loss')\nacc_ax.set_ylabel('accuray')\nloss_ax.legend(loc='upper left')\nacc_ax.legend(loc='lower left')\nplt.show()\n<mask token>\nprint('loss_and_metrics : ' + str(loss_and_metrics))\n<mask token>\nprint('%s: %.2f%%' % (model.metrics_names[1], scores[1] * 100))\n",
"step-3": "<mask token>\nnp.random.seed(5)\nmatch_dic = {}\nzoo_class = pd.read_csv('zoo.csv', sep=',', header=0)\nzoo_class.columns = zoo_class.columns.str.replace(' ', '_')\ninput_data_header = list(zoo_class.columns.difference(['animal_name',\n 'class_type']))\ninput_data_number = len(input_data_header)\nlabel = zoo_class['class_type']\nstart_time = datetime.now()\ntrain_data, test_data, train_label, test_label = train_test_split(zoo_class\n [input_data_header], label)\ntrain_label = to_categorical(train_label, num_classes=7)\ntest_label = to_categorical(test_label, num_classes=7)\nmodel = Sequential()\nmodel.add(Dense(64, input_dim=input_data_number, activation='relu'))\nmodel.add(Dense(64, activation='relu'))\nmodel.add(Dense(7, activation='softmax'))\nmodel.compile(optimizer='adam', loss='categorical_crossentropy', metrics=[\n 'accuracy'])\nhist = model.fit(train_data, train_label, epochs=20000, batch_size=64,\n validation_data=(test_data, test_label))\nend_time = datetime.now()\n<mask token>\nfig, loss_ax = plt.subplots()\nacc_ax = loss_ax.twinx()\nloss_ax.plot(hist.history['loss'], 'y', label='train loss')\nloss_ax.plot(hist.history['val_loss'], 'r', label='val loss')\nacc_ax.plot(hist.history['accuracy'], 'b', label='train acc')\nacc_ax.plot(hist.history['val_accuracy'], 'g', label='val acc')\nloss_ax.set_xlabel('epoch')\nloss_ax.set_ylabel('loss')\nacc_ax.set_ylabel('accuray')\nloss_ax.legend(loc='upper left')\nacc_ax.legend(loc='lower left')\nplt.show()\nloss_and_metrics = model.evaluate(test_data, test_label, batch_size=32)\nprint('loss_and_metrics : ' + str(loss_and_metrics))\nscores = model.evaluate(test_data, test_label)\nprint('%s: %.2f%%' % (model.metrics_names[1], scores[1] * 100))\n",
"step-4": "import pandas as pd\nfrom datetime import datetime\nfrom sklearn.model_selection import train_test_split\nimport numpy as np\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.utils import to_categorical\nnp.random.seed(5)\nmatch_dic = {}\nzoo_class = pd.read_csv('zoo.csv', sep=',', header=0)\nzoo_class.columns = zoo_class.columns.str.replace(' ', '_')\ninput_data_header = list(zoo_class.columns.difference(['animal_name',\n 'class_type']))\ninput_data_number = len(input_data_header)\nlabel = zoo_class['class_type']\nstart_time = datetime.now()\ntrain_data, test_data, train_label, test_label = train_test_split(zoo_class\n [input_data_header], label)\ntrain_label = to_categorical(train_label, num_classes=7)\ntest_label = to_categorical(test_label, num_classes=7)\nmodel = Sequential()\nmodel.add(Dense(64, input_dim=input_data_number, activation='relu'))\nmodel.add(Dense(64, activation='relu'))\nmodel.add(Dense(7, activation='softmax'))\nmodel.compile(optimizer='adam', loss='categorical_crossentropy', metrics=[\n 'accuracy'])\nhist = model.fit(train_data, train_label, epochs=20000, batch_size=64,\n validation_data=(test_data, test_label))\nend_time = datetime.now()\nimport matplotlib.pyplot as plt\nfig, loss_ax = plt.subplots()\nacc_ax = loss_ax.twinx()\nloss_ax.plot(hist.history['loss'], 'y', label='train loss')\nloss_ax.plot(hist.history['val_loss'], 'r', label='val loss')\nacc_ax.plot(hist.history['accuracy'], 'b', label='train acc')\nacc_ax.plot(hist.history['val_accuracy'], 'g', label='val acc')\nloss_ax.set_xlabel('epoch')\nloss_ax.set_ylabel('loss')\nacc_ax.set_ylabel('accuray')\nloss_ax.legend(loc='upper left')\nacc_ax.legend(loc='lower left')\nplt.show()\nloss_and_metrics = model.evaluate(test_data, test_label, batch_size=32)\nprint('loss_and_metrics : ' + str(loss_and_metrics))\nscores = model.evaluate(test_data, test_label)\nprint('%s: %.2f%%' % (model.metrics_names[1], scores[1] * 100))\n",
"step-5": "# 데이터 출처: kaggle\n# 데이터 개요: 511, 유리를 위한 다양한 속성(화학원소)들로부터 type 구별\n# 데이터 예측 모델: 이진클래스\n# 적용 머신러닝 모델: 깊은 다층 퍼셉트론 신경망\n# 훈련 데이터셋: 160건\n# 검증 데이터셋: 건\n# 시험 데이터셋: 수집데이터로서 시험셋을 확보할 수 없으므로 고려하지 않음\n# 입력 데이터: 10개 항목의 데이터\n# 은닉층: 2개\n# 사용한 활성화 함수\n# - 제1 은닉층: Relu\n# - 제2 은닉층: Relu\n# - Output Layer: Softmax\n# 사용한 손실함수: categorical_crossentropy\n# 사용한 Optimizer: rmsprop\n# Tensorflow 버전: 2.0.0\n# 파이썬버전: 3.7.4\n\nimport pandas as pd\nfrom datetime import datetime\nfrom sklearn.model_selection import train_test_split\nimport numpy as np\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.utils import to_categorical\n\nnp.random.seed(5)\nmatch_dic={}\n\nzoo_class = pd.read_csv('zoo.csv',sep=',',header=0)\nzoo_class.columns = zoo_class.columns.str.replace(' ','_')\n\n\n# 전체 독립변수 식별\ninput_data_header = list(zoo_class.columns.difference([\"animal_name\",\"class_type\"]))\ninput_data_number = len(input_data_header)\nlabel = zoo_class[\"class_type\"]\n\nstart_time = datetime.now()\n\ntrain_data, test_data, train_label, test_label = train_test_split(zoo_class[input_data_header],label)\ntrain_label = to_categorical(train_label, num_classes=7)\ntest_label = to_categorical(test_label, num_classes=7)\n\n# 훈련셋과 시험셋 불러오기\n# x_train = x_train.reshape(60000, width * height).astype('float32') / 255.0\n# x_test = x_test.reshape(10000, width * height).astype('float32') / 255.0\n\n# 모델 구성하기\nmodel = Sequential()\nmodel.add(Dense(64, input_dim=input_data_number, activation='relu'))\nmodel.add(Dense(64, activation='relu'))\n# model.add(Dense(6, activation='sigmoid'))\nmodel.add(Dense(7, activation='softmax'))\n\nmodel.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n# model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])\n\n# 4. 모델 학습시키기\nhist = model.fit(train_data, train_label, epochs=20000, batch_size=64, validation_data=(test_data, test_label))\n# hist = model.fit(train_data, train_label, epochs=1000, batch_size=64)\n\nend_time = datetime.now()\n\n# 5. 학습과정 살펴보기\nimport matplotlib.pyplot as plt\n\nfig, loss_ax = plt.subplots()\n\nacc_ax = loss_ax.twinx()\n\nloss_ax.plot(hist.history['loss'], 'y', label='train loss')\nloss_ax.plot(hist.history['val_loss'], 'r', label='val loss')\n\n# acc_ax.plot(hist.history['acc'], 'b', label='train acc')\nacc_ax.plot(hist.history['accuracy'], 'b', label='train acc')\n# acc_ax.plot(hist.history['val_acc'], 'g', label='val acc')\nacc_ax.plot(hist.history['val_accuracy'],'g', label='val acc')\n\nloss_ax.set_xlabel('epoch')\nloss_ax.set_ylabel('loss')\nacc_ax.set_ylabel('accuray')\n\nloss_ax.legend(loc='upper left')\nacc_ax.legend(loc='lower left')\n\nplt.show()\n\n# 6. 모델 평가하기\nloss_and_metrics = model.evaluate(test_data, test_label, batch_size=32)\nprint('loss_and_metrics : ' + str(loss_and_metrics))\n\nscores = model.evaluate(test_data, test_label)\nprint(\"%s: %.2f%%\"%(model.metrics_names[1],scores[1]*100))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
plik = open("nowy_zad_84.txt", "w")
print(" Enter 5 names")
for i in range(1, 6):
    imie = input(f" Enter name no. {i} ")
    # plik.write(imie)
    # plik.write("\n")
    plik.write(f" {imie} \n")
plik.close()
plik = open("nowy_zad_84.txt", "a")
for i in range(1, 101):
plik.write(str(i))
plik.write("\n")
plik.close()
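# Read-back (an addition, not part of the original exercise): show the file
# that the two passes above produced.
plik = open("nowy_zad_84.txt", "r")
print(plik.read())
plik.close()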
|
normal
|
{
"blob_id": "0ac99e2b33f676a99674c9a8e5d9d47c5bce084b",
"index": 5820,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(' Podaj 5 imion')\nfor i in range(1, 6):\n imie = input(f' Podaj imie nr {i} ')\n plik.write(f' {imie} \\n')\nplik.close()\n<mask token>\nfor i in range(1, 101):\n plik.write(str(i))\n plik.write('\\n')\nplik.close()\n",
"step-3": "plik = open('nowy_zad_84.txt', 'w')\nprint(' Podaj 5 imion')\nfor i in range(1, 6):\n imie = input(f' Podaj imie nr {i} ')\n plik.write(f' {imie} \\n')\nplik.close()\nplik = open('nowy_zad_84.txt', 'a')\nfor i in range(1, 101):\n plik.write(str(i))\n plik.write('\\n')\nplik.close()\n",
"step-4": "\r\n\r\nplik=open(\"nowy_zad_84.txt\", \"w\")\r\n\r\nprint(\" Podaj 5 imion\")\r\nfor i in range(1,6):\r\n imie=input(f\" Podaj imie nr {i} \")\r\n # plik.write(imie)\r\n # plik.write(\"\\n\")\r\n plik.write(f\" {imie} \\n\")\r\n\r\nplik.close()\r\n\r\nplik=open(\"nowy_zad_84.txt\", \"a\")\r\n\r\nfor i in range(1,101):\r\n plik.write(str(i))\r\n plik.write(\"\\n\")\r\n\r\nplik.close()\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
import hashlib
import argparse
def hashfile(path, blocksize=65536):
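    # Read the file in fixed-size chunks and feed them to MD5, so arbitrarily
    # large files can be hashed without loading them into memory at once.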
afile = open(path, 'rb')
hasher = hashlib.md5()
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
afile.close()
return hasher.hexdigest()
def make_duplicate_list(filepath):
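    # The first file seen for each hash lands in unique_hashes; when the same
    # hash shows up again, both paths are collected in duplicate_files.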
unique_hashes = {}
duplicate_files = {}
for dir_name, subdir_list, file_list in os.walk(filepath):
for filename in file_list:
path = os.path.join(dir_name, filename)
file_hash = hashfile(path)
if file_hash in unique_hashes:
if file_hash not in duplicate_files:
# More than 2 duplicate files with same hash can exist,
# so list of filepaths is created.
duplicate_files[file_hash] = []
duplicate_files[file_hash].append(unique_hashes[file_hash])
duplicate_files[file_hash].append(path)
else:
unique_hashes[file_hash] = path
return duplicate_files
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="duplicates detector")
parser.add_argument("path_to_folder",
help="path to folder containig duplicates")
args = parser.parse_args()
path = args.path_to_folder
duplicates = make_duplicate_list(path)
    for idx, (key, value) in enumerate(duplicates.items(), 1):
        # Adjacent string literals are joined before .format() runs, so all
        # three placeholders are filled (the original "+" bound .format() to
        # the second literal only, printing the braces verbatim).
        print("{}) {} files with {} MD5 hash were "
              "found:".format(idx, len(value), key))
        for n, folder in enumerate(value, 1):
            print(" {}. {}".format(n, folder))
|
normal
|
{
"blob_id": "e99c158e54fd86b00e4e045e7fb28d961089800d",
"index": 3289,
"step-1": "<mask token>\n\n\ndef hashfile(path, blocksize=65536):\n afile = open(path, 'rb')\n hasher = hashlib.md5()\n buf = afile.read(blocksize)\n while len(buf) > 0:\n hasher.update(buf)\n buf = afile.read(blocksize)\n afile.close()\n return hasher.hexdigest()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef hashfile(path, blocksize=65536):\n afile = open(path, 'rb')\n hasher = hashlib.md5()\n buf = afile.read(blocksize)\n while len(buf) > 0:\n hasher.update(buf)\n buf = afile.read(blocksize)\n afile.close()\n return hasher.hexdigest()\n\n\ndef make_duplicate_list(filepath):\n unique_hashes = {}\n duplicate_files = {}\n for dir_name, subdir_list, file_list in os.walk(filepath):\n for filename in file_list:\n path = os.path.join(dir_name, filename)\n file_hash = hashfile(path)\n if file_hash in unique_hashes:\n if file_hash not in duplicate_files:\n duplicate_files[file_hash] = []\n duplicate_files[file_hash].append(unique_hashes[file_hash])\n duplicate_files[file_hash].append(path)\n else:\n unique_hashes[file_hash] = path\n return duplicate_files\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hashfile(path, blocksize=65536):\n afile = open(path, 'rb')\n hasher = hashlib.md5()\n buf = afile.read(blocksize)\n while len(buf) > 0:\n hasher.update(buf)\n buf = afile.read(blocksize)\n afile.close()\n return hasher.hexdigest()\n\n\ndef make_duplicate_list(filepath):\n unique_hashes = {}\n duplicate_files = {}\n for dir_name, subdir_list, file_list in os.walk(filepath):\n for filename in file_list:\n path = os.path.join(dir_name, filename)\n file_hash = hashfile(path)\n if file_hash in unique_hashes:\n if file_hash not in duplicate_files:\n duplicate_files[file_hash] = []\n duplicate_files[file_hash].append(unique_hashes[file_hash])\n duplicate_files[file_hash].append(path)\n else:\n unique_hashes[file_hash] = path\n return duplicate_files\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='duplicates detector')\n parser.add_argument('path_to_folder', help=\n 'path to folder containig duplicates')\n args = parser.parse_args()\n path = args.path_to_folder\n duplicates = make_duplicate_list(path)\n for idx, (key, value) in enumerate(duplicates.items(), 1):\n print('{}) {} files with {} MD5 hash were ' + 'found:'.format(idx,\n len(value), key))\n for idx, folder in enumerate(value, 1):\n print(' {}. {}'.format(idx, folder))\n",
"step-4": "import os\nimport hashlib\nimport argparse\n\n\ndef hashfile(path, blocksize=65536):\n afile = open(path, 'rb')\n hasher = hashlib.md5()\n buf = afile.read(blocksize)\n while len(buf) > 0:\n hasher.update(buf)\n buf = afile.read(blocksize)\n afile.close()\n return hasher.hexdigest()\n\n\ndef make_duplicate_list(filepath):\n unique_hashes = {}\n duplicate_files = {}\n for dir_name, subdir_list, file_list in os.walk(filepath):\n for filename in file_list:\n path = os.path.join(dir_name, filename)\n file_hash = hashfile(path)\n if file_hash in unique_hashes:\n if file_hash not in duplicate_files:\n duplicate_files[file_hash] = []\n duplicate_files[file_hash].append(unique_hashes[file_hash])\n duplicate_files[file_hash].append(path)\n else:\n unique_hashes[file_hash] = path\n return duplicate_files\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='duplicates detector')\n parser.add_argument('path_to_folder', help=\n 'path to folder containig duplicates')\n args = parser.parse_args()\n path = args.path_to_folder\n duplicates = make_duplicate_list(path)\n for idx, (key, value) in enumerate(duplicates.items(), 1):\n print('{}) {} files with {} MD5 hash were ' + 'found:'.format(idx,\n len(value), key))\n for idx, folder in enumerate(value, 1):\n print(' {}. {}'.format(idx, folder))\n",
"step-5": "import os\nimport hashlib\nimport argparse\n\n\ndef hashfile(path, blocksize=65536):\n afile = open(path, 'rb')\n hasher = hashlib.md5()\n buf = afile.read(blocksize)\n while len(buf) > 0:\n hasher.update(buf)\n buf = afile.read(blocksize)\n afile.close()\n return hasher.hexdigest()\n\n\ndef make_duplicate_list(filepath):\n unique_hashes = {}\n duplicate_files = {}\n for dir_name, subdir_list, file_list in os.walk(filepath):\n for filename in file_list:\n path = os.path.join(dir_name, filename)\n file_hash = hashfile(path)\n if file_hash in unique_hashes:\n if file_hash not in duplicate_files:\n # More than 2 duplicate files with same hash can exist,\n # so list of filepaths is created.\n duplicate_files[file_hash] = []\n duplicate_files[file_hash].append(unique_hashes[file_hash])\n duplicate_files[file_hash].append(path)\n else:\n unique_hashes[file_hash] = path\n return duplicate_files\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\"duplicates detector\")\n parser.add_argument(\"path_to_folder\",\n help=\"path to folder containig duplicates\")\n args = parser.parse_args()\n path = args.path_to_folder\n duplicates = make_duplicate_list(path)\n for idx, (key, value) in enumerate(duplicates.items(), 1):\n print(\"{}) {} files with {} MD5 hash were \" +\n \"found:\".format(idx, len(value), key))\n for idx, folder in enumerate(value, 1):\n print(\" {}. {}\".format(idx, folder))\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
'''
Steps for reading and writing a file
    1. open the file
    2. process the data
    3. close the file
1. the open function:
    fileobj = open(filename, mode)
    fileobj is the file object returned by open()
    mode is a string: its first letter names the operation, its second letter the file type:
    t (may be omitted) text, b binary.
    File open modes: r read-only (default), w overwrite (file is created if it does not exist)
    a append mode (file is created if it does not exist)
2. read(size): read a string of length size from the file; if size is omitted or negative, read everything
3. readline(): read a whole line and return it as a string
4. readlines(): read all lines and return them as a list
5. write(s): write the contents of string s to the file
'''
'''
# copy a file
fileobj1 = open("test1.txt", "r")
fileobj2 = open("test2.txt", "w")
s = fileobj1.read()
fileobj2.write(s)
fileobj1.close()
fileobj2.close()
'''
# read a multi-line file
fileobj3 = open("lines.txt", "r")
for line in fileobj3.readlines():
    print(line)
fileobj3.close()
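
# --- Extra sketch (not part of the original exercise): a quick demo of the
# --- append mode "a" and readline() described in the docstring above.
fileobj4 = open("lines.txt", "a")  # "a" appends; the file is created if missing
fileobj4.write("one more line\n")
fileobj4.close()
fileobj5 = open("lines.txt", "r")
first_line = fileobj5.readline()   # readline() returns one line, newline included
print(first_line)
fileobj5.close()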
|
normal
|
{
"blob_id": "25f3c9f48b779d2aec260d529529156ff3c508ca",
"index": 7719,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in fileobj3.readlines():\n print(line)\nfileobj3.close()\n",
"step-3": "<mask token>\nfileobj3 = open('lines.txt', 'r')\nfor line in fileobj3.readlines():\n print(line)\nfileobj3.close()\n",
"step-4": "'''\n文件读写的步骤\n 1.打开文件\n 2.处理数据\n 3.关闭文件\n1.open函数:\n fileobj = open(filename, mode)\n fileobj是open()函数返回的文件对象\n mode第一个字母指明文件类型和操作的字符串,第二个字母是文件类型:\n t(可省略)文本类型,b二进制类型。\n 文件打开模式:r只读(默认),w覆盖写(不存在则新创建)\n a追加模式(不存在则创建)\n2.read(size):从文件读取长度为size的字符串,若未给定或为负则读取所有内容\n3.readline():读取整行返回字符串\n4.readlines():读取所有行并返回列表\n5.write(s):把字符串s的内容写入文件\n'''\n'''\n#复制一个文件\nfileobj1 = open(\"test1.txt\", \"r\")\nfileobj2 = open(\"test2.txt\", \"w\")\ns = fileobj1.read()\nfileobj2.write(s)\nfileobj1.close()\nfileobj2.close()\n'''\n\n#多行文件读写\nfileobj3 = open(\"lines.txt\", \"r\")\nfor line in fileobj3.readlines():\n print(line)\nfileobj3.close()",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from scrapy import cmdline
cmdline.execute("scrapy crawl rapo.com".split())
|
normal
|
{
"blob_id": "326f1b5bee8f488382a76fcc5559f4ea13734f21",
"index": 6551,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncmdline.execute('scrapy crawl rapo.com'.split())\n",
"step-3": "from scrapy import cmdline\ncmdline.execute('scrapy crawl rapo.com'.split())\n",
"step-4": "from scrapy import cmdline\ncmdline.execute(\"scrapy crawl rapo.com\".split())\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from linebot import LineBotApi, WebhookParser
from linebot.exceptions import InvalidSignatureError, LineBotApiError
from linebot.models import MessageEvent, TextMessage
from module import func
from urllib.parse import parse_qsl
from func5api.models import users
from django.shortcuts import render
line_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)
parser = WebhookParser(settings.LINE_CHANNEL_SECRET)
@csrf_exempt
def callback(request):
if request.method == 'POST':
signature = request.META['HTTP_X_LINE_SIGNATURE']
body = request.body.decode('utf-8')
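        # WebhookParser.parse() validates the X-Line-Signature header against
        # the channel secret and raises InvalidSignatureError on a mismatch.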
try:
events = parser.parse(body, signature)
except InvalidSignatureError:
return HttpResponseForbidden()
except LineBotApiError:
return HttpResponseBadRequest()
for event in events:
if isinstance(event, MessageEvent):
                user_id = event.source.user_id # get the user_id
                if not(users.objects.filter(uid = user_id).exists()): # save the user_id if it is not in the database yet
                    unit = users.objects.create(uid = user_id)
                    unit.save() # upload the user_id to the database
if isinstance(event.message, TextMessage):
mtext = event.message.text
if mtext == '@修繕申請':
func.sendFix(event, user_id)
elif mtext =='@修繕查詢':
func.fix_inquire(event, user_id)
elif mtext == 'admin_mode':
func.judge(event, mtext, user_id)
elif mtext[:6] == '123456' and len(mtext) > 6: #all
func.judge(event, mtext, user_id)
elif mtext[:2] == '++' and len(mtext) > 2: #specify
func.judge(event, mtext, user_id)
elif mtext[:2] == '##' and len(mtext) > 2:
func.manageForm(event, mtext, user_id)
elif mtext[:3] == '!!!' and len(mtext) > 3:
func.personData(event, mtext, user_id)
return HttpResponse()
else:
return HttpResponseBadRequest()
def listall(request):
user = users.objects.all().order_by('name')
return render(request, "listall.html", locals())
|
normal
|
{
"blob_id": "19f202c32e1cf9f7ab2663827f1f98080f70b83e",
"index": 8313,
"step-1": "<mask token>\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n",
"step-3": "<mask token>\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n",
"step-4": "from django.conf import settings\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\nfrom django.views.decorators.csrf import csrf_exempt\nfrom linebot import LineBotApi, WebhookParser\nfrom linebot.exceptions import InvalidSignatureError, LineBotApiError\nfrom linebot.models import MessageEvent, TextMessage\nfrom module import func\nfrom urllib.parse import parse_qsl\nfrom func5api.models import users\nfrom django.shortcuts import render\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\n\n\n@csrf_exempt\ndef callback(request):\n if request.method == 'POST':\n signature = request.META['HTTP_X_LINE_SIGNATURE']\n body = request.body.decode('utf-8')\n try:\n events = parser.parse(body, signature)\n except InvalidSignatureError:\n return HttpResponseForbidden()\n except LineBotApiError:\n return HttpResponseBadRequest()\n for event in events:\n if isinstance(event, MessageEvent):\n user_id = event.source.user_id\n if not users.objects.filter(uid=user_id).exists():\n unit = users.objects.create(uid=user_id)\n unit.save()\n if isinstance(event.message, TextMessage):\n mtext = event.message.text\n if mtext == '@修繕申請':\n func.sendFix(event, user_id)\n elif mtext == '@修繕查詢':\n func.fix_inquire(event, user_id)\n elif mtext == 'admin_mode':\n func.judge(event, mtext, user_id)\n elif mtext[:6] == '123456' and len(mtext) > 6:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '++' and len(mtext) > 2:\n func.judge(event, mtext, user_id)\n elif mtext[:2] == '##' and len(mtext) > 2:\n func.manageForm(event, mtext, user_id)\n elif mtext[:3] == '!!!' and len(mtext) > 3:\n func.personData(event, mtext, user_id)\n return HttpResponse()\n else:\n return HttpResponseBadRequest()\n\n\ndef listall(request):\n user = users.objects.all().order_by('name')\n return render(request, 'listall.html', locals())\n",
"step-5": "from django.conf import settings\r\nfrom django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden\r\nfrom django.views.decorators.csrf import csrf_exempt\r\n\r\nfrom linebot import LineBotApi, WebhookParser\r\nfrom linebot.exceptions import InvalidSignatureError, LineBotApiError\r\nfrom linebot.models import MessageEvent, TextMessage\r\nfrom module import func\r\nfrom urllib.parse import parse_qsl\r\nfrom func5api.models import users\r\nfrom django.shortcuts import render\r\n\r\nline_bot_api = LineBotApi(settings.LINE_CHANNEL_ACCESS_TOKEN)\r\nparser = WebhookParser(settings.LINE_CHANNEL_SECRET)\r\n\r\n@csrf_exempt\r\ndef callback(request):\r\n if request.method == 'POST':\r\n signature = request.META['HTTP_X_LINE_SIGNATURE']\r\n body = request.body.decode('utf-8')\r\n try:\r\n events = parser.parse(body, signature)\r\n except InvalidSignatureError:\r\n return HttpResponseForbidden()\r\n except LineBotApiError:\r\n return HttpResponseBadRequest()\r\n\r\n for event in events:\r\n if isinstance(event, MessageEvent):\r\n user_id = event.source.user_id #取得user_id\r\n if not(users.objects.filter(uid = user_id).exists()): #將user_id存入資料庫中\r\n unit = users.objects.create(uid = user_id)\r\n unit.save() #將user_id上傳至資料庫\r\n if isinstance(event.message, TextMessage):\r\n mtext = event.message.text\r\n if mtext == '@修繕申請':\r\n func.sendFix(event, user_id)\r\n elif mtext =='@修繕查詢':\r\n func.fix_inquire(event, user_id)\r\n elif mtext == 'admin_mode':\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:6] == '123456' and len(mtext) > 6: #all\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:2] == '++' and len(mtext) > 2: #specify\r\n func.judge(event, mtext, user_id)\r\n elif mtext[:2] == '##' and len(mtext) > 2:\r\n func.manageForm(event, mtext, user_id)\r\n elif mtext[:3] == '!!!' and len(mtext) > 3:\r\n func.personData(event, mtext, user_id)\r\n \r\n return HttpResponse()\r\n\r\n else:\r\n return HttpResponseBadRequest()\r\n \r\ndef listall(request):\r\n user = users.objects.all().order_by('name')\r\n return render(request, \"listall.html\", locals())\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- encoding:ascii -*-
from mako import runtime, filters, cache
UNDEFINED = runtime.UNDEFINED
__M_dict_builtin = dict
__M_locals_builtin = locals
_magic_number = 6
_modified_time = 1383550959.0389481
_template_filename='templates/webapps/tool_shed/repository/browse_repository.mako'
_template_uri='/webapps/tool_shed/repository/browse_repository.mako'
_template_cache=cache.Cache(__name__, _modified_time)
_source_encoding='ascii'
_exports = ['stylesheets', 'javascripts']
# SOURCE LINE 7
def inherit(context):
if context.get('use_panels'):
return '/webapps/tool_shed/base_panels.mako'
else:
return '/base.mako'
def _mako_get_namespace(context, name):
try:
return context.namespaces[(__name__, name)]
except KeyError:
_mako_generate_namespaces(context)
return context.namespaces[(__name__, name)]
def _mako_generate_namespaces(context):
# SOURCE LINE 2
ns = runtime.TemplateNamespace('__anon_0x88e2e50', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x88e2e50')] = ns
# SOURCE LINE 4
ns = runtime.TemplateNamespace('__anon_0x7ee9750', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/common.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x7ee9750')] = ns
# SOURCE LINE 5
ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/repository/common.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x8a2fd90')] = ns
# SOURCE LINE 3
ns = runtime.TemplateNamespace('__anon_0x88e21d0', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/repository_actions_menu.mako', callables=None, calling_uri=_template_uri)
context.namespaces[(__name__, '__anon_0x88e21d0')] = ns
def _mako_inherit(template, context):
_mako_generate_namespaces(context)
return runtime._inherit_from(context, (inherit(context)), _template_uri)
def render_body(context,**pageargs):
context.caller_stack._push_frame()
try:
__M_locals = __M_dict_builtin(pageargs=pageargs)
_import_ns = {}
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])
_mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])
status = _import_ns.get('status', context.get('status', UNDEFINED))
render_clone_str = _import_ns.get('render_clone_str', context.get('render_clone_str', UNDEFINED))
render_repository_type_select_field = _import_ns.get('render_repository_type_select_field', context.get('render_repository_type_select_field', UNDEFINED))
render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED))
repository = _import_ns.get('repository', context.get('repository', UNDEFINED))
h = _import_ns.get('h', context.get('h', UNDEFINED))
render_tool_shed_repository_actions = _import_ns.get('render_tool_shed_repository_actions', context.get('render_tool_shed_repository_actions', UNDEFINED))
is_malicious = _import_ns.get('is_malicious', context.get('is_malicious', UNDEFINED))
repository_type_select_field = _import_ns.get('repository_type_select_field', context.get('repository_type_select_field', UNDEFINED))
commit_message = _import_ns.get('commit_message', context.get('commit_message', UNDEFINED))
message = _import_ns.get('message', context.get('message', UNDEFINED))
trans = _import_ns.get('trans', context.get('trans', UNDEFINED))
__M_writer = context.writer()
# SOURCE LINE 1
__M_writer(u'\n')
# SOURCE LINE 2
__M_writer(u'\n')
# SOURCE LINE 3
__M_writer(u'\n')
# SOURCE LINE 4
__M_writer(u'\n')
# SOURCE LINE 5
__M_writer(u'\n\n')
# SOURCE LINE 13
__M_writer(u'\n')
# SOURCE LINE 14
__M_writer(u'\n\n')
# SOURCE LINE 19
__M_writer(u'\n\n')
# SOURCE LINE 25
__M_writer(u'\n\n')
# SOURCE LINE 27
is_new = repository.is_new( trans.app )
can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )
can_download = not is_new and ( not is_malicious or can_push )
can_browse_contents = not is_new
__M_locals_builtin_stored = __M_locals_builtin()
__M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push','can_browse_contents','is_new','can_download'] if __M_key in __M_locals_builtin_stored]))
# SOURCE LINE 32
__M_writer(u'\n\n')
# SOURCE LINE 34
__M_writer(unicode(render_tool_shed_repository_actions( repository )))
__M_writer(u'\n\n')
# SOURCE LINE 36
if message:
# SOURCE LINE 37
__M_writer(u' ')
__M_writer(unicode(render_msg( message, status )))
__M_writer(u'\n')
pass
# SOURCE LINE 39
__M_writer(u'\n')
# SOURCE LINE 40
if can_browse_contents:
# SOURCE LINE 41
__M_writer(u' <div class="toolForm">\n <div class="toolFormTitle">Repository \'')
# SOURCE LINE 42
__M_writer(filters.html_escape(unicode(repository.name )))
__M_writer(u"' revision ")
__M_writer(filters.html_escape(unicode(repository.tip( trans.app ) )))
__M_writer(u' (repository tip)</div>\n')
# SOURCE LINE 43
if can_download:
# SOURCE LINE 44
__M_writer(u' <div class="form-row">\n <label>Clone this repository:</label>\n ')
# SOURCE LINE 46
__M_writer(unicode(render_clone_str( repository )))
__M_writer(u'\n </div>\n')
pass
# SOURCE LINE 49
__M_writer(u' <form name="repository_type">\n ')
# SOURCE LINE 50
__M_writer(unicode(render_repository_type_select_field( repository_type_select_field, render_help=False )))
__M_writer(u'\n </form>\n')
# SOURCE LINE 52
if can_push:
# SOURCE LINE 53
__M_writer(u' <form name="select_files_to_delete" id="select_files_to_delete" action="')
__M_writer(unicode(h.url_for( controller='repository', action='select_files_to_delete', id=trans.security.encode_id( repository.id ))))
__M_writer(u'" method="post" >\n <div class="form-row" >\n <label>Contents:</label>\n <div id="tree" >\n Loading...\n </div>\n <div class="toolParamHelp" style="clear: both;">\n Click on a file to display it\'s contents below. You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id="selected_files_to_delete" name="selected_files_to_delete" type="hidden" value=""/>\n </div>\n <div class="form-row">\n <label>Message:</label>\n <div class="form-row-input">\n')
# SOURCE LINE 67
if commit_message:
# SOURCE LINE 68
__M_writer(u' <textarea name="commit_message" rows="3" cols="35">')
__M_writer(filters.html_escape(unicode(commit_message )))
__M_writer(u'</textarea>\n')
# SOURCE LINE 69
else:
# SOURCE LINE 70
__M_writer(u' <textarea name="commit_message" rows="3" cols="35"></textarea>\n')
pass
# SOURCE LINE 72
__M_writer(u' </div>\n <div class="toolParamHelp" style="clear: both;">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style="clear: both"></div>\n </div>\n <div class="form-row">\n <input type="submit" name="select_files_to_delete_button" value="Delete selected files"/>\n </div>\n <div class="form-row">\n <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>\n </div>\n </form>\n')
# SOURCE LINE 85
else:
# SOURCE LINE 86
__M_writer(u' <div class="toolFormBody">\n <div class="form-row" >\n <label>Contents:</label>\n <div id="tree" >\n Loading...\n </div>\n </div>\n <div class="form-row">\n <div id="file_contents" class="toolParamHelp" style="clear: both;background-color:#FAFAFA;"></div>\n </div>\n </div>\n')
pass
# SOURCE LINE 98
__M_writer(u' </div>\n <p/>\n')
pass
return ''
finally:
context.caller_stack._pop_frame()
def render_stylesheets(context):
context.caller_stack._push_frame()
try:
_import_ns = {}
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])
_mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])
h = _import_ns.get('h', context.get('h', UNDEFINED))
parent = _import_ns.get('parent', context.get('parent', UNDEFINED))
__M_writer = context.writer()
# SOURCE LINE 16
__M_writer(u'\n ')
# SOURCE LINE 17
__M_writer(unicode(parent.stylesheets()))
__M_writer(u'\n ')
# SOURCE LINE 18
__M_writer(unicode(h.css( "jquery.rating", "dynatree_skin/ui.dynatree" )))
__M_writer(u'\n')
return ''
finally:
context.caller_stack._pop_frame()
def render_javascripts(context):
context.caller_stack._push_frame()
try:
_import_ns = {}
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])
_mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])
_mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])
common_javascripts = _import_ns.get('common_javascripts', context.get('common_javascripts', UNDEFINED))
h = _import_ns.get('h', context.get('h', UNDEFINED))
repository = _import_ns.get('repository', context.get('repository', UNDEFINED))
parent = _import_ns.get('parent', context.get('parent', UNDEFINED))
__M_writer = context.writer()
# SOURCE LINE 21
__M_writer(u'\n ')
# SOURCE LINE 22
__M_writer(unicode(parent.javascripts()))
__M_writer(u'\n ')
# SOURCE LINE 23
__M_writer(unicode(h.js( "libs/jquery/jquery.rating", "libs/jquery/jquery-ui", "libs/jquery/jquery.cookie", "libs/jquery/jquery.dynatree" )))
__M_writer(u'\n ')
# SOURCE LINE 24
__M_writer(unicode(common_javascripts(repository)))
__M_writer(u'\n')
return ''
finally:
context.caller_stack._pop_frame()
|
normal
|
{
"blob_id": "fd54bbfbc81aec371ad6c82bf402a5a3673a9f24",
"index": 8892,
"step-1": "<mask token>\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in 
['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\n<mask token>\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n 
__M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\n<mask token>\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n 
_mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.\n get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.js('libs/jquery/jquery.rating',\n 'libs/jquery/jquery-ui', 'libs/jquery/jquery.cookie',\n 'libs/jquery/jquery.dynatree')))\n __M_writer(u'\\n ')\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n",
"step-3": "<mask token>\n\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\ndef _mako_get_namespace(context, name):\n try:\n return context.namespaces[__name__, name]\n except KeyError:\n _mako_generate_namespaces(context)\n return context.namespaces[__name__, name]\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n 
can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. 
You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_stylesheets(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.stylesheets()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.css('jquery.rating', 'dynatree_skin/ui.dynatree'))\n )\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.\n get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.js('libs/jquery/jquery.rating',\n 'libs/jquery/jquery-ui', 
'libs/jquery/jquery.cookie',\n 'libs/jquery/jquery.dynatree')))\n __M_writer(u'\\n ')\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n",
"step-4": "from mako import runtime, filters, cache\nUNDEFINED = runtime.UNDEFINED\n__M_dict_builtin = dict\n__M_locals_builtin = locals\n_magic_number = 6\n_modified_time = 1383550959.038948\n_template_filename = (\n 'templates/webapps/tool_shed/repository/browse_repository.mako')\n_template_uri = '/webapps/tool_shed/repository/browse_repository.mako'\n_template_cache = cache.Cache(__name__, _modified_time)\n_source_encoding = 'ascii'\n_exports = ['stylesheets', 'javascripts']\n\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\ndef _mako_get_namespace(context, name):\n try:\n return context.namespaces[__name__, name]\n except KeyError:\n _mako_generate_namespaces(context)\n return context.namespaces[__name__, name]\n\n\ndef _mako_generate_namespaces(context):\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context.\n _clean_inheritance_tokens(), templateuri=u'/message.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e2e50'] = ns\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x7ee9750'] = ns\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/repository/common.mako', callables=None,\n calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x8a2fd90'] = ns\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context.\n _clean_inheritance_tokens(), templateuri=\n u'/webapps/tool_shed/common/repository_actions_menu.mako',\n callables=None, calling_uri=_template_uri)\n context.namespaces[__name__, '__anon_0x88e21d0'] = ns\n\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, inherit(context), _template_uri)\n\n\ndef render_body(context, **pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get(\n 'render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get(\n 'render_repository_type_select_field', context.get(\n 'render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg',\n UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get(\n 'render_tool_shed_repository_actions', context.get(\n 'render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get(\n 'is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get(\n 'repository_type_select_field', context.get(\n 'repository_type_select_field', UNDEFINED))\n commit_message = _import_ns.get('commit_message', 
context.get(\n 'commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n __M_writer(u'\\n\\n')\n is_new = repository.is_new(trans.app)\n can_push = trans.app.security_agent.can_push(trans.app, trans.user,\n repository)\n can_download = not is_new and (not is_malicious or can_push)\n can_browse_contents = not is_new\n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key,\n __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push',\n 'can_browse_contents', 'is_new', 'can_download'] if __M_key in\n __M_locals_builtin_stored]))\n __M_writer(u'\\n\\n')\n __M_writer(unicode(render_tool_shed_repository_actions(repository)))\n __M_writer(u'\\n\\n')\n if message:\n __M_writer(u' ')\n __M_writer(unicode(render_msg(message, status)))\n __M_writer(u'\\n')\n pass\n __M_writer(u'\\n')\n if can_browse_contents:\n __M_writer(\n u\"\"\" <div class=\"toolForm\">\n <div class=\"toolFormTitle\">Repository '\"\"\"\n )\n __M_writer(filters.html_escape(unicode(repository.name)))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip(trans.app))))\n __M_writer(u' (repository tip)</div>\\n')\n if can_download:\n __M_writer(\n u\"\"\" <div class=\"form-row\">\n <label>Clone this repository:</label>\n \"\"\"\n )\n __M_writer(unicode(render_clone_str(repository)))\n __M_writer(u'\\n </div>\\n')\n pass\n __M_writer(u' <form name=\"repository_type\">\\n ')\n __M_writer(unicode(render_repository_type_select_field(\n repository_type_select_field, render_help=False)))\n __M_writer(u'\\n </form>\\n')\n if can_push:\n __M_writer(\n u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"'\n )\n __M_writer(unicode(h.url_for(controller='repository',\n action='select_files_to_delete', id=trans.security.\n encode_id(repository.id))))\n __M_writer(\n u\"\"\"\" method=\"post\" >\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n Click on a file to display it's contents below. 
You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\n </div>\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\n </div>\n <div class=\"form-row\">\n <label>Message:</label>\n <div class=\"form-row-input\">\n\"\"\"\n )\n if commit_message:\n __M_writer(\n u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">'\n )\n __M_writer(filters.html_escape(unicode(commit_message)))\n __M_writer(u'</textarea>\\n')\n else:\n __M_writer(\n u\"\"\" <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\n\"\"\"\n )\n pass\n __M_writer(\n u\"\"\" </div>\n <div class=\"toolParamHelp\" style=\"clear: both;\">\n This is the commit message for the mercurial change set that will be created if you delete selected files.\n </div>\n <div style=\"clear: both\"></div>\n </div>\n <div class=\"form-row\">\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </form>\n\"\"\"\n )\n else:\n __M_writer(\n u\"\"\" <div class=\"toolFormBody\">\n <div class=\"form-row\" >\n <label>Contents:</label>\n <div id=\"tree\" >\n Loading...\n </div>\n </div>\n <div class=\"form-row\">\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\n </div>\n </div>\n\"\"\"\n )\n pass\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_stylesheets(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.stylesheets()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.css('jquery.rating', 'dynatree_skin/ui.dynatree'))\n )\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns,\n [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns,\n [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns,\n [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.\n get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository',\n UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n __M_writer(u'\\n ')\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n __M_writer(unicode(h.js('libs/jquery/jquery.rating',\n 'libs/jquery/jquery-ui', 
'libs/jquery/jquery.cookie',\n 'libs/jquery/jquery.dynatree')))\n __M_writer(u'\\n ')\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n",
"step-5": "# -*- encoding:ascii -*-\nfrom mako import runtime, filters, cache\nUNDEFINED = runtime.UNDEFINED\n__M_dict_builtin = dict\n__M_locals_builtin = locals\n_magic_number = 6\n_modified_time = 1383550959.0389481\n_template_filename='templates/webapps/tool_shed/repository/browse_repository.mako'\n_template_uri='/webapps/tool_shed/repository/browse_repository.mako'\n_template_cache=cache.Cache(__name__, _modified_time)\n_source_encoding='ascii'\n_exports = ['stylesheets', 'javascripts']\n\n\n# SOURCE LINE 7\n\ndef inherit(context):\n if context.get('use_panels'):\n return '/webapps/tool_shed/base_panels.mako'\n else:\n return '/base.mako'\n\n\ndef _mako_get_namespace(context, name):\n try:\n return context.namespaces[(__name__, name)]\n except KeyError:\n _mako_generate_namespaces(context)\n return context.namespaces[(__name__, name)]\ndef _mako_generate_namespaces(context):\n # SOURCE LINE 2\n ns = runtime.TemplateNamespace('__anon_0x88e2e50', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x88e2e50')] = ns\n\n # SOURCE LINE 4\n ns = runtime.TemplateNamespace('__anon_0x7ee9750', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/common.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x7ee9750')] = ns\n\n # SOURCE LINE 5\n ns = runtime.TemplateNamespace('__anon_0x8a2fd90', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/repository/common.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x8a2fd90')] = ns\n\n # SOURCE LINE 3\n ns = runtime.TemplateNamespace('__anon_0x88e21d0', context._clean_inheritance_tokens(), templateuri=u'/webapps/tool_shed/common/repository_actions_menu.mako', callables=None, calling_uri=_template_uri)\n context.namespaces[(__name__, '__anon_0x88e21d0')] = ns\n\ndef _mako_inherit(template, context):\n _mako_generate_namespaces(context)\n return runtime._inherit_from(context, (inherit(context)), _template_uri)\ndef render_body(context,**pageargs):\n context.caller_stack._push_frame()\n try:\n __M_locals = __M_dict_builtin(pageargs=pageargs)\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])\n status = _import_ns.get('status', context.get('status', UNDEFINED))\n render_clone_str = _import_ns.get('render_clone_str', context.get('render_clone_str', UNDEFINED))\n render_repository_type_select_field = _import_ns.get('render_repository_type_select_field', context.get('render_repository_type_select_field', UNDEFINED))\n render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n render_tool_shed_repository_actions = _import_ns.get('render_tool_shed_repository_actions', context.get('render_tool_shed_repository_actions', UNDEFINED))\n is_malicious = _import_ns.get('is_malicious', context.get('is_malicious', UNDEFINED))\n repository_type_select_field = _import_ns.get('repository_type_select_field', context.get('repository_type_select_field', UNDEFINED))\n 
commit_message = _import_ns.get('commit_message', context.get('commit_message', UNDEFINED))\n message = _import_ns.get('message', context.get('message', UNDEFINED))\n trans = _import_ns.get('trans', context.get('trans', UNDEFINED))\n __M_writer = context.writer()\n # SOURCE LINE 1\n __M_writer(u'\\n')\n # SOURCE LINE 2\n __M_writer(u'\\n')\n # SOURCE LINE 3\n __M_writer(u'\\n')\n # SOURCE LINE 4\n __M_writer(u'\\n')\n # SOURCE LINE 5\n __M_writer(u'\\n\\n')\n # SOURCE LINE 13\n __M_writer(u'\\n')\n # SOURCE LINE 14\n __M_writer(u'\\n\\n')\n # SOURCE LINE 19\n __M_writer(u'\\n\\n')\n # SOURCE LINE 25\n __M_writer(u'\\n\\n')\n # SOURCE LINE 27\n\n is_new = repository.is_new( trans.app )\n can_push = trans.app.security_agent.can_push( trans.app, trans.user, repository )\n can_download = not is_new and ( not is_malicious or can_push )\n can_browse_contents = not is_new\n \n \n __M_locals_builtin_stored = __M_locals_builtin()\n __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['can_push','can_browse_contents','is_new','can_download'] if __M_key in __M_locals_builtin_stored]))\n # SOURCE LINE 32\n __M_writer(u'\\n\\n')\n # SOURCE LINE 34\n __M_writer(unicode(render_tool_shed_repository_actions( repository )))\n __M_writer(u'\\n\\n')\n # SOURCE LINE 36\n if message:\n # SOURCE LINE 37\n __M_writer(u' ')\n __M_writer(unicode(render_msg( message, status )))\n __M_writer(u'\\n')\n pass\n # SOURCE LINE 39\n __M_writer(u'\\n')\n # SOURCE LINE 40\n if can_browse_contents:\n # SOURCE LINE 41\n __M_writer(u' <div class=\"toolForm\">\\n <div class=\"toolFormTitle\">Repository \\'')\n # SOURCE LINE 42\n __M_writer(filters.html_escape(unicode(repository.name )))\n __M_writer(u\"' revision \")\n __M_writer(filters.html_escape(unicode(repository.tip( trans.app ) )))\n __M_writer(u' (repository tip)</div>\\n')\n # SOURCE LINE 43\n if can_download:\n # SOURCE LINE 44\n __M_writer(u' <div class=\"form-row\">\\n <label>Clone this repository:</label>\\n ')\n # SOURCE LINE 46\n __M_writer(unicode(render_clone_str( repository )))\n __M_writer(u'\\n </div>\\n')\n pass\n # SOURCE LINE 49\n __M_writer(u' <form name=\"repository_type\">\\n ')\n # SOURCE LINE 50\n __M_writer(unicode(render_repository_type_select_field( repository_type_select_field, render_help=False )))\n __M_writer(u'\\n </form>\\n')\n # SOURCE LINE 52\n if can_push:\n # SOURCE LINE 53\n __M_writer(u' <form name=\"select_files_to_delete\" id=\"select_files_to_delete\" action=\"')\n __M_writer(unicode(h.url_for( controller='repository', action='select_files_to_delete', id=trans.security.encode_id( repository.id ))))\n __M_writer(u'\" method=\"post\" >\\n <div class=\"form-row\" >\\n <label>Contents:</label>\\n <div id=\"tree\" >\\n Loading...\\n </div>\\n <div class=\"toolParamHelp\" style=\"clear: both;\">\\n Click on a file to display it\\'s contents below. 
You may delete files from the repository by clicking the check box next to each file and clicking the <b>Delete selected files</b> button.\\n </div>\\n <input id=\"selected_files_to_delete\" name=\"selected_files_to_delete\" type=\"hidden\" value=\"\"/>\\n </div>\\n <div class=\"form-row\">\\n <label>Message:</label>\\n <div class=\"form-row-input\">\\n')\n # SOURCE LINE 67\n if commit_message:\n # SOURCE LINE 68\n __M_writer(u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\">')\n __M_writer(filters.html_escape(unicode(commit_message )))\n __M_writer(u'</textarea>\\n')\n # SOURCE LINE 69\n else:\n # SOURCE LINE 70\n __M_writer(u' <textarea name=\"commit_message\" rows=\"3\" cols=\"35\"></textarea>\\n')\n pass\n # SOURCE LINE 72\n __M_writer(u' </div>\\n <div class=\"toolParamHelp\" style=\"clear: both;\">\\n This is the commit message for the mercurial change set that will be created if you delete selected files.\\n </div>\\n <div style=\"clear: both\"></div>\\n </div>\\n <div class=\"form-row\">\\n <input type=\"submit\" name=\"select_files_to_delete_button\" value=\"Delete selected files\"/>\\n </div>\\n <div class=\"form-row\">\\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\\n </div>\\n </form>\\n')\n # SOURCE LINE 85\n else:\n # SOURCE LINE 86\n __M_writer(u' <div class=\"toolFormBody\">\\n <div class=\"form-row\" >\\n <label>Contents:</label>\\n <div id=\"tree\" >\\n Loading...\\n </div>\\n </div>\\n <div class=\"form-row\">\\n <div id=\"file_contents\" class=\"toolParamHelp\" style=\"clear: both;background-color:#FAFAFA;\"></div>\\n </div>\\n </div>\\n')\n pass\n # SOURCE LINE 98\n __M_writer(u' </div>\\n <p/>\\n')\n pass\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_stylesheets(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n # SOURCE LINE 16\n __M_writer(u'\\n ')\n # SOURCE LINE 17\n __M_writer(unicode(parent.stylesheets()))\n __M_writer(u'\\n ')\n # SOURCE LINE 18\n __M_writer(unicode(h.css( \"jquery.rating\", \"dynatree_skin/ui.dynatree\" )))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\ndef render_javascripts(context):\n context.caller_stack._push_frame()\n try:\n _import_ns = {}\n _mako_get_namespace(context, '__anon_0x88e2e50')._populate(_import_ns, [u'render_msg'])\n _mako_get_namespace(context, '__anon_0x7ee9750')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x8a2fd90')._populate(_import_ns, [u'*'])\n _mako_get_namespace(context, '__anon_0x88e21d0')._populate(_import_ns, [u'render_tool_shed_repository_actions'])\n common_javascripts = _import_ns.get('common_javascripts', context.get('common_javascripts', UNDEFINED))\n h = _import_ns.get('h', context.get('h', UNDEFINED))\n repository = _import_ns.get('repository', context.get('repository', UNDEFINED))\n parent = _import_ns.get('parent', context.get('parent', UNDEFINED))\n __M_writer = context.writer()\n # SOURCE LINE 21\n 
__M_writer(u'\\n ')\n # SOURCE LINE 22\n __M_writer(unicode(parent.javascripts()))\n __M_writer(u'\\n ')\n # SOURCE LINE 23\n __M_writer(unicode(h.js( \"libs/jquery/jquery.rating\", \"libs/jquery/jquery-ui\", \"libs/jquery/jquery.cookie\", \"libs/jquery/jquery.dynatree\" )))\n __M_writer(u'\\n ')\n # SOURCE LINE 24\n __M_writer(unicode(common_javascripts(repository)))\n __M_writer(u'\\n')\n return ''\n finally:\n context.caller_stack._pop_frame()\n\n\n",
"step-ids": [
3,
5,
7,
9,
10
]
}
|
[
3,
5,
7,
9,
10
] |
# -*- coding: utf-8 -*-
from django.db import models
from authentication.models import Account


class QuestionFaq(models.Model):
    title = models.CharField(max_length=50, verbose_name=u'Тема вопроса')
    question = models.TextField(verbose_name=u'Задайте вопрос')
    date = models.DateField(auto_now_add=True)
    checked = models.BooleanField(default=False)

    class Meta:
        verbose_name = u'Вопрос в FAQ'
        verbose_name_plural = u'Вопросы в FAQ'

    def __unicode__(self):
        return self.title


class AnswerFaq(models.Model):
    account = models.ForeignKey(Account)
    answer = models.TextField(verbose_name=u'Ответ на вопрос в FAQ')
    question = models.ForeignKey(QuestionFaq)
    date = models.DateField(auto_now_add=True)

    class Meta:
        verbose_name = u'Ответ на вопрос в FAQ'
        verbose_name_plural = u'Ответы на вопросы в FAQ'

    def __unicode__(self):
        return u'%s - вопрос: "%s"' % (
            self.account.get_full_name(),
            self.question.title)
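
# Hedged usage sketch (not part of the original module): how these models are
# typically exercised from a Django shell; `some_account` is an assumed,
# already-saved Account instance.
#
#     q = QuestionFaq.objects.create(title=u'Deploy', question=u'How do I deploy?')
#     AnswerFaq.objects.create(account=some_account, question=q, answer=u'Use fabric.')
#     q.checked = True
#     q.save()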
|
normal
|
{
"blob_id": "b00c9f099fcb31262df947f47d7190912ee66965",
"index": 6159,
"step-1": "<mask token>\n\n\nclass AnswerFaq(models.Model):\n account = models.ForeignKey(Account)\n answer = models.TextField(verbose_name=u'Ответ на вопрос в FAQ')\n question = models.ForeignKey(QuestionFaq)\n date = models.DateField(auto_now_add=True)\n\n\n class Meta:\n verbose_name = u'Ответ на вопрос в FAQ'\n verbose_name_plural = u'Ответы на вопросы в FAQ'\n\n def __unicode__(self):\n return u'%s - вопрос: \"%s\"' % (self.account.get_full_name(), self.\n question.title)\n",
"step-2": "<mask token>\n\n\nclass QuestionFaq(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = u'Вопрос в FAQ'\n verbose_name_plural = u'Вопросы в FAQ'\n <mask token>\n\n\nclass AnswerFaq(models.Model):\n account = models.ForeignKey(Account)\n answer = models.TextField(verbose_name=u'Ответ на вопрос в FAQ')\n question = models.ForeignKey(QuestionFaq)\n date = models.DateField(auto_now_add=True)\n\n\n class Meta:\n verbose_name = u'Ответ на вопрос в FAQ'\n verbose_name_plural = u'Ответы на вопросы в FAQ'\n\n def __unicode__(self):\n return u'%s - вопрос: \"%s\"' % (self.account.get_full_name(), self.\n question.title)\n",
"step-3": "<mask token>\n\n\nclass QuestionFaq(models.Model):\n title = models.CharField(max_length=50, verbose_name=u'Тема вопроса')\n question = models.TextField(verbose_name=u'Задайте вопрос')\n date = models.DateField(auto_now_add=True)\n checked = models.BooleanField(default=False)\n\n\n class Meta:\n verbose_name = u'Вопрос в FAQ'\n verbose_name_plural = u'Вопросы в FAQ'\n\n def __unicode__(self):\n return self.title\n\n\nclass AnswerFaq(models.Model):\n account = models.ForeignKey(Account)\n answer = models.TextField(verbose_name=u'Ответ на вопрос в FAQ')\n question = models.ForeignKey(QuestionFaq)\n date = models.DateField(auto_now_add=True)\n\n\n class Meta:\n verbose_name = u'Ответ на вопрос в FAQ'\n verbose_name_plural = u'Ответы на вопросы в FAQ'\n\n def __unicode__(self):\n return u'%s - вопрос: \"%s\"' % (self.account.get_full_name(), self.\n question.title)\n",
"step-4": "from django.db import models\nfrom authentication.models import Account\n\n\nclass QuestionFaq(models.Model):\n title = models.CharField(max_length=50, verbose_name=u'Тема вопроса')\n question = models.TextField(verbose_name=u'Задайте вопрос')\n date = models.DateField(auto_now_add=True)\n checked = models.BooleanField(default=False)\n\n\n class Meta:\n verbose_name = u'Вопрос в FAQ'\n verbose_name_plural = u'Вопросы в FAQ'\n\n def __unicode__(self):\n return self.title\n\n\nclass AnswerFaq(models.Model):\n account = models.ForeignKey(Account)\n answer = models.TextField(verbose_name=u'Ответ на вопрос в FAQ')\n question = models.ForeignKey(QuestionFaq)\n date = models.DateField(auto_now_add=True)\n\n\n class Meta:\n verbose_name = u'Ответ на вопрос в FAQ'\n verbose_name_plural = u'Ответы на вопросы в FAQ'\n\n def __unicode__(self):\n return u'%s - вопрос: \"%s\"' % (self.account.get_full_name(), self.\n question.title)\n",
"step-5": "#-*- coding: utf-8 -*-\nfrom django.db import models\nfrom authentication.models import Account\n\n\nclass QuestionFaq(models.Model):\n title = models.CharField(max_length=50, verbose_name=u'Тема вопроса')\n question = models.TextField(verbose_name=u'Задайте вопрос')\n date = models.DateField(auto_now_add=True)\n checked = models.BooleanField(default=False)\n\n class Meta:\n verbose_name = u'Вопрос в FAQ'\n verbose_name_plural = u'Вопросы в FAQ'\n\n def __unicode__(self):\n return self.title\n\n\nclass AnswerFaq(models.Model):\n account = models.ForeignKey(Account)\n answer = models.TextField(verbose_name=u'Ответ на вопрос в FAQ')\n question = models.ForeignKey(QuestionFaq)\n date = models.DateField(auto_now_add=True)\n\n class Meta:\n verbose_name = u'Ответ на вопрос в FAQ'\n verbose_name_plural = u'Ответы на вопросы в FAQ'\n\n def __unicode__(self):\n return u'%s - вопрос: \"%s\"' % (\n self.account.get_full_name(),\n self.question.title)\n",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
# coding=utf-8
# http://rate.tmall.com/list_detail_rate.htm?itemId=41464129793&sellerId=1652490016&currentPage=1
import requests, re
from Tkinter import *
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams['font.sans-serif'] = ['SimHei']  # render CJK labels correctly (SimHei)
plt.rcParams['axes.unicode_minus'] = False    # keep the minus sign rendering correctly
def worker():
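    """Fetch the requested number of review pages and tally SKU combinations.

    The review-list URL and page count come from the Tkinter entry widgets;
    the per-size/colour counts are handed to make_image().
    """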
goods_url = L_entry.get()
pages = P_entry.get()
detail_list = []
detail_dict = {}
for i in range(int(pages)):
page = i + 1
        # \d+ (not \d) so multi-digit page numbers get replaced on later iterations
        goods_url = re.sub(r"currentPage=\d+", "currentPage=%s" % page, goods_url)
rsp = requests.get(goods_url, headers=header)
        # Use the raw UTF-8 bytes: the SKU handling below (str() here and
        # .decode('utf8') in make_image) assumes byte strings, which the
        # unicode rsp.text would break under Python 2.
        data = rsp.content
        # The endpoint returns JSONP: keep everything from the first "{",
        # strip the callback's trailing ")", and map JSON booleans onto ints
        # so the payload can be evaluated as a Python dict literal.
        data = eval(re.search(r"\{.*", data).group().strip(')').replace("false", "0").replace("true", "1"))
for detail in data['rateDetail']['rateList']:
#for detail in data['rateList']:
try:
size = detail["auctionSku"]
except Exception as e:
print e
continue
size = size.split(";")
s1 = size[0].split(":")[1] if size else ''
s2 = size[1].split(":")[1] if len(size)>1 else ''
s = str(s1) + str(s2)
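            # tally how many reviews mention this size/colour combination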
if s in detail_list:
detail_dict[s] = detail_dict[s] + 1
else:
detail_list.append(s)
detail_dict[s] = 1
root.wm_title("page%d" % page)
root.wm_title("下载完成")
make_image(detail_list,detail_dict)
def make_image(detail_list, detail_dict):
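    """Render a horizontal bar chart of purchase counts per SKU combination."""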
print detail_list
print detail_dict
colors = ['#ff0000', '#eb4310', '#f6941d', '#fbb417', '#ffff00', '#cdd541', '#99cc33', '#3f9337', '#219167',
'#239676', '#24998d', '#1f9baa', '#0080ff', '#3366cc', '#333399', '#003366', '#800080', '#a1488e',
'#c71585', '#bd2158']
people = [detail.decode('utf8') for detail in detail_list]
colors = colors[0:len(people)]
    # one y-axis position per SKU combination
    y_pos = np.arange(len(people))
    # the purchase count for each combination, in matching order
    performance = [detail_dict[x] for x in detail_list]
    # barh draws the bars horizontally (h = horizontal)
    bars = plt.barh(y_pos, performance, align='center')
    # give each bar its own colour
    for bar, color in zip(bars, colors):
        bar.set_color(color)
    # label each y-axis position with its SKU combination
    plt.yticks(y_pos, people, fontsize=7)
    # x-axis title and range
    plt.xlabel('count')
    plt.xlim(0, max(performance))
    plt.title('Size/colour purchase counts for this Taobao item')
plt.show()
if __name__ == '__main__':
# goods_url = "https://rate.tmall.com/list_detail_rate.htm?itemId=527956695986&spuId=517713513&sellerId=2615125783&order=3¤tPage=1&append=0&content=1&tagId=&posi=&picture=&ua=146UW5TcyMNYQwiAiwZTXFIdUh1SHJOe0BuOG4%3D%7CUm5Ockt%2FRH1IdUB%2BRXpOdiA%3D%7CU2xMHDJ7G2AHYg8hAS8XLwEhD0ghSmQyZA%3D%3D%7CVGhXd1llXGhTal9iV2lSbVlhVmtJfUN4QHpAf0ZyT3JPekB0TGI0%7CVWldfS0SMg01ACAcIAAuE2JbZlInGiYcIAUrfSs%3D%7CVmhIGCcZOQQkGCccJAQ6ADwHJxskESwMOQQ5GSUaLxIyCDcCVAI%3D%7CV25Tbk5zU2xMcEl1VWtTaUlwJg%3D%3D&isg=Ar29SH8guO4XdhyBmwNtPy2rzB938vDSpl9fGH8C9JRDtt3oR6oBfItkFN0K&needFold=0&_ksTS=1496480841428_649&callback=jsonp650"
header = {
"authority": "rate.tmall.com",
"method": "GET",
"scheme": "https",
"accept": "*/*",
"accept-encoding": "gzip, deflate, sdch, br",
"accept-language": "zh-CN,zh;q=0.8",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36",
}
root = Tk()
root.wm_title("淘宝牛统计")
L_label = Label(root, text="链接").grid(row=0, sticky=W)
L_entry = Entry(root,width = 240)
L_entry.grid(row=0, column=1, stick=E)
P_label = Label(root, text="页数").grid(row=1, sticky=W)
P_entry = Entry(root, width = 240)
P_entry.grid(row=1, column=1, stick=E)
start_btn = Button(root, text="开始",anchor = 'center', command=worker).grid(row=3)
width = 300
height = 100
screenwidth = root.winfo_screenwidth()
screenheight = root.winfo_screenheight()
size = '%dx%d+%d+%d' % (width, height, (screenwidth - width) / 2, (screenheight - height) / 2)
print(size)
root.geometry(size)
root.mainloop()
|
normal
|
{
"blob_id": "123d3906ce040a4daa5309eae555bad5509f805e",
"index": 671,
"step-1": "# coding=utf-8\n# http://rate.tmall.com/list_detail_rate.htm?itemId=41464129793&sellerId=1652490016¤tPage=1\nimport requests, re\nfrom Tkinter import *\nimport numpy as np\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nimport random\nimport matplotlib.pyplot as plt\nplt.rcParams['font.sans-serif']=['SimHei'] #用来正常显示中文标签\nplt.rcParams['axes.unicode_minus']=False #用来正常显示负号\n\ndef worker():\n goods_url = L_entry.get()\n pages = P_entry.get()\n\n detail_list = []\n detail_dict = {}\n for i in range(int(pages)):\n page = i + 1\n goods_url = re.sub(r\"currentPage=\\d\", \"currentPage=%s\" % page, goods_url)\n rsp = requests.get(goods_url, headers=header)\n\n data = rsp.text\n data = eval(re.search(r\"\\{.*\", data).group().strip(')').replace(\"false\", \"0\").replace(\"true\", \"1\"))\n\n for detail in data['rateDetail']['rateList']:\n #for detail in data['rateList']:\n try:\n size = detail[\"auctionSku\"]\n except Exception as e:\n print e\n continue\n size = size.split(\";\")\n\n s1 = size[0].split(\":\")[1] if size else ''\n s2 = size[1].split(\":\")[1] if len(size)>1 else ''\n\n s = str(s1) + str(s2)\n if s in detail_list:\n detail_dict[s] = detail_dict[s] + 1\n else:\n detail_list.append(s)\n detail_dict[s] = 1\n\n root.wm_title(\"page%d\" % page)\n root.wm_title(\"下载完成\")\n make_image(detail_list,detail_dict)\n\ndef make_image(detail_list,detail_dict,goods_name):\n print detail_list\n print detail_dict\n colors = ['#ff0000', '#eb4310', '#f6941d', '#fbb417', '#ffff00', '#cdd541', '#99cc33', '#3f9337', '#219167',\n '#239676', '#24998d', '#1f9baa', '#0080ff', '#3366cc', '#333399', '#003366', '#800080', '#a1488e',\n '#c71585', '#bd2158']\n people = [detail.decode('utf8') for detail in detail_list]\n colors = colors[0:len(people)]\n #y轴元素数量\n y_pos = np.arange(len(people))\n #每个元素对应的值,array\n performance = [detail_dict[x] for x in detail_list]\n \n bars = plt.barh(y_pos, performance, align='center')#这里是产生横向柱状图 barh h--horizontal\n #设置颜色\n for bar,colors in zip(bars,colors):\n bar.set_color(colors)\n #y轴每个元素标签\n\n plt.yticks(y_pos, people)\n plt.yticks(fontsize=7)\n\n #x轴标题\n plt.xlabel('count')\n #x轴范围\n plt.xlim(0,max(performance))\n plt.title('size and colors count about taobao')\n plt.show()\nif __name__ == '__main__':\n # goods_url = \"https://rate.tmall.com/list_detail_rate.htm?itemId=527956695986&spuId=517713513&sellerId=2615125783&order=3¤tPage=1&append=0&content=1&tagId=&posi=&picture=&ua=146UW5TcyMNYQwiAiwZTXFIdUh1SHJOe0BuOG4%3D%7CUm5Ockt%2FRH1IdUB%2BRXpOdiA%3D%7CU2xMHDJ7G2AHYg8hAS8XLwEhD0ghSmQyZA%3D%3D%7CVGhXd1llXGhTal9iV2lSbVlhVmtJfUN4QHpAf0ZyT3JPekB0TGI0%7CVWldfS0SMg01ACAcIAAuE2JbZlInGiYcIAUrfSs%3D%7CVmhIGCcZOQQkGCccJAQ6ADwHJxskESwMOQQ5GSUaLxIyCDcCVAI%3D%7CV25Tbk5zU2xMcEl1VWtTaUlwJg%3D%3D&isg=Ar29SH8guO4XdhyBmwNtPy2rzB938vDSpl9fGH8C9JRDtt3oR6oBfItkFN0K&needFold=0&_ksTS=1496480841428_649&callback=jsonp650\"\n header = {\n \"authority\": \"rate.tmall.com\",\n \"method\": \"GET\",\n \"scheme\": \"https\",\n \"accept\": \"*/*\",\n \"accept-encoding\": \"gzip, deflate, sdch, br\",\n \"accept-language\": \"zh-CN,zh;q=0.8\",\n \"user-agent\": \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36\",\n }\n root = Tk()\n root.wm_title(\"淘宝牛统计\")\n L_label = Label(root, text=\"链接\").grid(row=0, sticky=W)\n L_entry = Entry(root,width = 240)\n L_entry.grid(row=0, column=1, stick=E)\n P_label = Label(root, text=\"页数\").grid(row=1, sticky=W)\n P_entry = Entry(root, width = 240)\n P_entry.grid(row=1, 
column=1, stick=E)\n start_btn = Button(root, text=\"开始\",anchor = 'center', command=worker).grid(row=3)\n width = 300\n height = 100\n screenwidth = root.winfo_screenwidth()\n screenheight = root.winfo_screenheight()\n size = '%dx%d+%d+%d' % (width, height, (screenwidth - width) / 2, (screenheight - height) / 2)\n print(size)\n root.geometry(size)\n root.mainloop()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DashboardArgs', 'Dashboard']
@pulumi.input_type
class DashboardArgs:
def __init__(__self__, *,
dashboard_definition: pulumi.Input[str],
dashboard_description: pulumi.Input[str],
dashboard_name: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]] = None):
"""
The set of arguments for constructing a Dashboard resource.
:param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.
:param pulumi.Input[str] dashboard_description: A description for the dashboard.
:param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.
:param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.
:param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.
"""
pulumi.set(__self__, "dashboard_definition", dashboard_definition)
pulumi.set(__self__, "dashboard_description", dashboard_description)
if dashboard_name is not None:
pulumi.set(__self__, "dashboard_name", dashboard_name)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="dashboardDefinition")
def dashboard_definition(self) -> pulumi.Input[str]:
"""
The dashboard definition specified in a JSON literal.
"""
return pulumi.get(self, "dashboard_definition")
@dashboard_definition.setter
def dashboard_definition(self, value: pulumi.Input[str]):
pulumi.set(self, "dashboard_definition", value)
@property
@pulumi.getter(name="dashboardDescription")
def dashboard_description(self) -> pulumi.Input[str]:
"""
A description for the dashboard.
"""
return pulumi.get(self, "dashboard_description")
@dashboard_description.setter
def dashboard_description(self, value: pulumi.Input[str]):
pulumi.set(self, "dashboard_description", value)
@property
@pulumi.getter(name="dashboardName")
def dashboard_name(self) -> Optional[pulumi.Input[str]]:
"""
A friendly name for the dashboard.
"""
return pulumi.get(self, "dashboard_name")
@dashboard_name.setter
def dashboard_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dashboard_name", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which to create the dashboard.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]:
"""
A list of key-value pairs that contain metadata for the dashboard.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]):
pulumi.set(self, "tags", value)
class Dashboard(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dashboard_definition: Optional[pulumi.Input[str]] = None,
dashboard_description: Optional[pulumi.Input[str]] = None,
dashboard_name: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,
__props__=None):
"""
Resource schema for AWS::IoTSiteWise::Dashboard
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.
:param pulumi.Input[str] dashboard_description: A description for the dashboard.
:param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.
:param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DashboardArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Resource schema for AWS::IoTSiteWise::Dashboard
:param str resource_name: The name of the resource.
:param DashboardArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dashboard_definition: Optional[pulumi.Input[str]] = None,
dashboard_description: Optional[pulumi.Input[str]] = None,
dashboard_name: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DashboardArgs.__new__(DashboardArgs)
if dashboard_definition is None and not opts.urn:
raise TypeError("Missing required property 'dashboard_definition'")
__props__.__dict__["dashboard_definition"] = dashboard_definition
if dashboard_description is None and not opts.urn:
raise TypeError("Missing required property 'dashboard_description'")
__props__.__dict__["dashboard_description"] = dashboard_description
__props__.__dict__["dashboard_name"] = dashboard_name
__props__.__dict__["project_id"] = project_id
__props__.__dict__["tags"] = tags
__props__.__dict__["dashboard_arn"] = None
__props__.__dict__["dashboard_id"] = None
super(Dashboard, __self__).__init__(
'aws-native:iotsitewise:Dashboard',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Dashboard':
"""
Get an existing Dashboard resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = DashboardArgs.__new__(DashboardArgs)
__props__.__dict__["dashboard_arn"] = None
__props__.__dict__["dashboard_definition"] = None
__props__.__dict__["dashboard_description"] = None
__props__.__dict__["dashboard_id"] = None
__props__.__dict__["dashboard_name"] = None
__props__.__dict__["project_id"] = None
__props__.__dict__["tags"] = None
return Dashboard(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="dashboardArn")
def dashboard_arn(self) -> pulumi.Output[str]:
"""
The ARN of the dashboard.
"""
return pulumi.get(self, "dashboard_arn")
@property
@pulumi.getter(name="dashboardDefinition")
def dashboard_definition(self) -> pulumi.Output[str]:
"""
The dashboard definition specified in a JSON literal.
"""
return pulumi.get(self, "dashboard_definition")
@property
@pulumi.getter(name="dashboardDescription")
def dashboard_description(self) -> pulumi.Output[str]:
"""
A description for the dashboard.
"""
return pulumi.get(self, "dashboard_description")
@property
@pulumi.getter(name="dashboardId")
def dashboard_id(self) -> pulumi.Output[str]:
"""
The ID of the dashboard.
"""
return pulumi.get(self, "dashboard_id")
@property
@pulumi.getter(name="dashboardName")
def dashboard_name(self) -> pulumi.Output[str]:
"""
A friendly name for the dashboard.
"""
return pulumi.get(self, "dashboard_name")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of the project in which to create the dashboard.
"""
return pulumi.get(self, "project_id")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:
"""
A list of key-value pairs that contain metadata for the dashboard.
"""
return pulumi.get(self, "tags")
|
normal
|
{
"blob_id": "2332783c96b24caa383bf47d82384e1c40a48e94",
"index": 8566,
"step-1": "<mask token>\n\n\[email protected]_type\nclass DashboardArgs:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n __props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n 
__props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, __self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n __props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-2": "<mask token>\n\n\[email protected]_type\nclass DashboardArgs:\n\n def __init__(__self__, *, dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str], dashboard_name: Optional[\n pulumi.Input[str]]=None, project_id: Optional[pulumi.Input[str]]=\n None, tags: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]=None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, 'dashboard_definition', dashboard_definition)\n pulumi.set(__self__, 'dashboard_description', dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, 'dashboard_name', dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, 'project_id', project_id)\n if tags is not None:\n pulumi.set(__self__, 'tags', tags)\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, 'dashboard_definition', value)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @project_id.setter\n def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'project_id', value)\n\n @property\n @pulumi.getter\n def tags(self) ->Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n\n @tags.setter\n def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]):\n pulumi.set(self, 'tags', value)\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param 
pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n __props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n __props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, __self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n __props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, 
__props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-3": "<mask token>\n\n\[email protected]_type\nclass DashboardArgs:\n\n def __init__(__self__, *, dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str], dashboard_name: Optional[\n pulumi.Input[str]]=None, project_id: Optional[pulumi.Input[str]]=\n None, tags: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]=None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, 'dashboard_definition', dashboard_definition)\n pulumi.set(__self__, 'dashboard_description', dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, 'dashboard_name', dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, 'project_id', project_id)\n if tags is not None:\n pulumi.set(__self__, 'tags', tags)\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, 'dashboard_definition', value)\n <mask token>\n <mask token>\n <mask token>\n\n @dashboard_name.setter\n def dashboard_name(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'dashboard_name', value)\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @project_id.setter\n def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'project_id', value)\n\n @property\n @pulumi.getter\n def tags(self) ->Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n\n @tags.setter\n def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]):\n pulumi.set(self, 'tags', value)\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param 
pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n __props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n __props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, __self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n 
__props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-4": "<mask token>\n\n\[email protected]_type\nclass DashboardArgs:\n\n def __init__(__self__, *, dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str], dashboard_name: Optional[\n pulumi.Input[str]]=None, project_id: Optional[pulumi.Input[str]]=\n None, tags: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]=None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, 'dashboard_definition', dashboard_definition)\n pulumi.set(__self__, 'dashboard_description', dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, 'dashboard_name', dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, 'project_id', project_id)\n if tags is not None:\n pulumi.set(__self__, 'tags', tags)\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, 'dashboard_definition', value)\n <mask token>\n <mask token>\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @dashboard_name.setter\n def dashboard_name(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'dashboard_name', value)\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @project_id.setter\n def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, 'project_id', value)\n\n @property\n @pulumi.getter\n def tags(self) ->Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n\n @tags.setter\n def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[\n 'DashboardTagArgs']]]]):\n pulumi.set(self, 'tags', value)\n\n\nclass Dashboard(pulumi.CustomResource):\n\n @overload\n def __init__(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified 
in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n\n @overload\n def __init__(__self__, resource_name: str, args: DashboardArgs, opts:\n Optional[pulumi.ResourceOptions]=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs,\n pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.\n __dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.\n ResourceOptions]=None, dashboard_definition: Optional[pulumi.Input[\n str]]=None, dashboard_description: Optional[pulumi.Input[str]]=None,\n dashboard_name: Optional[pulumi.Input[str]]=None, project_id:\n Optional[pulumi.Input[str]]=None, tags: Optional[pulumi.Input[\n Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]]=None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.\n get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError(\n 'Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError(\n '__props__ is only valid when passed in combination with a valid opts.id to get an existing resource'\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_definition'\")\n __props__.__dict__['dashboard_definition'] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\n \"Missing required property 'dashboard_description'\")\n __props__.__dict__['dashboard_description'] = dashboard_description\n __props__.__dict__['dashboard_name'] = dashboard_name\n __props__.__dict__['project_id'] = project_id\n __props__.__dict__['tags'] = tags\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_id'] = None\n super(Dashboard, __self__).__init__('aws-native:iotsitewise:Dashboard',\n resource_name, __props__, opts)\n\n @staticmethod\n def get(resource_name: str, id: pulumi.Input[str], opts: Optional[\n pulumi.ResourceOptions]=None) ->'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)\n )\n __props__ = DashboardArgs.__new__(DashboardArgs)\n __props__.__dict__['dashboard_arn'] = None\n __props__.__dict__['dashboard_definition'] = 
None\n __props__.__dict__['dashboard_description'] = None\n __props__.__dict__['dashboard_id'] = None\n __props__.__dict__['dashboard_name'] = None\n __props__.__dict__['project_id'] = None\n __props__.__dict__['tags'] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name='dashboardArn')\n def dashboard_arn(self) ->pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_arn')\n\n @property\n @pulumi.getter(name='dashboardDefinition')\n def dashboard_definition(self) ->pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, 'dashboard_definition')\n\n @property\n @pulumi.getter(name='dashboardDescription')\n def dashboard_description(self) ->pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_description')\n\n @property\n @pulumi.getter(name='dashboardId')\n def dashboard_id(self) ->pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_id')\n\n @property\n @pulumi.getter(name='dashboardName')\n def dashboard_name(self) ->pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, 'dashboard_name')\n\n @property\n @pulumi.getter(name='projectId')\n def project_id(self) ->pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, 'project_id')\n\n @property\n @pulumi.getter\n def tags(self) ->pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, 'tags')\n",
"step-5": "# coding=utf-8\n# *** WARNING: this file was generated by the Pulumi SDK Generator. ***\n# *** Do not edit by hand unless you're certain you know what you are doing! ***\n\nimport copy\nimport warnings\nimport pulumi\nimport pulumi.runtime\nfrom typing import Any, Mapping, Optional, Sequence, Union, overload\nfrom .. import _utilities\nfrom . import outputs\nfrom ._inputs import *\n\n__all__ = ['DashboardArgs', 'Dashboard']\n\[email protected]_type\nclass DashboardArgs:\n def __init__(__self__, *,\n dashboard_definition: pulumi.Input[str],\n dashboard_description: pulumi.Input[str],\n dashboard_name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]] = None):\n \"\"\"\n The set of arguments for constructing a Dashboard resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n pulumi.set(__self__, \"dashboard_definition\", dashboard_definition)\n pulumi.set(__self__, \"dashboard_description\", dashboard_description)\n if dashboard_name is not None:\n pulumi.set(__self__, \"dashboard_name\", dashboard_name)\n if project_id is not None:\n pulumi.set(__self__, \"project_id\", project_id)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n\n @property\n @pulumi.getter(name=\"dashboardDefinition\")\n def dashboard_definition(self) -> pulumi.Input[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, \"dashboard_definition\")\n\n @dashboard_definition.setter\n def dashboard_definition(self, value: pulumi.Input[str]):\n pulumi.set(self, \"dashboard_definition\", value)\n\n @property\n @pulumi.getter(name=\"dashboardDescription\")\n def dashboard_description(self) -> pulumi.Input[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_description\")\n\n @dashboard_description.setter\n def dashboard_description(self, value: pulumi.Input[str]):\n pulumi.set(self, \"dashboard_description\", value)\n\n @property\n @pulumi.getter(name=\"dashboardName\")\n def dashboard_name(self) -> Optional[pulumi.Input[str]]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_name\")\n\n @dashboard_name.setter\n def dashboard_name(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, \"dashboard_name\", value)\n\n @property\n @pulumi.getter(name=\"projectId\")\n def project_id(self) -> Optional[pulumi.Input[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, \"project_id\")\n\n @project_id.setter\n def project_id(self, value: Optional[pulumi.Input[str]]):\n pulumi.set(self, \"project_id\", value)\n\n @property\n @pulumi.getter\n def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, \"tags\")\n\n @tags.setter\n def tags(self, value: 
Optional[pulumi.Input[Sequence[pulumi.Input['DashboardTagArgs']]]]):\n pulumi.set(self, \"tags\", value)\n\n\nclass Dashboard(pulumi.CustomResource):\n @overload\n def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n dashboard_definition: Optional[pulumi.Input[str]] = None,\n dashboard_description: Optional[pulumi.Input[str]] = None,\n dashboard_name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,\n __props__=None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] dashboard_definition: The dashboard definition specified in a JSON literal.\n :param pulumi.Input[str] dashboard_description: A description for the dashboard.\n :param pulumi.Input[str] dashboard_name: A friendly name for the dashboard.\n :param pulumi.Input[str] project_id: The ID of the project in which to create the dashboard.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]] tags: A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n ...\n @overload\n def __init__(__self__,\n resource_name: str,\n args: DashboardArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n \"\"\"\n Resource schema for AWS::IoTSiteWise::Dashboard\n\n :param str resource_name: The name of the resource.\n :param DashboardArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n ...\n def __init__(__self__, resource_name: str, *args, **kwargs):\n resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs, pulumi.ResourceOptions, *args, **kwargs)\n if resource_args is not None:\n __self__._internal_init(resource_name, opts, **resource_args.__dict__)\n else:\n __self__._internal_init(resource_name, *args, **kwargs)\n\n def _internal_init(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n dashboard_definition: Optional[pulumi.Input[str]] = None,\n dashboard_description: Optional[pulumi.Input[str]] = None,\n dashboard_name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DashboardTagArgs']]]]] = None,\n __props__=None):\n opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n if opts.id is None:\n if __props__ is not None:\n raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')\n __props__ = DashboardArgs.__new__(DashboardArgs)\n\n if dashboard_definition is None and not opts.urn:\n raise TypeError(\"Missing required property 'dashboard_definition'\")\n __props__.__dict__[\"dashboard_definition\"] = dashboard_definition\n if dashboard_description is None and not opts.urn:\n raise TypeError(\"Missing required property 'dashboard_description'\")\n __props__.__dict__[\"dashboard_description\"] = dashboard_description\n __props__.__dict__[\"dashboard_name\"] = dashboard_name\n __props__.__dict__[\"project_id\"] = project_id\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"dashboard_arn\"] = None\n 
__props__.__dict__[\"dashboard_id\"] = None\n super(Dashboard, __self__).__init__(\n 'aws-native:iotsitewise:Dashboard',\n resource_name,\n __props__,\n opts)\n\n @staticmethod\n def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Dashboard':\n \"\"\"\n Get an existing Dashboard resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n \"\"\"\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = DashboardArgs.__new__(DashboardArgs)\n\n __props__.__dict__[\"dashboard_arn\"] = None\n __props__.__dict__[\"dashboard_definition\"] = None\n __props__.__dict__[\"dashboard_description\"] = None\n __props__.__dict__[\"dashboard_id\"] = None\n __props__.__dict__[\"dashboard_name\"] = None\n __props__.__dict__[\"project_id\"] = None\n __props__.__dict__[\"tags\"] = None\n return Dashboard(resource_name, opts=opts, __props__=__props__)\n\n @property\n @pulumi.getter(name=\"dashboardArn\")\n def dashboard_arn(self) -> pulumi.Output[str]:\n \"\"\"\n The ARN of the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_arn\")\n\n @property\n @pulumi.getter(name=\"dashboardDefinition\")\n def dashboard_definition(self) -> pulumi.Output[str]:\n \"\"\"\n The dashboard definition specified in a JSON literal.\n \"\"\"\n return pulumi.get(self, \"dashboard_definition\")\n\n @property\n @pulumi.getter(name=\"dashboardDescription\")\n def dashboard_description(self) -> pulumi.Output[str]:\n \"\"\"\n A description for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_description\")\n\n @property\n @pulumi.getter(name=\"dashboardId\")\n def dashboard_id(self) -> pulumi.Output[str]:\n \"\"\"\n The ID of the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_id\")\n\n @property\n @pulumi.getter(name=\"dashboardName\")\n def dashboard_name(self) -> pulumi.Output[str]:\n \"\"\"\n A friendly name for the dashboard.\n \"\"\"\n return pulumi.get(self, \"dashboard_name\")\n\n @property\n @pulumi.getter(name=\"projectId\")\n def project_id(self) -> pulumi.Output[Optional[str]]:\n \"\"\"\n The ID of the project in which to create the dashboard.\n \"\"\"\n return pulumi.get(self, \"project_id\")\n\n @property\n @pulumi.getter\n def tags(self) -> pulumi.Output[Optional[Sequence['outputs.DashboardTag']]]:\n \"\"\"\n A list of key-value pairs that contain metadata for the dashboard.\n \"\"\"\n return pulumi.get(self, \"tags\")\n\n",
"step-ids": [
14,
21,
22,
23,
28
]
}
|
[
14,
21,
22,
23,
28
] |
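For reference, a minimal usage sketch of the Dashboard resource described by this entry; the pulumi_aws_native import path and every argument value below are assumptions for illustration, not taken from the dataset record.

# Hypothetical usage sketch (assumed package path and invented values)
import json
import pulumi_aws_native as aws_native

dashboard = aws_native.iotsitewise.Dashboard(
    "exampleDashboard",
    dashboard_definition=json.dumps({"widgets": []}),  # required JSON literal
    dashboard_description="Example SiteWise dashboard",  # required
    dashboard_name="example-dashboard",  # optional friendly name
)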
def tobin(n):
    # build the binary string most-significant bit first;
    # the loop must shrink n or it never terminates
    bits = ""
    while n > 0:
        bits = str(n % 2) + bits
        n //= 2
    return bits or "0"

n = int(input())
print(tobin(n))
|
normal
|
{
"blob_id": "1c5ca920fe1f116a5bc52c9e5c53c13b1e1c925f",
"index": 2412,
"step-1": "<mask token>\n",
"step-2": "def tobin(n):\n bin = ''\n while n / 2 != 0:\n if n % 2 == 0:\n bin = bin + '0'\n else:\n bin = bin + '1'\n if n % 2 == 1:\n bin = bin + '1'\n return bin\n\n\n<mask token>\n",
"step-3": "def tobin(n):\n bin = ''\n while n / 2 != 0:\n if n % 2 == 0:\n bin = bin + '0'\n else:\n bin = bin + '1'\n if n % 2 == 1:\n bin = bin + '1'\n return bin\n\n\n<mask token>\nprint(bin)\n",
"step-4": "def tobin(n):\n bin = ''\n while n / 2 != 0:\n if n % 2 == 0:\n bin = bin + '0'\n else:\n bin = bin + '1'\n if n % 2 == 1:\n bin = bin + '1'\n return bin\n\n\nn = int(input())\nbin = tobin(5)\nprint(bin)\n",
"step-5": "def tobin(n):\r\n bin = \"\";\r\n while(n/2!=0):\r\n if n%2==0:\r\n bin = bin + \"0\"\r\n else:\r\n bin = bin + \"1\"\r\n if n%2==1:\r\n bin = bin + \"1\"\r\n return bin\r\n\r\nn = int(input())\r\nbin = tobin(5)\r\nprint(bin)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
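A quick self-check of the corrected tobin function at the top of this entry against Python's built-in bin(); purely illustrative.

# Sanity check (sketch): tobin should agree with the built-in bin()
for value in (0, 1, 5, 12, 255):
    assert tobin(value) == bin(value)[2:]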
# cook your dish here
t = int(input())
while t:
    n = int(input())
    a = list(map(int, input().split()))
    a.sort(reverse=True)  # largest first: the i-th pick loses i
    s = 0
    for i in range(n):
        k = a[i] - i
        if k >= 0:
            s += k
    print(s % 1000000007)
    t -= 1
|
normal
|
{
"blob_id": "44bf409d627a6029ab4c4f1fff99f102b8d57279",
"index": 3954,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile t:\n n = int(input())\n a = list(map(int, input().split()))\n a.sort(reverse=True)\n s = 0\n for i in range(n):\n k = a[i] - i\n if k >= 0:\n s += k\n print(s % 1000000007)\n t -= 1\n",
"step-3": "t = int(input())\nwhile t:\n n = int(input())\n a = list(map(int, input().split()))\n a.sort(reverse=True)\n s = 0\n for i in range(n):\n k = a[i] - i\n if k >= 0:\n s += k\n print(s % 1000000007)\n t -= 1\n",
"step-4": "# cook your dish here\nt=int(input())\nwhile t:\n n=int(input())\n a=list(map(int,input().split()))\n a.sort(reverse=True)\n s=0\n for i in range(n):\n k=a[i]-i\n if k>=0:\n s+=k\n print(s%1000000007)\n t-=1\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
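A tiny worked example of the greedy sum computed above, assuming the usual interpretation that the i-th pick (0-indexed, after sorting descending) is reduced by i and never goes below zero.

# Worked example (sketch): [3, 3, 3] contributes 3 + 2 + 1 = 6
a = [3, 3, 3]
a.sort(reverse=True)
total = sum(max(v - i, 0) for i, v in enumerate(a))
print(total % 1000000007)  # -> 6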
import mysql.connector
from mysql.connector import errorcode
DB_NAME = 'PieDB'
TABLES = {}
# TABLES['pietweets'] = (
# "CREATE TABLE `pietweets` ("
# " `id` int NOT NULL AUTO_INCREMENT,"
# " `tweet_id` bigint NOT NULL,"
# " `username` varchar(32) NOT NULL,"
# " `geo_lat` float(53) NOT NULL,"
# " `geo_long` float(53) NOT NULL,"
# " `text` varchar(255) NOT NULL,"
# " `timestamp` datetime NOT NULL,"
# " PRIMARY KEY (`id`)"
# ") ENGINE=InnoDB")
TABLES['lemonpie'] = (
"CREATE TABLE `lemonpie` ("
" `id` int NOT NULL AUTO_INCREMENT,"
" `tweet_id` bigint NOT NULL,"
" `username` varchar(32) NOT NULL,"
" `geo_lat` float(53) NOT NULL,"
" `geo_long` float(53) NOT NULL,"
" `text` varchar(255) NOT NULL,"
" `timestamp` datetime NOT NULL,"
" PRIMARY KEY (`id`)"
") ENGINE=InnoDB")
# DB credentials; connect without naming a database so the script can
# create PieDB below if it does not exist yet
config = {
    'user': 'piemaster',
    'password': 'piemaster123',
    'host': 'piedb.chhtgdmxqekc.us-east-1.rds.amazonaws.com',
    'raise_on_warnings': True,
}
# establish connection with DB config credentials
cnx = mysql.connector.connect(**config)
cursor = cnx.cursor()
def create_database(cursor):
try:
cursor.execute(
"CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(DB_NAME))
except mysql.connector.Error as err:
print("Failed creating database: {}".format(err))
exit(1)
# try connecting to designated DB, if not exist - create this DB
try:
cnx.database = DB_NAME
except mysql.connector.Error as err:
if err.errno == errorcode.ER_BAD_DB_ERROR:
create_database(cursor)
cnx.database = DB_NAME
else:
print(err)
exit(1)
# iterate through TABLES and create each table
for name, ddl in TABLES.items():
try:
print("Creating table {}: ".format(name))
cursor.execute(ddl)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
print("already exists.")
else:
print(err.msg)
else:
print("OK")
# closing db connection
cursor.close()
cnx.close()
|
normal
|
{
"blob_id": "38abc4bc99f3b15b416c77481818464a6c7f11ef",
"index": 3844,
"step-1": "<mask token>\n\n\ndef create_database(cursor):\n try:\n cursor.execute(\"CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'\".\n format(DB_NAME))\n except mysql.connector.Error as err:\n print('Failed creating database: {}'.format(err))\n exit(1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_database(cursor):\n try:\n cursor.execute(\"CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'\".\n format(DB_NAME))\n except mysql.connector.Error as err:\n print('Failed creating database: {}'.format(err))\n exit(1)\n\n\ntry:\n cnx.database = DB_NAME\nexcept mysql.connector.Error as err:\n if err.errno == errorcode.ER_BAD_DB_ERROR:\n create_database(cursor)\n cnx.database = DB_NAME\n else:\n print(err)\n exit(1)\nfor name, ddl in TABLES.iteritems():\n try:\n print('Creating table {}: '.format(name))\n cursor.execute(ddl)\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:\n print('already exists.')\n else:\n print(err.msg)\n else:\n print('OK')\ncursor.close()\ncnx.close()\n",
"step-3": "<mask token>\nDB_NAME = 'PieDB'\nTABLES = {}\nTABLES['lemonpie'] = (\n 'CREATE TABLE `lemonpie` ( `id` int NOT NULL AUTO_INCREMENT, `tweet_id` bigint NOT NULL, `username` varchar(32) NOT NULL, `geo_lat` float(53) NOT NULL, `geo_long` float(53) NOT NULL, `text` varchar(255) NOT NULL, `timestamp` datetime NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB'\n )\nconfig = {'user': 'piemaster', 'password': 'piemaster123', 'host':\n 'piedb.chhtgdmxqekc.us-east-1.rds.amazonaws.com', 'database': 'PieDB',\n 'raise_on_warnings': True}\ncnx = mysql.connector.connect(**config)\ncursor = cnx.cursor()\n\n\ndef create_database(cursor):\n try:\n cursor.execute(\"CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'\".\n format(DB_NAME))\n except mysql.connector.Error as err:\n print('Failed creating database: {}'.format(err))\n exit(1)\n\n\ntry:\n cnx.database = DB_NAME\nexcept mysql.connector.Error as err:\n if err.errno == errorcode.ER_BAD_DB_ERROR:\n create_database(cursor)\n cnx.database = DB_NAME\n else:\n print(err)\n exit(1)\nfor name, ddl in TABLES.iteritems():\n try:\n print('Creating table {}: '.format(name))\n cursor.execute(ddl)\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:\n print('already exists.')\n else:\n print(err.msg)\n else:\n print('OK')\ncursor.close()\ncnx.close()\n",
"step-4": "import mysql.connector\nfrom mysql.connector import errorcode\nDB_NAME = 'PieDB'\nTABLES = {}\nTABLES['lemonpie'] = (\n 'CREATE TABLE `lemonpie` ( `id` int NOT NULL AUTO_INCREMENT, `tweet_id` bigint NOT NULL, `username` varchar(32) NOT NULL, `geo_lat` float(53) NOT NULL, `geo_long` float(53) NOT NULL, `text` varchar(255) NOT NULL, `timestamp` datetime NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB'\n )\nconfig = {'user': 'piemaster', 'password': 'piemaster123', 'host':\n 'piedb.chhtgdmxqekc.us-east-1.rds.amazonaws.com', 'database': 'PieDB',\n 'raise_on_warnings': True}\ncnx = mysql.connector.connect(**config)\ncursor = cnx.cursor()\n\n\ndef create_database(cursor):\n try:\n cursor.execute(\"CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'\".\n format(DB_NAME))\n except mysql.connector.Error as err:\n print('Failed creating database: {}'.format(err))\n exit(1)\n\n\ntry:\n cnx.database = DB_NAME\nexcept mysql.connector.Error as err:\n if err.errno == errorcode.ER_BAD_DB_ERROR:\n create_database(cursor)\n cnx.database = DB_NAME\n else:\n print(err)\n exit(1)\nfor name, ddl in TABLES.iteritems():\n try:\n print('Creating table {}: '.format(name))\n cursor.execute(ddl)\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:\n print('already exists.')\n else:\n print(err.msg)\n else:\n print('OK')\ncursor.close()\ncnx.close()\n",
"step-5": "import mysql.connector\nfrom mysql.connector import errorcode\n\nDB_NAME = 'PieDB'\n\nTABLES = {}\n# TABLES['pietweets'] = (\n# \t\"CREATE TABLE `pietweets` (\"\n# \t\" `id` int NOT NULL AUTO_INCREMENT,\"\t\t\n# \t\" `tweet_id` bigint NOT NULL,\"\n# \t\" `username` varchar(32) NOT NULL,\"\n# \t\" `geo_lat` float(53) NOT NULL,\"\n# \t\" `geo_long` float(53) NOT NULL,\"\n# \t\" `text` varchar(255) NOT NULL,\"\n# \t\" `timestamp` datetime NOT NULL,\"\n# \t\" PRIMARY KEY (`id`)\"\n# \t\") ENGINE=InnoDB\")\nTABLES['lemonpie'] = (\n \"CREATE TABLE `lemonpie` (\"\n \" `id` int NOT NULL AUTO_INCREMENT,\" \n \" `tweet_id` bigint NOT NULL,\"\n \" `username` varchar(32) NOT NULL,\"\n \" `geo_lat` float(53) NOT NULL,\"\n \" `geo_long` float(53) NOT NULL,\"\n \" `text` varchar(255) NOT NULL,\"\n \" `timestamp` datetime NOT NULL,\"\n \" PRIMARY KEY (`id`)\"\n \") ENGINE=InnoDB\")\n\n# DB credentials\nconfig = {\n 'user': 'piemaster',\n 'password': 'piemaster123',\n 'host': 'piedb.chhtgdmxqekc.us-east-1.rds.amazonaws.com',\n 'database': 'PieDB',\n 'raise_on_warnings': True,\n}\n\n# establish connection with DB config credentials\ncnx = mysql.connector.connect(**config)\ncursor = cnx.cursor()\n\ndef create_database(cursor):\n try:\n cursor.execute(\n \"CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'\".format(DB_NAME))\n except mysql.connector.Error as err:\n print(\"Failed creating database: {}\".format(err))\n exit(1)\n\n# try connecting to designated DB, if not exist - create this DB\ntry:\n cnx.database = DB_NAME \nexcept mysql.connector.Error as err:\n if err.errno == errorcode.ER_BAD_DB_ERROR:\n create_database(cursor)\n cnx.database = DB_NAME\n else:\n print(err)\n exit(1)\n\n# iterate through TABLES and create each table\nfor name, ddl in TABLES.iteritems():\n try:\n print(\"Creating table {}: \".format(name))\n cursor.execute(ddl)\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:\n print(\"already exists.\")\n else:\n print(err.msg)\n else:\n print(\"OK\")\n\n# closing db connection\ncursor.close()\ncnx.close()\n\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
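A hypothetical follow-up showing how a row could be inserted into the lemonpie table with a parameterized query; it assumes an open cnx/cursor like the ones created in the script (which closes them at the end), and all values are invented for illustration.

# Hypothetical parameterized insert (sketch; assumes cnx/cursor are open)
insert_stmt = (
    "INSERT INTO lemonpie "
    "(tweet_id, username, geo_lat, geo_long, text, timestamp) "
    "VALUES (%s, %s, %s, %s, %s, %s)"
)
sample_row = (123456789, 'piefan', 41.3851, 2.1734,
              'lemon pie spotted in BCN', '2015-06-01 12:00:00')
cursor.execute(insert_stmt, sample_row)
cnx.commit()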
import tkinter as tk
import random
root = tk.Tk()
main_frame = tk.Frame(root)
var = tk.StringVar()
ch = [ "hello world" , "HI Pyton", "Mar Java", "Mit Java", "Lut Java" ]
var.set("Hello world I am a Label")
label = tk.Label(main_frame,textvariable=var,
bg="black",fg="white",font=("Times New Roman",24,"bold"))
label.pack()
def change_label():
var.set(random.choice(ch))
b1 = tk.Button(main_frame,text="click",command=change_label,
font=("Arial",15,'bold'),bg="pink",fg="red")
b1.pack()
expr = tk.StringVar()
e1 = tk.Entry(root,textvariable=expr,font=("Arial",20,'bold'),
bg='gray',fg='white')
main_frame.pack()
button = tk.Button(root,text="!!EXIT!!",command=root.destroy,
font=("Arial",15,'bold'),bg="pink",fg="red")
button.pack()
def solve():
    # evaluate the arithmetic expression typed into the entry
    # (note: eval trusts whatever the user typed)
    expr.set(eval(expr.get()))
result_button = tk.Button(root,text="!!Result!!",command=solve,
                font=("Arial",15,'bold'),bg="pink",fg="red")
def clear():
expr.set("")
clr_button= tk.Button(root,text="!!clear!!",command=clear,
font=("Arial",15,'bold'),bg="pink",fg="red")
e1.pack()
result_button.pack()
clr_button.pack(anchor='sw')
root.title("My Appliction")
root.wm_minsize(400,400)
root.wm_maxsize(500,500)
root.geometry("+500+200")
root.mainloop()
|
normal
|
{
"blob_id": "33938a28aad29e996255827825a0cdb1db6b70b7",
"index": 5842,
"step-1": "<mask token>\n\n\ndef change_label():\n var.set(random.choice(ch))\n\n\n<mask token>\n\n\ndef slove():\n expr.set(eval(expr.get()))\n\n\n<mask token>\n\n\ndef clear():\n expr.set('')\n\n\n<mask token>\n",
"step-2": "<mask token>\nvar.set('Hello world I am a Label')\n<mask token>\nlabel.pack()\n\n\ndef change_label():\n var.set(random.choice(ch))\n\n\n<mask token>\nb1.pack()\n<mask token>\nmain_frame.pack()\n<mask token>\nbutton.pack()\n\n\ndef slove():\n expr.set(eval(expr.get()))\n\n\n<mask token>\n\n\ndef clear():\n expr.set('')\n\n\n<mask token>\ne1.pack()\nresult_button.pack()\nclr_button.pack(anchor='sw')\nroot.title('My Appliction')\nroot.wm_minsize(400, 400)\nroot.wm_maxsize(500, 500)\nroot.geometry('+500+200')\nroot.mainloop()\n",
"step-3": "<mask token>\nroot = tk.Tk()\nmain_frame = tk.Frame(root)\nvar = tk.StringVar()\nch = ['hello world', 'HI Pyton', 'Mar Java', 'Mit Java', 'Lut Java']\nvar.set('Hello world I am a Label')\nlabel = tk.Label(main_frame, textvariable=var, bg='black', fg='white', font\n =('Times New Roman', 24, 'bold'))\nlabel.pack()\n\n\ndef change_label():\n var.set(random.choice(ch))\n\n\nb1 = tk.Button(main_frame, text='click', command=change_label, font=(\n 'Arial', 15, 'bold'), bg='pink', fg='red')\nb1.pack()\nexpr = tk.StringVar()\ne1 = tk.Entry(root, textvariable=expr, font=('Arial', 20, 'bold'), bg=\n 'gray', fg='white')\nmain_frame.pack()\nbutton = tk.Button(root, text='!!EXIT!!', command=root.destroy, font=(\n 'Arial', 15, 'bold'), bg='pink', fg='red')\nbutton.pack()\n\n\ndef slove():\n expr.set(eval(expr.get()))\n\n\nresult_button = tk.Button(root, text='!!Result!!', command=slove, font=(\n 'Arial', 15, 'bold'), bg='pink', fg='red')\n\n\ndef clear():\n expr.set('')\n\n\nclr_button = tk.Button(root, text='!!clear!!', command=clear, font=('Arial',\n 15, 'bold'), bg='pink', fg='red')\ne1.pack()\nresult_button.pack()\nclr_button.pack(anchor='sw')\nroot.title('My Appliction')\nroot.wm_minsize(400, 400)\nroot.wm_maxsize(500, 500)\nroot.geometry('+500+200')\nroot.mainloop()\n",
"step-4": "import tkinter as tk\nimport random\nroot = tk.Tk()\nmain_frame = tk.Frame(root)\nvar = tk.StringVar()\nch = ['hello world', 'HI Pyton', 'Mar Java', 'Mit Java', 'Lut Java']\nvar.set('Hello world I am a Label')\nlabel = tk.Label(main_frame, textvariable=var, bg='black', fg='white', font\n =('Times New Roman', 24, 'bold'))\nlabel.pack()\n\n\ndef change_label():\n var.set(random.choice(ch))\n\n\nb1 = tk.Button(main_frame, text='click', command=change_label, font=(\n 'Arial', 15, 'bold'), bg='pink', fg='red')\nb1.pack()\nexpr = tk.StringVar()\ne1 = tk.Entry(root, textvariable=expr, font=('Arial', 20, 'bold'), bg=\n 'gray', fg='white')\nmain_frame.pack()\nbutton = tk.Button(root, text='!!EXIT!!', command=root.destroy, font=(\n 'Arial', 15, 'bold'), bg='pink', fg='red')\nbutton.pack()\n\n\ndef slove():\n expr.set(eval(expr.get()))\n\n\nresult_button = tk.Button(root, text='!!Result!!', command=slove, font=(\n 'Arial', 15, 'bold'), bg='pink', fg='red')\n\n\ndef clear():\n expr.set('')\n\n\nclr_button = tk.Button(root, text='!!clear!!', command=clear, font=('Arial',\n 15, 'bold'), bg='pink', fg='red')\ne1.pack()\nresult_button.pack()\nclr_button.pack(anchor='sw')\nroot.title('My Appliction')\nroot.wm_minsize(400, 400)\nroot.wm_maxsize(500, 500)\nroot.geometry('+500+200')\nroot.mainloop()\n",
"step-5": "import tkinter as tk \nimport random\nroot = tk.Tk()\nmain_frame = tk.Frame(root)\nvar = tk.StringVar()\nch = [ \"hello world\" , \"HI Pyton\", \"Mar Java\", \"Mit Java\", \"Lut Java\" ]\nvar.set(\"Hello world I am a Label\")\nlabel = tk.Label(main_frame,textvariable=var,\n bg=\"black\",fg=\"white\",font=(\"Times New Roman\",24,\"bold\"))\nlabel.pack()\ndef change_label():\n var.set(random.choice(ch))\nb1 = tk.Button(main_frame,text=\"click\",command=change_label,\n font=(\"Arial\",15,'bold'),bg=\"pink\",fg=\"red\")\n\nb1.pack()\n\nexpr = tk.StringVar()\ne1 = tk.Entry(root,textvariable=expr,font=(\"Arial\",20,'bold'),\n bg='gray',fg='white')\n\nmain_frame.pack()\n\nbutton = tk.Button(root,text=\"!!EXIT!!\",command=root.destroy,\n font=(\"Arial\",15,'bold'),bg=\"pink\",fg=\"red\")\nbutton.pack()\ndef slove():\n expr.set(eval(expr.get()))\nresult_button= tk.Button(root,text=\"!!Result!!\",command=slove,\n font=(\"Arial\",15,'bold'),bg=\"pink\",fg=\"red\")\ndef clear():\n expr.set(\"\")\nclr_button= tk.Button(root,text=\"!!clear!!\",command=clear,\n font=(\"Arial\",15,'bold'),bg=\"pink\",fg=\"red\")\ne1.pack()\nresult_button.pack()\nclr_button.pack(anchor='sw')\nroot.title(\"My Appliction\")\nroot.wm_minsize(400,400)\nroot.wm_maxsize(500,500)\nroot.geometry(\"+500+200\")\nroot.mainloop()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
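One small optional refinement for the calculator entry above: binding the Return key so pressing Enter triggers the same handler as the result button, using tkinter's standard bind API (a sketch, matching the solve function defined in the script).

# Optional sketch: let the Enter key trigger the same solve handler
e1.bind("<Return>", lambda event: solve())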
################################################################################
# #
# This file is part of the Potato Engine (PE). #
# #
# Copyright (C) 2007-2010 ElectroMagnetic Potatoes (EMP). #
# See the AUTHORS file for more information. #
# #
# This library is free software; you can redistribute it and/or #
# modify it under the terms of the GNU Lesser General Public #
# License as published by the Free Software Foundation; either #
# version 2.1 of the License, or (at your option) any later version. #
# #
# This library is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #
# Lesser General Public License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import os
import build
################################################################
# Default options (will be overriden by command line switches) #
################################################################
# Parallel build
SetOption('num_jobs', 4)
# include cache
SetOption('implicit_cache', 1)
##########################################################
# Command-line parameters (overriden by localconfig.py) #
##########################################################
buildVariables = Variables("localconfig.py")
buildVariables.Add(PathVariable("QTDIR", "Qt4 root directory", "/usr/share/qt4", PathVariable.PathIsDir))
buildVariables.Add(PathVariable("OGRE_HOME", "Ogre1.6 root directory (windows only)", None, PathVariable.PathIsDir))
buildVariables.Add(PathVariable("PTHREADWIN32_HOME", "PthreadWin32 root directory (windows only)", None, PathVariable.PathIsDir))
buildVariables.Add(PathVariable("ODE_HOME", "ODE 0.11 root directory", None, PathVariable.PathIsDir))
buildVariables.Add(BoolVariable("DEBUG", "If true, build in debug configuration", False))
buildVariables.Add(BoolVariable("FORCE_MINGW", "When both MinGW and VC++ are installed, force the use of the MinGW compiler instead of the default (windows only)", False))
buildVariables.Add(BoolVariable("DISABLE_GRAPH", "Disable dependency graph generation", False))
##############################################################################
# Variable value extraction (nasty, should be updated when the API evolves) #
# The reason for having this here is that we have to access variables before #
# we can create the real construction environment (for tools selection) #
##############################################################################
currentVariables = Environment(variables = buildVariables).Dictionary()
####################
# Base environment #
####################
baseTools = ["qt"]
if os.name == "nt":
if currentVariables["FORCE_MINGW"]:
baseTools.append("mingw")
else:
baseTools.append("default")
else:
baseTools.append("default")
baseEnvironment = Environment(tools = baseTools, variables = buildVariables)
# additional variables
baseEnvironment["OSNAME"] = os.name
baseEnvironment["SYSPATH"] = os.environ["PATH"].split(os.pathsep)
if baseEnvironment["CC"] == "cl":
baseEnvironment.AppendUnique(CPPFLAGS = ["/EHsc"])
# debug symbols vs. optimization
if baseEnvironment["DEBUG"]:
if baseEnvironment["CC"] == "cl":
baseEnvironment.AppendUnique(CPPFLAGS = ["/Z7"])
else:
baseEnvironment.AppendUnique(CPPFLAGS = ["-g"])
else:
if baseEnvironment["CC"] == "cl":
baseEnvironment.AppendUnique(CPPFLAGS = ["/Ox"])
else:
baseEnvironment.AppendUnique(CPPFLAGS = ["-O2"])
# Qt tool workaround
baseEnvironment.Replace(LIBS = [])
baseEnvironment.Replace(LIBPATH = [])
baseEnvironment.Replace(CPPPATH = [])
# Qt UI builder
uiBuilder = Builder(action = '$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE')
baseEnvironment.Append(BUILDERS = {'Ui' : uiBuilder})
# Qt RC builder
rcBuilder = Builder(action = '$QT_BINPATH/rcc $QT_RCCDECLFLAGS -o ${TARGETS[0]} $SOURCE')
baseEnvironment.Append(BUILDERS = {'Rc' : rcBuilder})
# Under windows, add the platform SDK
if os.name == "nt" and baseEnvironment["CC"] == "cl":
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Microsoft SDKs\\Windows")
winSdkHome = _winreg.QueryValueEx(key, "CurrentInstallFolder")[0]
_winreg.CloseKey(key)
baseEnvironment["WINSDK_HOME"] = winSdkHome
baseEnvironment.AppendUnique(CPPPATH = ["$WINSDK_HOME/Include"])
baseEnvironment.AppendUnique(LIBPATH = ["$WINSDK_HOME/Lib"])
# Do not rely on VC++ runtime library
if os.name == "nt" and baseEnvironment["CC"] == "cl":
baseEnvironment.AppendUnique(CPPFLAGS = ["/MD"])
# Speed up change analysis
baseEnvironment.Decider('MD5-timestamp')
#####################
# Command-line help #
#####################
Help(buildVariables.GenerateHelpText(baseEnvironment))
##################################
# SCons environment declarations #
##################################
walker = build.DependencyWalker()
# external component database
for script in Glob("components.*.py"):
SConscript(script, exports = "walker", variant_dir = "build", duplicate = 0)
walker.makeEnvironments(baseEnvironment)
if not baseEnvironment["DISABLE_GRAPH"]:
walker.makeDependencyGraph("dependencies.png")
|
normal
|
{
"blob_id": "595912753d778a0fa8332f0df00e06a9da5cde93",
"index": 447,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nSetOption('num_jobs', 4)\nSetOption('implicit_cache', 1)\n<mask token>\nbuildVariables.Add(PathVariable('QTDIR', 'Qt4 root directory',\n '/usr/share/qt4', PathVariable.PathIsDir))\nbuildVariables.Add(PathVariable('OGRE_HOME',\n 'Ogre1.6 root directory (windows only)', None, PathVariable.PathIsDir))\nbuildVariables.Add(PathVariable('PTHREADWIN32_HOME',\n 'PthreadWin32 root directory (windows only)', None, PathVariable.PathIsDir)\n )\nbuildVariables.Add(PathVariable('ODE_HOME', 'ODE 0.11 root directory', None,\n PathVariable.PathIsDir))\nbuildVariables.Add(BoolVariable('DEBUG',\n 'If true, build in debug configuration', False))\nbuildVariables.Add(BoolVariable('FORCE_MINGW',\n 'When both MinGW and VC++ are installed, force the use of the MinGW compiler instead of the default (windows only)'\n , False))\nbuildVariables.Add(BoolVariable('DISABLE_GRAPH',\n 'Disable dependency graph generation', False))\n<mask token>\nif os.name == 'nt':\n if currentVariables['FORCE_MINGW']:\n baseTools.append('mingw')\n else:\n baseTools.append('default')\nelse:\n baseTools.append('default')\n<mask token>\nif baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/EHsc'])\nif baseEnvironment['DEBUG']:\n if baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/Z7'])\n else:\n baseEnvironment.AppendUnique(CPPFLAGS=['-g'])\nelif baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/Ox'])\nelse:\n baseEnvironment.AppendUnique(CPPFLAGS=['-O2'])\nbaseEnvironment.Replace(LIBS=[])\nbaseEnvironment.Replace(LIBPATH=[])\nbaseEnvironment.Replace(CPPPATH=[])\n<mask token>\nbaseEnvironment.Append(BUILDERS={'Ui': uiBuilder})\n<mask token>\nbaseEnvironment.Append(BUILDERS={'Rc': rcBuilder})\nif os.name == 'nt' and baseEnvironment['CC'] == 'cl':\n import _winreg\n key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,\n 'Software\\\\Microsoft\\\\Microsoft SDKs\\\\Windows')\n winSdkHome = _winreg.QueryValueEx(key, 'CurrentInstallFolder')[0]\n _winreg.CloseKey(key)\n baseEnvironment['WINSDK_HOME'] = winSdkHome\n baseEnvironment.AppendUnique(CPPPATH=['$WINSDK_HOME/Include'])\n baseEnvironment.AppendUnique(LIBPATH=['$WINSDK_HOME/Lib'])\nif os.name == 'nt' and baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/MD'])\nbaseEnvironment.Decider('MD5-timestamp')\nHelp(buildVariables.GenerateHelpText(baseEnvironment))\n<mask token>\nfor script in Glob('components.*.py'):\n SConscript(script, exports='walker', variant_dir='build', duplicate=0)\nwalker.makeEnvironments(baseEnvironment)\nif not baseEnvironment['DISABLE_GRAPH']:\n walker.makeDependencyGraph('dependencies.png')\n",
"step-3": "<mask token>\nSetOption('num_jobs', 4)\nSetOption('implicit_cache', 1)\nbuildVariables = Variables('localconfig.py')\nbuildVariables.Add(PathVariable('QTDIR', 'Qt4 root directory',\n '/usr/share/qt4', PathVariable.PathIsDir))\nbuildVariables.Add(PathVariable('OGRE_HOME',\n 'Ogre1.6 root directory (windows only)', None, PathVariable.PathIsDir))\nbuildVariables.Add(PathVariable('PTHREADWIN32_HOME',\n 'PthreadWin32 root directory (windows only)', None, PathVariable.PathIsDir)\n )\nbuildVariables.Add(PathVariable('ODE_HOME', 'ODE 0.11 root directory', None,\n PathVariable.PathIsDir))\nbuildVariables.Add(BoolVariable('DEBUG',\n 'If true, build in debug configuration', False))\nbuildVariables.Add(BoolVariable('FORCE_MINGW',\n 'When both MinGW and VC++ are installed, force the use of the MinGW compiler instead of the default (windows only)'\n , False))\nbuildVariables.Add(BoolVariable('DISABLE_GRAPH',\n 'Disable dependency graph generation', False))\ncurrentVariables = Environment(variables=buildVariables).Dictionary()\nbaseTools = ['qt']\nif os.name == 'nt':\n if currentVariables['FORCE_MINGW']:\n baseTools.append('mingw')\n else:\n baseTools.append('default')\nelse:\n baseTools.append('default')\nbaseEnvironment = Environment(tools=baseTools, variables=buildVariables)\nbaseEnvironment['OSNAME'] = os.name\nbaseEnvironment['SYSPATH'] = os.environ['PATH'].split(os.pathsep)\nif baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/EHsc'])\nif baseEnvironment['DEBUG']:\n if baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/Z7'])\n else:\n baseEnvironment.AppendUnique(CPPFLAGS=['-g'])\nelif baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/Ox'])\nelse:\n baseEnvironment.AppendUnique(CPPFLAGS=['-O2'])\nbaseEnvironment.Replace(LIBS=[])\nbaseEnvironment.Replace(LIBPATH=[])\nbaseEnvironment.Replace(CPPPATH=[])\nuiBuilder = Builder(action='$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE')\nbaseEnvironment.Append(BUILDERS={'Ui': uiBuilder})\nrcBuilder = Builder(action=\n '$QT_BINPATH/rcc $QT_RCCDECLFLAGS -o ${TARGETS[0]} $SOURCE')\nbaseEnvironment.Append(BUILDERS={'Rc': rcBuilder})\nif os.name == 'nt' and baseEnvironment['CC'] == 'cl':\n import _winreg\n key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,\n 'Software\\\\Microsoft\\\\Microsoft SDKs\\\\Windows')\n winSdkHome = _winreg.QueryValueEx(key, 'CurrentInstallFolder')[0]\n _winreg.CloseKey(key)\n baseEnvironment['WINSDK_HOME'] = winSdkHome\n baseEnvironment.AppendUnique(CPPPATH=['$WINSDK_HOME/Include'])\n baseEnvironment.AppendUnique(LIBPATH=['$WINSDK_HOME/Lib'])\nif os.name == 'nt' and baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/MD'])\nbaseEnvironment.Decider('MD5-timestamp')\nHelp(buildVariables.GenerateHelpText(baseEnvironment))\nwalker = build.DependencyWalker()\nfor script in Glob('components.*.py'):\n SConscript(script, exports='walker', variant_dir='build', duplicate=0)\nwalker.makeEnvironments(baseEnvironment)\nif not baseEnvironment['DISABLE_GRAPH']:\n walker.makeDependencyGraph('dependencies.png')\n",
"step-4": "import os\nimport build\nSetOption('num_jobs', 4)\nSetOption('implicit_cache', 1)\nbuildVariables = Variables('localconfig.py')\nbuildVariables.Add(PathVariable('QTDIR', 'Qt4 root directory',\n '/usr/share/qt4', PathVariable.PathIsDir))\nbuildVariables.Add(PathVariable('OGRE_HOME',\n 'Ogre1.6 root directory (windows only)', None, PathVariable.PathIsDir))\nbuildVariables.Add(PathVariable('PTHREADWIN32_HOME',\n 'PthreadWin32 root directory (windows only)', None, PathVariable.PathIsDir)\n )\nbuildVariables.Add(PathVariable('ODE_HOME', 'ODE 0.11 root directory', None,\n PathVariable.PathIsDir))\nbuildVariables.Add(BoolVariable('DEBUG',\n 'If true, build in debug configuration', False))\nbuildVariables.Add(BoolVariable('FORCE_MINGW',\n 'When both MinGW and VC++ are installed, force the use of the MinGW compiler instead of the default (windows only)'\n , False))\nbuildVariables.Add(BoolVariable('DISABLE_GRAPH',\n 'Disable dependency graph generation', False))\ncurrentVariables = Environment(variables=buildVariables).Dictionary()\nbaseTools = ['qt']\nif os.name == 'nt':\n if currentVariables['FORCE_MINGW']:\n baseTools.append('mingw')\n else:\n baseTools.append('default')\nelse:\n baseTools.append('default')\nbaseEnvironment = Environment(tools=baseTools, variables=buildVariables)\nbaseEnvironment['OSNAME'] = os.name\nbaseEnvironment['SYSPATH'] = os.environ['PATH'].split(os.pathsep)\nif baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/EHsc'])\nif baseEnvironment['DEBUG']:\n if baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/Z7'])\n else:\n baseEnvironment.AppendUnique(CPPFLAGS=['-g'])\nelif baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/Ox'])\nelse:\n baseEnvironment.AppendUnique(CPPFLAGS=['-O2'])\nbaseEnvironment.Replace(LIBS=[])\nbaseEnvironment.Replace(LIBPATH=[])\nbaseEnvironment.Replace(CPPPATH=[])\nuiBuilder = Builder(action='$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE')\nbaseEnvironment.Append(BUILDERS={'Ui': uiBuilder})\nrcBuilder = Builder(action=\n '$QT_BINPATH/rcc $QT_RCCDECLFLAGS -o ${TARGETS[0]} $SOURCE')\nbaseEnvironment.Append(BUILDERS={'Rc': rcBuilder})\nif os.name == 'nt' and baseEnvironment['CC'] == 'cl':\n import _winreg\n key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,\n 'Software\\\\Microsoft\\\\Microsoft SDKs\\\\Windows')\n winSdkHome = _winreg.QueryValueEx(key, 'CurrentInstallFolder')[0]\n _winreg.CloseKey(key)\n baseEnvironment['WINSDK_HOME'] = winSdkHome\n baseEnvironment.AppendUnique(CPPPATH=['$WINSDK_HOME/Include'])\n baseEnvironment.AppendUnique(LIBPATH=['$WINSDK_HOME/Lib'])\nif os.name == 'nt' and baseEnvironment['CC'] == 'cl':\n baseEnvironment.AppendUnique(CPPFLAGS=['/MD'])\nbaseEnvironment.Decider('MD5-timestamp')\nHelp(buildVariables.GenerateHelpText(baseEnvironment))\nwalker = build.DependencyWalker()\nfor script in Glob('components.*.py'):\n SConscript(script, exports='walker', variant_dir='build', duplicate=0)\nwalker.makeEnvironments(baseEnvironment)\nif not baseEnvironment['DISABLE_GRAPH']:\n walker.makeDependencyGraph('dependencies.png')\n",
"step-5": "################################################################################\r\n# #\r\n# This file is part of the Potato Engine (PE). #\r\n# #\r\n# Copyright (C) 2007-2010 ElectroMagnetic Potatoes (EMP). #\r\n# See the AUTHORS file for more information. #\r\n# #\r\n# This library is free software; you can redistribute it and/or #\r\n# modify it under the terms of the GNU Lesser General Public #\r\n# License as published by the Free Software Foundation; either #\r\n# version 2.1 of the License, or (at your option) any later version. #\r\n# #\r\n# This library is distributed in the hope that it will be useful, #\r\n# but WITHOUT ANY WARRANTY; without even the implied warranty of #\r\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #\r\n# Lesser General Public License for more details. #\r\n# #\r\n# You should have received a copy of the GNU Lesser General Public License #\r\n# along with this program. If not, see <http://www.gnu.org/licenses/>. #\r\n# #\r\n################################################################################\r\n\r\nimport os\r\nimport build\r\n\r\n################################################################\r\n# Default options (will be overriden by command line switches) #\r\n################################################################\r\n\r\n# Parallel build\r\nSetOption('num_jobs', 4)\r\n\r\n# include cache\r\nSetOption('implicit_cache', 1)\r\n\r\n##########################################################\r\n# Command-line parameters (overriden by localconfig.py) #\r\n##########################################################\r\n\r\nbuildVariables = Variables(\"localconfig.py\")\r\nbuildVariables.Add(PathVariable(\"QTDIR\", \"Qt4 root directory\", \"/usr/share/qt4\", PathVariable.PathIsDir))\r\nbuildVariables.Add(PathVariable(\"OGRE_HOME\", \"Ogre1.6 root directory (windows only)\", None, PathVariable.PathIsDir))\r\nbuildVariables.Add(PathVariable(\"PTHREADWIN32_HOME\", \"PthreadWin32 root directory (windows only)\", None, PathVariable.PathIsDir))\r\nbuildVariables.Add(PathVariable(\"ODE_HOME\", \"ODE 0.11 root directory\", None, PathVariable.PathIsDir))\r\nbuildVariables.Add(BoolVariable(\"DEBUG\", \"If true, build in debug configuration\", False))\r\nbuildVariables.Add(BoolVariable(\"FORCE_MINGW\", \"When both MinGW and VC++ are installed, force the use of the MinGW compiler instead of the default (windows only)\", False))\r\nbuildVariables.Add(BoolVariable(\"DISABLE_GRAPH\", \"Disable dependency graph generation\", False))\r\n\r\n##############################################################################\r\n# Variable value extraction (nasty, should be updated when the API evolves) #\r\n# The reason for having this here is that we have to access variables before #\r\n# we can create the real construction environment (for tools selection) #\r\n##############################################################################\r\n\r\ncurrentVariables = Environment(variables = buildVariables).Dictionary()\r\n\r\n####################\r\n# Base environment #\r\n####################\r\n\r\nbaseTools = [\"qt\"]\r\nif os.name == \"nt\":\r\n\tif currentVariables[\"FORCE_MINGW\"]:\r\n\t\tbaseTools.append(\"mingw\")\r\n\telse:\r\n\t\tbaseTools.append(\"default\")\r\nelse:\r\n\tbaseTools.append(\"default\")\r\n\r\nbaseEnvironment = Environment(tools = baseTools, variables = buildVariables)\r\n\r\n# additional variables\r\nbaseEnvironment[\"OSNAME\"] = os.name\r\nbaseEnvironment[\"SYSPATH\"] = 
os.environ[\"PATH\"].split(os.pathsep)\r\n\r\nif baseEnvironment[\"CC\"] == \"cl\":\r\n\tbaseEnvironment.AppendUnique(CPPFLAGS = [\"/EHsc\"])\r\n\r\n# debug symbols vs. optimization\r\nif baseEnvironment[\"DEBUG\"]:\r\n\tif baseEnvironment[\"CC\"] == \"cl\":\r\n\t\tbaseEnvironment.AppendUnique(CPPFLAGS = [\"/Z7\"])\r\n\telse:\r\n\t\tbaseEnvironment.AppendUnique(CPPFLAGS = [\"-g\"])\r\nelse:\r\n\tif baseEnvironment[\"CC\"] == \"cl\":\r\n\t\tbaseEnvironment.AppendUnique(CPPFLAGS = [\"/Ox\"])\r\n\telse:\r\n\t\tbaseEnvironment.AppendUnique(CPPFLAGS = [\"-O2\"])\r\n\r\n# Qt tool workaround\r\nbaseEnvironment.Replace(LIBS = [])\r\nbaseEnvironment.Replace(LIBPATH = [])\r\nbaseEnvironment.Replace(CPPPATH = [])\r\n\r\n# Qt UI builder\r\nuiBuilder = Builder(action = '$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE')\r\nbaseEnvironment.Append(BUILDERS = {'Ui' : uiBuilder})\r\n\r\n# Qt RC builder\r\nrcBuilder = Builder(action = '$QT_BINPATH/rcc $QT_RCCDECLFLAGS -o ${TARGETS[0]} $SOURCE')\r\nbaseEnvironment.Append(BUILDERS = {'Rc' : rcBuilder})\r\n\r\n# Under windows, add the platform SDK\r\nif os.name == \"nt\" and baseEnvironment[\"CC\"] == \"cl\":\r\n\timport _winreg\r\n\tkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, \"Software\\\\Microsoft\\\\Microsoft SDKs\\\\Windows\")\r\n\twinSdkHome = _winreg.QueryValueEx(key, \"CurrentInstallFolder\")[0]\r\n\t_winreg.CloseKey(key)\r\n\tbaseEnvironment[\"WINSDK_HOME\"] = winSdkHome\r\n\tbaseEnvironment.AppendUnique(CPPPATH = [\"$WINSDK_HOME/Include\"])\r\n\tbaseEnvironment.AppendUnique(LIBPATH = [\"$WINSDK_HOME/Lib\"])\r\n\r\n# Do not rely on VC++ runtime library\r\nif os.name == \"nt\" and baseEnvironment[\"CC\"] == \"cl\":\r\n\tbaseEnvironment.AppendUnique(CPPFLAGS = [\"/MD\"])\r\n\r\n# Speed up change analysis\r\nbaseEnvironment.Decider('MD5-timestamp')\r\n\r\n#####################\r\n# Command-line help #\r\n#####################\r\n\r\nHelp(buildVariables.GenerateHelpText(baseEnvironment))\r\n\r\n##################################\r\n# SCons environment declarations #\r\n##################################\r\n\r\nwalker = build.DependencyWalker()\r\n\r\n# external component database\r\nfor script in Glob(\"components.*.py\"):\r\n\tSConscript(script, exports = \"walker\", variant_dir = \"build\", duplicate = 0)\r\n\r\nwalker.makeEnvironments(baseEnvironment)\r\nif not baseEnvironment[\"DISABLE_GRAPH\"]:\r\n\twalker.makeDependencyGraph(\"dependencies.png\")\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
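Since the SConstruct above reads its overrides from localconfig.py via Variables("localconfig.py"), a minimal example of that file might look like this; the values are illustrative, and only variables declared with buildVariables.Add are honored.

# Example localconfig.py (sketch; plain Python assignments)
QTDIR = "/usr/share/qt4"
DEBUG = True
DISABLE_GRAPH = True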
import pygame as pg

pg.init()  # initialize pygame modules before creating the display
screen = pg.display.set_mode((640, 380))
|
normal
|
{
"blob_id": "c1374a048187807deac5d28dda4fbc7beeccf8f5",
"index": 5221,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nscreen = pg.display.set_mode((640, 380))\n",
"step-3": "import pygame as pg\nscreen = pg.display.set_mode((640, 380))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# -*- coding: utf-8 -*-
"""
===================================
Demo of DBSCAN clustering algorithm
===================================
Finds core samples of high density and expands clusters from them.
"""
import scipy as sp
import numpy as np
from scipy import spatial
print(__doc__)
from sklearn.cluster import DBSCAN
from sklearn import metrics
from sklearn.datasets.samples_generator import make_blobs
from sklearn.preprocessing import StandardScaler
##############################################################################
# Haversine distance calculation
EARTHRADIUS = 6371.0
def getDistanceByHaversine(loc1, loc2):
'''Haversine formula - give coordinates as a 2D numpy array of
    (lon_decimal, lat_decimal) pairs (longitude first, matching the unpacking below)'''
#
# "unpack" our numpy array, this extracts column wise arrays
lat1 = loc1[1]
lon1 = loc1[0]
lat2 = loc2[1]
lon2 = loc2[0]
#
# convert to radians ##### Completely identical
lon1 = lon1 * sp.pi / 180.0
lon2 = lon2 * sp.pi / 180.0
lat1 = lat1 * sp.pi / 180.0
lat2 = lat2 * sp.pi / 180.0
#
# haversine formula #### Same, but atan2 named arctan2 in numpy
dlon = lon2 - lon1
dlat = lat2 - lat1
a = (np.sin(dlat/2))**2 + np.cos(lat1) * np.cos(lat2) * (np.sin(dlon/2.0))**2
c = 2.0 * np.arctan2(np.sqrt(a), np.sqrt(1.0-a))
km = EARTHRADIUS * c
return km
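# Quick sanity check for the helper above (illustrative coordinates, not taken
# from the dataset): Barcelona (lon 2.17, lat 41.39) to Madrid (lon -3.70,
# lat 40.42) should come out at roughly 505 km:
#   getDistanceByHaversine(np.array([2.17, 41.39]), np.array([-3.70, 40.42]))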
##############################################################################
# Create a Matrix with longitude and latitude
import csv
import re
with open('users_bcn.csv', 'rb') as csvfile:
data = csv.reader(csvfile, delimiter=',', quotechar='|')
row_count = sum(1 for row in data)
gps_matrix = [[0 for i in range(row_count)] for j in range(2)]
with open('users_bcn.csv', 'rb') as csvfile:
data = csv.reader(csvfile, delimiter=',', quotechar='|')
for key, row in enumerate(data):
if key != 0:
try:
gps_matrix[0][key] = float(row[2].replace('"',''))
gps_matrix[1][key] = float(row[1].replace('"',''))
except:
a = float(row[1].replace(',',''))
print('problem string to float')
##############################################################################
# Calculate the Distance matrix
# pdist expects one observation per row, so transpose the 2 x N matrix first
D = spatial.distance.pdist(np.array(gps_matrix).T, lambda u, v: getDistanceByHaversine(u, v))
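# pdist returns the condensed (upper-triangular) distance vector. To cluster on
# these precomputed geographic distances instead of the synthetic blobs below,
# one sketch (eps is then in kilometres; the value is only illustrative) is:
#   from scipy.spatial.distance import squareform
#   db_geo = DBSCAN(eps=5.0, min_samples=5, metric='precomputed').fit(squareform(D))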
##############################################################################
# Generate sample data
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(n_samples=750, centers=centers, cluster_std=0.4,
random_state=0)
X = StandardScaler().fit_transform(X)
##############################################################################
# Compute DBSCAN
db = DBSCAN(eps=0.3, min_samples=10).fit(X)
core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
core_samples_mask[db.core_sample_indices_] = True
labels = db.labels_
# Number of clusters in labels, ignoring noise if present.
n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)
print('Estimated number of clusters: %d' % n_clusters_)
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels_true, labels))
print("Completeness: %0.3f" % metrics.completeness_score(labels_true, labels))
print("V-measure: %0.3f" % metrics.v_measure_score(labels_true, labels))
print("Adjusted Rand Index: %0.3f"
% metrics.adjusted_rand_score(labels_true, labels))
print("Adjusted Mutual Information: %0.3f"
% metrics.adjusted_mutual_info_score(labels_true, labels))
print("Silhouette Coefficient: %0.3f"
% metrics.silhouette_score(X, labels))
##############################################################################
# Plot result
import matplotlib.pyplot as plt
# Black is removed from the colormap and used for noise instead.
unique_labels = set(labels)
colors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))
for k, col in zip(unique_labels, colors):
if k == -1:
# Black used for noise.
col = 'k'
class_member_mask = (labels == k)
xy = X[class_member_mask & core_samples_mask]
plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
markeredgecolor='k', markersize=14)
xy = X[class_member_mask & ~core_samples_mask]
plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
markeredgecolor='k', markersize=6)
plt.title('Estimated number of clusters: %d' % n_clusters_)
plt.show()
|
normal
|
{
"blob_id": "d2e3ac490ce5fdc20976567fa320a9e6a53cbe34",
"index": 1037,
"step-1": "<mask token>\n\n\ndef getDistanceByHaversine(loc1, loc2):\n \"\"\"Haversine formula - give coordinates as a 2D numpy array of\n (lat_denter link description hereecimal,lon_decimal) pairs\"\"\"\n lat1 = loc1[1]\n lon1 = loc1[0]\n lat2 = loc2[1]\n lon2 = loc2[0]\n lon1 = lon1 * sp.pi / 180.0\n lon2 = lon2 * sp.pi / 180.0\n lat1 = lat1 * sp.pi / 180.0\n lat2 = lat2 * sp.pi / 180.0\n dlon = lon2 - lon1\n dlat = lat2 - lat1\n a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.0\n ) ** 2\n c = 2.0 * np.arctan2(np.sqrt(a), np.sqrt(1.0 - a))\n km = EARTHRADIUS * c\n return km\n\n\n<mask token>\n",
"step-2": "<mask token>\nprint(__doc__)\n<mask token>\n\n\ndef getDistanceByHaversine(loc1, loc2):\n \"\"\"Haversine formula - give coordinates as a 2D numpy array of\n (lat_denter link description hereecimal,lon_decimal) pairs\"\"\"\n lat1 = loc1[1]\n lon1 = loc1[0]\n lat2 = loc2[1]\n lon2 = loc2[0]\n lon1 = lon1 * sp.pi / 180.0\n lon2 = lon2 * sp.pi / 180.0\n lat1 = lat1 * sp.pi / 180.0\n lat2 = lat2 * sp.pi / 180.0\n dlon = lon2 - lon1\n dlat = lat2 - lat1\n a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.0\n ) ** 2\n c = 2.0 * np.arctan2(np.sqrt(a), np.sqrt(1.0 - a))\n km = EARTHRADIUS * c\n return km\n\n\n<mask token>\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n row_count = sum(1 for row in data)\n gps_matrix = [[(0) for i in range(row_count)] for j in range(2)]\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n for key, row in enumerate(data):\n if key != 0:\n try:\n gps_matrix[0][key] = float(row[2].replace('\"', ''))\n gps_matrix[1][key] = float(row[1].replace('\"', ''))\n except:\n a = float(row[1].replace(',', ''))\n print('problem string to float')\n<mask token>\nprint('Estimated number of clusters: %d' % n_clusters_)\nprint('Homogeneity: %0.3f' % metrics.homogeneity_score(labels_true, labels))\nprint('Completeness: %0.3f' % metrics.completeness_score(labels_true, labels))\nprint('V-measure: %0.3f' % metrics.v_measure_score(labels_true, labels))\nprint('Adjusted Rand Index: %0.3f' % metrics.adjusted_rand_score(\n labels_true, labels))\nprint('Adjusted Mutual Information: %0.3f' % metrics.\n adjusted_mutual_info_score(labels_true, labels))\nprint('Silhouette Coefficient: %0.3f' % metrics.silhouette_score(X, labels))\n<mask token>\nfor k, col in zip(unique_labels, colors):\n if k == -1:\n col = 'k'\n class_member_mask = labels == k\n xy = X[class_member_mask & core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col, markeredgecolor=\n 'k', markersize=14)\n xy = X[class_member_mask & ~core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col, markeredgecolor=\n 'k', markersize=6)\nplt.title('Estimated number of clusters: %d' % n_clusters_)\nplt.show()\n",
"step-3": "<mask token>\nprint(__doc__)\n<mask token>\nEARTHRADIUS = 6371.0\n\n\ndef getDistanceByHaversine(loc1, loc2):\n \"\"\"Haversine formula - give coordinates as a 2D numpy array of\n (lat_denter link description hereecimal,lon_decimal) pairs\"\"\"\n lat1 = loc1[1]\n lon1 = loc1[0]\n lat2 = loc2[1]\n lon2 = loc2[0]\n lon1 = lon1 * sp.pi / 180.0\n lon2 = lon2 * sp.pi / 180.0\n lat1 = lat1 * sp.pi / 180.0\n lat2 = lat2 * sp.pi / 180.0\n dlon = lon2 - lon1\n dlat = lat2 - lat1\n a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.0\n ) ** 2\n c = 2.0 * np.arctan2(np.sqrt(a), np.sqrt(1.0 - a))\n km = EARTHRADIUS * c\n return km\n\n\n<mask token>\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n row_count = sum(1 for row in data)\n gps_matrix = [[(0) for i in range(row_count)] for j in range(2)]\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n for key, row in enumerate(data):\n if key != 0:\n try:\n gps_matrix[0][key] = float(row[2].replace('\"', ''))\n gps_matrix[1][key] = float(row[1].replace('\"', ''))\n except:\n a = float(row[1].replace(',', ''))\n print('problem string to float')\nD = spatial.distance.pdist(gps_matrix, lambda u, v: getDistanceByHaversine(\n u, v))\ncenters = [[1, 1], [-1, -1], [1, -1]]\nX, labels_true = make_blobs(n_samples=750, centers=centers, cluster_std=0.4,\n random_state=0)\nX = StandardScaler().fit_transform(X)\ndb = DBSCAN(eps=0.3, min_samples=10).fit(X)\ncore_samples_mask = np.zeros_like(db.labels_, dtype=bool)\ncore_samples_mask[db.core_sample_indices_] = True\nlabels = db.labels_\nn_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)\nprint('Estimated number of clusters: %d' % n_clusters_)\nprint('Homogeneity: %0.3f' % metrics.homogeneity_score(labels_true, labels))\nprint('Completeness: %0.3f' % metrics.completeness_score(labels_true, labels))\nprint('V-measure: %0.3f' % metrics.v_measure_score(labels_true, labels))\nprint('Adjusted Rand Index: %0.3f' % metrics.adjusted_rand_score(\n labels_true, labels))\nprint('Adjusted Mutual Information: %0.3f' % metrics.\n adjusted_mutual_info_score(labels_true, labels))\nprint('Silhouette Coefficient: %0.3f' % metrics.silhouette_score(X, labels))\n<mask token>\nunique_labels = set(labels)\ncolors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))\nfor k, col in zip(unique_labels, colors):\n if k == -1:\n col = 'k'\n class_member_mask = labels == k\n xy = X[class_member_mask & core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col, markeredgecolor=\n 'k', markersize=14)\n xy = X[class_member_mask & ~core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col, markeredgecolor=\n 'k', markersize=6)\nplt.title('Estimated number of clusters: %d' % n_clusters_)\nplt.show()\n",
"step-4": "<mask token>\nimport scipy as sp\nimport numpy as np\nfrom scipy import spatial\nprint(__doc__)\nfrom sklearn.cluster import DBSCAN\nfrom sklearn import metrics\nfrom sklearn.datasets.samples_generator import make_blobs\nfrom sklearn.preprocessing import StandardScaler\nEARTHRADIUS = 6371.0\n\n\ndef getDistanceByHaversine(loc1, loc2):\n \"\"\"Haversine formula - give coordinates as a 2D numpy array of\n (lat_denter link description hereecimal,lon_decimal) pairs\"\"\"\n lat1 = loc1[1]\n lon1 = loc1[0]\n lat2 = loc2[1]\n lon2 = loc2[0]\n lon1 = lon1 * sp.pi / 180.0\n lon2 = lon2 * sp.pi / 180.0\n lat1 = lat1 * sp.pi / 180.0\n lat2 = lat2 * sp.pi / 180.0\n dlon = lon2 - lon1\n dlat = lat2 - lat1\n a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.0\n ) ** 2\n c = 2.0 * np.arctan2(np.sqrt(a), np.sqrt(1.0 - a))\n km = EARTHRADIUS * c\n return km\n\n\nimport csv\nimport re\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n row_count = sum(1 for row in data)\n gps_matrix = [[(0) for i in range(row_count)] for j in range(2)]\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n for key, row in enumerate(data):\n if key != 0:\n try:\n gps_matrix[0][key] = float(row[2].replace('\"', ''))\n gps_matrix[1][key] = float(row[1].replace('\"', ''))\n except:\n a = float(row[1].replace(',', ''))\n print('problem string to float')\nD = spatial.distance.pdist(gps_matrix, lambda u, v: getDistanceByHaversine(\n u, v))\ncenters = [[1, 1], [-1, -1], [1, -1]]\nX, labels_true = make_blobs(n_samples=750, centers=centers, cluster_std=0.4,\n random_state=0)\nX = StandardScaler().fit_transform(X)\ndb = DBSCAN(eps=0.3, min_samples=10).fit(X)\ncore_samples_mask = np.zeros_like(db.labels_, dtype=bool)\ncore_samples_mask[db.core_sample_indices_] = True\nlabels = db.labels_\nn_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)\nprint('Estimated number of clusters: %d' % n_clusters_)\nprint('Homogeneity: %0.3f' % metrics.homogeneity_score(labels_true, labels))\nprint('Completeness: %0.3f' % metrics.completeness_score(labels_true, labels))\nprint('V-measure: %0.3f' % metrics.v_measure_score(labels_true, labels))\nprint('Adjusted Rand Index: %0.3f' % metrics.adjusted_rand_score(\n labels_true, labels))\nprint('Adjusted Mutual Information: %0.3f' % metrics.\n adjusted_mutual_info_score(labels_true, labels))\nprint('Silhouette Coefficient: %0.3f' % metrics.silhouette_score(X, labels))\nimport matplotlib.pyplot as plt\nunique_labels = set(labels)\ncolors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))\nfor k, col in zip(unique_labels, colors):\n if k == -1:\n col = 'k'\n class_member_mask = labels == k\n xy = X[class_member_mask & core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col, markeredgecolor=\n 'k', markersize=14)\n xy = X[class_member_mask & ~core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col, markeredgecolor=\n 'k', markersize=6)\nplt.title('Estimated number of clusters: %d' % n_clusters_)\nplt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\n===================================\nDemo of DBSCAN clustering algorithm\n===================================\n\nFinds core samples of high density and expands clusters from them.\n\n\"\"\"\nimport scipy as sp\nimport numpy as np\n\nfrom scipy import spatial\nprint(__doc__)\n\n\nfrom sklearn.cluster import DBSCAN\nfrom sklearn import metrics\nfrom sklearn.datasets.samples_generator import make_blobs\nfrom sklearn.preprocessing import StandardScaler\n\n##############################################################################\n# Calcule Distance Haversine Methods\n\nEARTHRADIUS = 6371.0\n\ndef getDistanceByHaversine(loc1, loc2):\n '''Haversine formula - give coordinates as a 2D numpy array of\n (lat_denter link description hereecimal,lon_decimal) pairs'''\n #\n # \"unpack\" our numpy array, this extracts column wise arrays\n lat1 = loc1[1]\n lon1 = loc1[0]\n lat2 = loc2[1]\n lon2 = loc2[0]\n #\n # convert to radians ##### Completely identical\n lon1 = lon1 * sp.pi / 180.0\n lon2 = lon2 * sp.pi / 180.0\n lat1 = lat1 * sp.pi / 180.0\n lat2 = lat2 * sp.pi / 180.0\n #\n # haversine formula #### Same, but atan2 named arctan2 in numpy\n dlon = lon2 - lon1\n dlat = lat2 - lat1\n a = (np.sin(dlat/2))**2 + np.cos(lat1) * np.cos(lat2) * (np.sin(dlon/2.0))**2\n c = 2.0 * np.arctan2(np.sqrt(a), np.sqrt(1.0-a))\n km = EARTHRADIUS * c\n return km\n\n\n##############################################################################\n# Create a Matrix with longitude and latitude\n\nimport csv\nimport re\n\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n\n row_count = sum(1 for row in data)\n gps_matrix = [[0 for i in range(row_count)] for j in range(2)]\n\nwith open('users_bcn.csv', 'rb') as csvfile:\n data = csv.reader(csvfile, delimiter=',', quotechar='|')\n\n for key, row in enumerate(data):\n if key != 0:\n try:\n gps_matrix[0][key] = float(row[2].replace('\"',''))\n gps_matrix[1][key] = float(row[1].replace('\"',''))\n except:\n a = float(row[1].replace(',',''))\n print('problem string to float')\n\n##############################################################################\n# Calculate the Distance matrix\n\nD = spatial.distance.pdist(gps_matrix, lambda u, v: getDistanceByHaversine(u,v))\n\n\n##############################################################################\n# Generate sample data\ncenters = [[1, 1], [-1, -1], [1, -1]]\nX, labels_true = make_blobs(n_samples=750, centers=centers, cluster_std=0.4,\n random_state=0)\n\nX = StandardScaler().fit_transform(X)\n\n##############################################################################\n# Compute DBSCAN\ndb = DBSCAN(eps=0.3, min_samples=10).fit(X)\ncore_samples_mask = np.zeros_like(db.labels_, dtype=bool)\ncore_samples_mask[db.core_sample_indices_] = True\nlabels = db.labels_\n\n# Number of clusters in labels, ignoring noise if present.\nn_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)\n\nprint('Estimated number of clusters: %d' % n_clusters_)\nprint(\"Homogeneity: %0.3f\" % metrics.homogeneity_score(labels_true, labels))\nprint(\"Completeness: %0.3f\" % metrics.completeness_score(labels_true, labels))\nprint(\"V-measure: %0.3f\" % metrics.v_measure_score(labels_true, labels))\nprint(\"Adjusted Rand Index: %0.3f\"\n % metrics.adjusted_rand_score(labels_true, labels))\nprint(\"Adjusted Mutual Information: %0.3f\"\n % metrics.adjusted_mutual_info_score(labels_true, labels))\nprint(\"Silhouette Coefficient: %0.3f\"\n % 
metrics.silhouette_score(X, labels))\n\n##############################################################################\n# Plot result\nimport matplotlib.pyplot as plt\n\n# Black removed and is used for noise instead.\nunique_labels = set(labels)\ncolors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))\nfor k, col in zip(unique_labels, colors):\n if k == -1:\n # Black used for noise.\n col = 'k'\n\n class_member_mask = (labels == k)\n\n xy = X[class_member_mask & core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,\n markeredgecolor='k', markersize=14)\n\n xy = X[class_member_mask & ~core_samples_mask]\n plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,\n markeredgecolor='k', markersize=6)\n\nplt.title('Estimated number of clusters: %d' % n_clusters_)\nplt.show()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import copy
import datetime
from sacred import Experiment
from tqdm import tqdm
from mms_msg.databases.classical.full_overlap import WSJ2Mix
import paderbox as pb
import padertorch as pt
ex = Experiment('mixture_generator_create_json')
@ex.config
def defaults():
json_path = 'database.json'
database = {
'factory': WSJ2Mix,
}
pt.Configurable.get_config(database)
@ex.automain
def main(json_path, database, _log):
database_config = database
database = pt.configurable.config_to_instance(database)
database_dict = {
'datasets': {
dataset_name: dict(tqdm(
database.get_dataset(dataset_name).items(),
desc=dataset_name,
)) for dataset_name in database.dataset_names
},
'meta': {
'config': pt.configurable.recursive_class_to_str(
copy.deepcopy(database_config)
),
'generated': datetime.datetime.now(),
}
}
pb.io.dump(database_dict, json_path)
_log.info(f'Wrote file: {json_path}')
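
# Typical invocation (sacred parses "with key=value" config overrides on the
# command line; the script filename here is an assumption):
#   python create_json.py with json_path=wsj2mix.json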
|
normal
|
{
"blob_id": "f39130099ccf467623d65ac328fd02538044d36a",
"index": 6476,
"step-1": "<mask token>\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-2": "<mask token>\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-3": "<mask token>\nex = Experiment('mixture_generator_create_json')\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-4": "import copy\nimport datetime\nfrom sacred import Experiment\nfrom tqdm import tqdm\nfrom mms_msg.databases.classical.full_overlap import WSJ2Mix\nimport paderbox as pb\nimport padertorch as pt\nex = Experiment('mixture_generator_create_json')\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {'factory': WSJ2Mix}\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {'datasets': {dataset_name: dict(tqdm(database.\n get_dataset(dataset_name).items(), desc=dataset_name)) for\n dataset_name in database.dataset_names}, 'meta': {'config': pt.\n configurable.recursive_class_to_str(copy.deepcopy(database_config)),\n 'generated': datetime.datetime.now()}}\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-5": "import copy\nimport datetime\n\nfrom sacred import Experiment\nfrom tqdm import tqdm\n\nfrom mms_msg.databases.classical.full_overlap import WSJ2Mix\nimport paderbox as pb\nimport padertorch as pt\n\nex = Experiment('mixture_generator_create_json')\n\n\[email protected]\ndef defaults():\n json_path = 'database.json'\n database = {\n 'factory': WSJ2Mix,\n }\n pt.Configurable.get_config(database)\n\n\[email protected]\ndef main(json_path, database, _log):\n database_config = database\n database = pt.configurable.config_to_instance(database)\n database_dict = {\n 'datasets': {\n dataset_name: dict(tqdm(\n database.get_dataset(dataset_name).items(),\n desc=dataset_name,\n )) for dataset_name in database.dataset_names\n },\n 'meta': {\n 'config': pt.configurable.recursive_class_to_str(\n copy.deepcopy(database_config)\n ),\n 'generated': datetime.datetime.now(),\n }\n }\n pb.io.dump(database_dict, json_path)\n _log.info(f'Wrote file: {json_path}')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: utf-8 -*-
import random
import gym
import numpy as np
from collections import deque
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from simulation_utils import box, simulation
from kinematics import pose3D
a = np.log(2)/25
apdataX = np.random.random((5, 35))
quarter_way_arr = [False, False, False]
quarter_way_arr[0] = True
quarter_way_arr[1] = True
quarter_way_arr[2] = True
mat = np.eye(3)
print(np.linalg.norm(mat))
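# For a 2-D array np.linalg.norm defaults to the Frobenius norm, so the
# 3x3 identity above prints sqrt(3) ~= 1.7320508.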
|
normal
|
{
"blob_id": "7e7e96fb9377e4dc59a46a46951f5057ecae419a",
"index": 201,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(np.linalg.norm(mat))\n",
"step-3": "<mask token>\na = np.log(2) / 25\napdataX = np.random.random((5, 35))\nquarter_way_arr = [False, False, False]\nquarter_way_arr[0] = True\nquarter_way_arr[1] = True\nquarter_way_arr[2] = True\nmat = np.eye(3)\nprint(np.linalg.norm(mat))\n",
"step-4": "import random\nimport gym\nimport numpy as np\nfrom collections import deque\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.optimizers import Adam\nfrom simulation_utils import box, simulation\nfrom kinematics import pose3D\na = np.log(2) / 25\napdataX = np.random.random((5, 35))\nquarter_way_arr = [False, False, False]\nquarter_way_arr[0] = True\nquarter_way_arr[1] = True\nquarter_way_arr[2] = True\nmat = np.eye(3)\nprint(np.linalg.norm(mat))\n",
"step-5": "# -*- coding: utf-8 -*-\nimport random\nimport gym\nimport numpy as np\nfrom collections import deque\nfrom keras.models import Sequential\nfrom keras.layers import Dense\nfrom keras.optimizers import Adam\nfrom simulation_utils import box, simulation\nfrom kinematics import pose3D\n\na = np.log(2)/25\n\napdataX = np.random.random((5, 35))\nquarter_way_arr = [False, False, False]\n\nquarter_way_arr[0] = True\nquarter_way_arr[1] = True\nquarter_way_arr[2] = True\n\nmat = np.eye(3)\nprint(np.linalg.norm(mat))\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Country, TouristPlaces, Users
# Connect to the database and create a session shortcut for easier updates
engine = create_engine('sqlite:///country_catalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Creating an user
user_1 = Users(name="admin", email="[email protected]")
session.add(user_1)
session.commit()
# India
country_1 = Country(user_id=1, name="India")
session.add(country_1)
session.commit()
# Australia
country_2 = Country(user_id=1, name="Australia")
session.add(country_2)
session.commit()
# England
country_3 = Country(user_id=1, name="England")
session.add(country_3)
session.commit()
# Paris
country_4 = Country(user_id=1, name="Paris")
session.add(country_4)
session.commit()
# USA
country_5 = Country(user_id=1, name="USA")
session.add(country_5)
session.commit()
# Mexico
country_6 = Country(user_id=1, name="Mexico")
session.add(country_6)
session.commit()
# SriLanka
country_7 = Country(user_id=1, name="Srilanka")
session.add(country_7)
session.commit()
# Maldives
country_8 = Country(user_id=1, name="Maldives")
session.add(country_8)
session.commit()
# Adding tourist attractions to countries
places = TouristPlaces(user_id=1, name="Taj Mahal",
description="Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal "
"It is bultby using white marbel",
country=country_1)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Red Fort",
description="Red fort is the histroric fort in the city of Delhi,India."
"It is the main residence of the emperors of mughal Dynasty.",
country=country_1)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Canberra",
description="It is the home for National GAllery of Australia"
"and a wide varierty of cultural and historic sites",
country=country_2)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Perth",
description="The west side ofAustralia is home to the city of Perth"
"It is bordered by Indian Ocean",
country=country_2)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Tower Of London",
description="It is one of the world Heritage site"
"Other highlights are Crown Jewels Exhibition",
country=country_3)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="British Museum",
description="It contains the collection of worlds finest antiquites"
"The famous artifacts are Eglin marbles",
country=country_3)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Eiffel Tower",
description="The Eiffel-tower is wrought iron lattice"
"It is named after the Engineer Gustav Eiffel",
country=country_4)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="places of Versallies",
description="The Palce of Versallies is the Principle Royal"
"residence.",
country=country_4)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Grand Canyon Village",
description="Grand Canyon is located in south Rim of Grand Canyon"
"It is focussed on accomadating tourists visiting Grand Canyon",
country=country_5)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Statue if Liberty",
description="Statue of Liberty is Colossal neo-classical sculpture"
"In New-york Hourbor Newyork",
country=country_5)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Mexico City",
description="Mexico city is densely populated and high altitude capital Of Mexico"
"It is the home for zoo,Muesuem of modern Art.",
country=country_6)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Tulum",
description="Tulum is a town in the Carribean coatline of Mexico"
"It is well-known for beaches and ruins of Ancient Mayan port city",
country=country_6)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Colombo",
description="It is the Capital city of Srilanka"
"It sheritage is reflected in its Architecture",
country=country_7)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Kandy",
description="Kandy is the largest city of central Sri Lanka."
"It is surrounded by mountains which is home to tea Plantations.",
country=country_7)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Male",
description="It is among the tooped tourist Attractions of Maldives"
"It has considerably moderate tempaerature through out the year",
country=country_8)
session.add(places)
session.commit()
places = TouristPlaces(user_id=1, name="Sun Island",
description="It is adorned with some sparkling beaches"
"beuatigul flowers and lavish greenary that pulls a great number of tourists",
country=country_8)
session.add(places)
session.commit()
print("added countries and Tourist Places added")
|
normal
|
{
"blob_id": "21b9844fce10d16a14050a782ce7e15e3f6fb657",
"index": 5737,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsession.add(user_1)\nsession.commit()\n<mask token>\nsession.add(country_1)\nsession.commit()\n<mask token>\nsession.add(country_2)\nsession.commit()\n<mask token>\nsession.add(country_3)\nsession.commit()\n<mask token>\nsession.add(country_4)\nsession.commit()\n<mask token>\nsession.add(country_5)\nsession.commit()\n<mask token>\nsession.add(country_6)\nsession.commit()\n<mask token>\nsession.add(country_7)\nsession.commit()\n<mask token>\nsession.add(country_8)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\n<mask token>\nsession.add(places)\nsession.commit()\nprint('added countries and Tourist Places added')\n",
"step-3": "<mask token>\nengine = create_engine('sqlite:///country_catalog.db')\nBase.metadata.bind = engine\nDBSession = sessionmaker(bind=engine)\nsession = DBSession()\nuser_1 = Users(name='admin', email='[email protected]')\nsession.add(user_1)\nsession.commit()\ncountry_1 = Country(user_id=1, name='India')\nsession.add(country_1)\nsession.commit()\ncountry_2 = Country(user_id=1, name='Australia')\nsession.add(country_2)\nsession.commit()\ncountry_3 = Country(user_id=1, name='England')\nsession.add(country_3)\nsession.commit()\ncountry_4 = Country(user_id=1, name='Paris')\nsession.add(country_4)\nsession.commit()\ncountry_5 = Country(user_id=1, name='USA')\nsession.add(country_5)\nsession.commit()\ncountry_6 = Country(user_id=1, name='Mexico')\nsession.add(country_6)\nsession.commit()\ncountry_7 = Country(user_id=1, name='Srilanka')\nsession.add(country_7)\nsession.commit()\ncountry_8 = Country(user_id=1, name='Maldives')\nsession.add(country_8)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Taj Mahal', description=\n 'Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal It is bultby using white marbel'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Red Fort', description=\n 'Red fort is the histroric fort in the city of Delhi,India.It is the main residence of the emperors of mughal Dynasty.'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Canberra', description=\n 'It is the home for National GAllery of Australiaand a wide varierty of cultural and historic sites'\n , country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Perth', description=\n 'The west side ofAustralia is home to the city of PerthIt is bordered by Indian Ocean'\n , country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Tower Of London', description=\n 'It is one of the world Heritage siteOther highlights are Crown Jewels Exhibition'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='British Museum', description=\n 'It contains the collection of worlds finest antiquitesThe famous artifacts are Eglin marbles'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Eiffel Tower', description=\n 'The Eiffel-tower is wrought iron latticeIt is named after the Engineer Gustav Eiffel'\n , country=country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='places of Versallies', description=\n 'The Palce of Versallies is the Principle Royalresidence.', country=\n country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Grand Canyon Village', description=\n 'Grand Canyon is located in south Rim of Grand CanyonIt is focussed on accomadating tourists visiting Grand Canyon'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Statue if Liberty', description=\n 'Statue of Liberty is Colossal neo-classical sculptureIn New-york Hourbor Newyork'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Mexico City', description=\n 'Mexico city is densely populated and high altitude capital Of MexicoIt is the home for zoo,Muesuem of modern Art.'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, 
name='Tulum', description=\n 'Tulum is a town in the Carribean coatline of MexicoIt is well-known for beaches and ruins of Ancient Mayan port city'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Colombo', description=\n 'It is the Capital city of SrilankaIt sheritage is reflected in its Architecture'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Kandy', description=\n 'Kandy is the largest city of central Sri Lanka.It is surrounded by mountains which is home to tea Plantations.'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Male', description=\n 'It is among the tooped tourist Attractions of MaldivesIt has considerably moderate tempaerature through out the year'\n , country=country_8)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Sun Island', description=\n 'It is adorned with some sparkling beachesbeuatigul flowers and lavish greenary that pulls a great number of tourists'\n , country=country_8)\nsession.add(places)\nsession.commit()\nprint('added countries and Tourist Places added')\n",
"step-4": "from sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\nfrom database_setup import Base, Country, TouristPlaces, Users\nengine = create_engine('sqlite:///country_catalog.db')\nBase.metadata.bind = engine\nDBSession = sessionmaker(bind=engine)\nsession = DBSession()\nuser_1 = Users(name='admin', email='[email protected]')\nsession.add(user_1)\nsession.commit()\ncountry_1 = Country(user_id=1, name='India')\nsession.add(country_1)\nsession.commit()\ncountry_2 = Country(user_id=1, name='Australia')\nsession.add(country_2)\nsession.commit()\ncountry_3 = Country(user_id=1, name='England')\nsession.add(country_3)\nsession.commit()\ncountry_4 = Country(user_id=1, name='Paris')\nsession.add(country_4)\nsession.commit()\ncountry_5 = Country(user_id=1, name='USA')\nsession.add(country_5)\nsession.commit()\ncountry_6 = Country(user_id=1, name='Mexico')\nsession.add(country_6)\nsession.commit()\ncountry_7 = Country(user_id=1, name='Srilanka')\nsession.add(country_7)\nsession.commit()\ncountry_8 = Country(user_id=1, name='Maldives')\nsession.add(country_8)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Taj Mahal', description=\n 'Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal It is bultby using white marbel'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Red Fort', description=\n 'Red fort is the histroric fort in the city of Delhi,India.It is the main residence of the emperors of mughal Dynasty.'\n , country=country_1)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Canberra', description=\n 'It is the home for National GAllery of Australiaand a wide varierty of cultural and historic sites'\n , country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Perth', description=\n 'The west side ofAustralia is home to the city of PerthIt is bordered by Indian Ocean'\n , country=country_2)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Tower Of London', description=\n 'It is one of the world Heritage siteOther highlights are Crown Jewels Exhibition'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='British Museum', description=\n 'It contains the collection of worlds finest antiquitesThe famous artifacts are Eglin marbles'\n , country=country_3)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Eiffel Tower', description=\n 'The Eiffel-tower is wrought iron latticeIt is named after the Engineer Gustav Eiffel'\n , country=country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='places of Versallies', description=\n 'The Palce of Versallies is the Principle Royalresidence.', country=\n country_4)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Grand Canyon Village', description=\n 'Grand Canyon is located in south Rim of Grand CanyonIt is focussed on accomadating tourists visiting Grand Canyon'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Statue if Liberty', description=\n 'Statue of Liberty is Colossal neo-classical sculptureIn New-york Hourbor Newyork'\n , country=country_5)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Mexico City', description=\n 'Mexico city is densely populated and high altitude capital Of MexicoIt is the home for 
zoo,Muesuem of modern Art.'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Tulum', description=\n 'Tulum is a town in the Carribean coatline of MexicoIt is well-known for beaches and ruins of Ancient Mayan port city'\n , country=country_6)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Colombo', description=\n 'It is the Capital city of SrilankaIt sheritage is reflected in its Architecture'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Kandy', description=\n 'Kandy is the largest city of central Sri Lanka.It is surrounded by mountains which is home to tea Plantations.'\n , country=country_7)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Male', description=\n 'It is among the tooped tourist Attractions of MaldivesIt has considerably moderate tempaerature through out the year'\n , country=country_8)\nsession.add(places)\nsession.commit()\nplaces = TouristPlaces(user_id=1, name='Sun Island', description=\n 'It is adorned with some sparkling beachesbeuatigul flowers and lavish greenary that pulls a great number of tourists'\n , country=country_8)\nsession.add(places)\nsession.commit()\nprint('added countries and Tourist Places added')\n",
"step-5": "from sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\nfrom database_setup import Base, Country, TouristPlaces, Users\n\n# Create database and create a shortcut for easier to update database\nengine = create_engine('sqlite:///country_catalog.db')\nBase.metadata.bind = engine\nDBSession = sessionmaker(bind=engine)\nsession = DBSession()\n\n# Creating an user\nuser_1 = Users(name=\"admin\", email=\"[email protected]\")\nsession.add(user_1)\nsession.commit()\n\n# India\ncountry_1 = Country(user_id=1, name=\"India\")\nsession.add(country_1)\nsession.commit()\n\n\n# Australia\ncountry_2 = Country(user_id=1, name=\"Australia\")\nsession.add(country_2)\nsession.commit()\n\n# England\ncountry_3 = Country(user_id=1, name=\"England\")\nsession.add(country_3)\nsession.commit()\n\n# Paris\ncountry_4 = Country(user_id=1, name=\"Paris\")\nsession.add(country_4)\nsession.commit()\n\n# USA\ncountry_5 = Country(user_id=1, name=\"USA\")\nsession.add(country_5)\nsession.commit()\n\n# Mexico\ncountry_6 = Country(user_id=1, name=\"Mexico\")\nsession.add(country_6)\nsession.commit()\n\n# SriLanka\ncountry_7 = Country(user_id=1, name=\"Srilanka\")\nsession.add(country_7)\nsession.commit()\n\n# MAldives\ncountry_8 = Country(user_id=1, name=\"Maldives\")\nsession.add(country_8)\nsession.commit()\n\n# Adding touristAttractions to Countries\nplaces = TouristPlaces(user_id=1, name=\"Taj Mahal\",\n description=\"Taj Mahal is mausolem by Mughal ruler Shah Jahan for his Wife Mumtaz Mahal \"\n \"It is bultby using white marbel\",\n country=country_1)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Red Fort\",\n description=\"Red fort is the histroric fort in the city of Delhi,India.\"\n \"It is the main residence of the emperors of mughal Dynasty.\",\n country=country_1)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Canberra\",\n description=\"It is the home for National GAllery of Australia\"\n \"and a wide varierty of cultural and historic sites\",\n country=country_2)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Perth\",\n description=\"The west side ofAustralia is home to the city of Perth\"\n \"It is bordered by Indian Ocean\",\n country=country_2)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Tower Of London\",\n description=\"It is one of the world Heritage site\"\n \"Other highlights are Crown Jewels Exhibition\",\n country=country_3)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"British Museum\",\n description=\"It contains the collection of worlds finest antiquites\"\n \"The famous artifacts are Eglin marbles\",\n country=country_3)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Eiffel Tower\",\n description=\"The Eiffel-tower is wrought iron lattice\"\n \"It is named after the Engineer Gustav Eiffel\",\n country=country_4)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"places of Versallies\",\n description=\"The Palce of Versallies is the Principle Royal\"\n \"residence.\",\n country=country_4)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Grand Canyon Village\",\n description=\"Grand Canyon is located in south Rim of Grand Canyon\"\n \"It is focussed on accomadating tourists visiting Grand Canyon\",\n country=country_5)\nsession.add(places)\nsession.commit()\n\nplaces = 
TouristPlaces(user_id=1, name=\"Statue if Liberty\",\n description=\"Statue of Liberty is Colossal neo-classical sculpture\"\n \"In New-york Hourbor Newyork\",\n country=country_5)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Mexico City\",\n description=\"Mexico city is densely populated and high altitude capital Of Mexico\"\n \"It is the home for zoo,Muesuem of modern Art.\",\n country=country_6)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Tulum\",\n description=\"Tulum is a town in the Carribean coatline of Mexico\"\n \"It is well-known for beaches and ruins of Ancient Mayan port city\",\n country=country_6)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Colombo\",\n description=\"It is the Capital city of Srilanka\"\n \"It sheritage is reflected in its Architecture\",\n country=country_7)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Kandy\",\n description=\"Kandy is the largest city of central Sri Lanka.\"\n \"It is surrounded by mountains which is home to tea Plantations.\",\n country=country_7)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Male\",\n description=\"It is among the tooped tourist Attractions of Maldives\"\n \"It has considerably moderate tempaerature through out the year\",\n country=country_8)\nsession.add(places)\nsession.commit()\n\nplaces = TouristPlaces(user_id=1, name=\"Sun Island\",\n description=\"It is adorned with some sparkling beaches\"\n \"beuatigul flowers and lavish greenary that pulls a great number of tourists\",\n country=country_8)\nsession.add(places)\nsession.commit()\n\nprint(\"added countries and Tourist Places added\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from flask import Flask, request, render_template
from utils import get_result
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route("/result", methods=["POST"])
def result():
form_data = request.form
sentence = form_data['sentence']
output = get_result(sentence)
return render_template('result.html', result=output)
if __name__ == '__main__':
app.run(debug=True)
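
# Example request once the dev server is running (the default Flask port 5000
# is assumed; "sentence" is the form field used above):
#   curl -X POST -d "sentence=hello world" http://127.0.0.1:5000/result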
|
normal
|
{
"blob_id": "264da5a2ab7d5c311d8a59b06c81ea2156cefd76",
"index": 9627,
"step-1": "<mask token>\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/result', methods=['POST'])\ndef result():\n form_data = request.form\n sentence = form_data['sentence']\n output = get_result(sentence)\n return render_template('result.html', result=output)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/result', methods=['POST'])\ndef result():\n form_data = request.form\n sentence = form_data['sentence']\n output = get_result(sentence)\n return render_template('result.html', result=output)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/result', methods=['POST'])\ndef result():\n form_data = request.form\n sentence = form_data['sentence']\n output = get_result(sentence)\n return render_template('result.html', result=output)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask, request, render_template\nfrom utils import get_result\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected]('/result', methods=['POST'])\ndef result():\n form_data = request.form\n sentence = form_data['sentence']\n output = get_result(sentence)\n return render_template('result.html', result=output)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask, request, render_template\n\nfrom utils import get_result\n\napp = Flask(__name__)\n\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\n\[email protected](\"/result\", methods=[\"POST\"])\ndef result():\n form_data = request.form\n sentence = form_data['sentence']\n output = get_result(sentence)\n return render_template('result.html', result=output)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
'''
* EAFS
* Copyright (C) 2009-2011 Adam Etienne <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation version 3.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import math,uuid,sys,os,time,operator,xmlrpclib,random,argparse
from eafslib import EAFSChunkServerRpc
class EAFSClient:
def __init__(self, master_host):
self.master = xmlrpclib.ServerProxy(master_host)
self.chunkservers = {}
def write(self, filename, data):
if self.exists(filename):
self.delete(filename)
num_chunks = self.num_chunks(len(data))
attributes = {"mode":"file", "atime":"", "ctime":"", "mtime":"", "attrs":""}
chunkuuids = self.master.alloc(filename, num_chunks, attributes)
self.write_chunks(chunkuuids, data)
def update_chunkservers(self):
chunkservers = self.master.get_chunkservers()
#print "CHUNKSERVERS[RAW]: ", chunkservers
for chunkserver in chunkservers:
#print chunkserver
if chunkserver['uuid'] not in self.chunkservers:
self.chunkservers[chunkserver['uuid']] = EAFSChunkServerRpc( chunkserver['uuid'], chunkserver['address'] )
def write_chunks(self, chunkuuids, data):
chunks = [ data[x:x+self.master.get_chunksize()] \
for x in range(0, len(data), self.master.get_chunksize()) ]
#chunkservers = self.master.get_chunkservers()
self.update_chunkservers()
#print "CHUNKSERVERS: ", self.chunkservers
for i in range(0, len(chunkuuids)): # write to each chunkserver
chunkuuid = chunkuuids[i]
chunklocs = self.master.get_chunklocs(chunkuuid)
for chunkloc in chunklocs:
#print "chunkloc: ", chunkloc
self.chunkservers[chunkloc].rpc.write(chunkuuid, chunks[i])
def num_chunks(self, size):
return (size // self.master.get_chunksize()) \
+ (1 if size % self.master.get_chunksize() > 0 else 0)
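	# Worked example: with a 64 MB chunk size, a 150 MB file gives
	# 150 // 64 == 2 full chunks plus 1 for the remainder, i.e. 3 chunks.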
def write_append(self, filename, data):
if not self.exists(filename):
raise Exception("append error, file does not exist: " + filename)
num_append_chunks = self.num_chunks(len(data))
append_chunkuuids = self.master.alloc_append(filename, \
num_append_chunks)
self.write_chunks(append_chunkuuids, data)
def exists(self, filename):
return self.master.exists(filename)
def read(self, filename): # get metadata, then read chunks direct
if not self.exists(filename):
raise Exception("read error, file does not exist: " + filename)
chunks = []
chunkuuids = self.master.get_chunkuuids(filename)
#chunkservers = self.master.get_chunkservers()
self.update_chunkservers()
for chunkuuid in chunkuuids:
chunklocs = self.master.get_chunklocs(chunkuuid)
done_chunkserver = []
chunk = None
chunk_read = False
while not (chunk_read or len(done_chunkserver)==len(chunklocs)):
chunkidrnd = random.randint(0, len(chunklocs)-1)
while chunkidrnd not in done_chunkserver and len(done_chunkserver)>0:
chunkidrnd = random.randint(0, len(chunklocs)-1)
chunkloc = chunklocs[chunkidrnd]
print "Select chunkloc %s from %d choices" % (chunkloc, len(chunklocs))
try:
chunk = self.chunkservers[chunkloc].rpc.read(chunkuuid)
chunk_read = True
done_chunkserver.append(chunkidrnd)
except:
print "Chunkserver %d failed" % chunkidrnd
if not chunk_read:
raise Exception("read error, chunkserver unavailable: " + filename)
chunks.append(chunk)
data = reduce(lambda x, y: x + y, chunks) # reassemble in order
return data
def delete(self, filename):
self.master.delete(filename)
def main():
parser = argparse.ArgumentParser(description='EAFS Simple Client')
parser.add_argument('--master', dest='master', default='localhost:6799', help='Master server address')
args = parser.parse_args()
master = 'http://' + args.master
client = EAFSClient(master)
# test write, exist, read
print "\nWriting..."
#try:
if False:
client.write("/usr/python/readme.txt", """
This file tells you all about python that you ever wanted to know.
Not every README is as informative as this one, but we aim to please.
Never yet has there been so much information in so little space.
""")
#except:
# print client.master.dump_metadata()
print "File exists? ", client.exists("/usr/python/readme.txt")
print client.read("/usr/python/readme.txt")
# show structure of the filesystem
print "\nMetadata Dump..."
print client.master.dump_metadata()
if __name__ == "__main__":
main()
"""
# test append, read after append
#print "\nAppending..."
#client.write_append("/usr/python/readme.txt", \
# "I'm a little sentence that just snuck in at the end.\n")
#print client.read("/usr/python/readme.txt")
# test delete
#print "\nDeleting..."
#client.delete("/usr/python/readme.txt")
#print "File exists? ", client.exists("/usr/python/readme.txt")
# test exceptions
#print "\nTesting Exceptions..."
#try:
# client.read("/usr/python/readme.txt")
#except Exception as e:
# print "This exception should be thrown:", e
#try:
# client.write_append("/usr/python/readme.txt", "foo")
#except Exception as e:
# print "This exception should be thrown:", e
"""
|
normal
|
{
"blob_id": "2f5244c6144f5aafce29e5aba32bd7e3fc7ecf5b",
"index": 3632,
"step-1": "# -*- coding: utf-8 -*-\n'''\n * EAFS\n * Copyright (C) 2009-2011 Adam Etienne <[email protected]>\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation version 3.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with this program. If not, see <http://www.gnu.org/licenses/>.\n'''\n\nimport math,uuid,sys,os,time,operator,xmlrpclib,random,argparse\nfrom eafslib import EAFSChunkServerRpc\n\n\nclass EAFSClient:\n\tdef __init__(self, master_host):\n\t\tself.master = xmlrpclib.ServerProxy(master_host)\n\t\tself.chunkservers = {}\n\n\tdef write(self, filename, data):\n\t\tif self.exists(filename):\n\t\t\tself.delete(filename)\n\t\tnum_chunks = self.num_chunks(len(data))\n\t\tattributes = {\"mode\":\"file\", \"atime\":\"\", \"ctime\":\"\", \"mtime\":\"\", \"attrs\":\"\"}\n\t\tchunkuuids = self.master.alloc(filename, num_chunks, attributes)\n\t\tself.write_chunks(chunkuuids, data)\n\t\n\tdef update_chunkservers(self):\n\t\tchunkservers = self.master.get_chunkservers()\n\t\t#print \"CHUNKSERVERS[RAW]: \", chunkservers\n\t\tfor chunkserver in chunkservers:\n\t\t\t#print chunkserver\n\t\t\tif chunkserver['uuid'] not in self.chunkservers:\n\t\t\t\tself.chunkservers[chunkserver['uuid']] = EAFSChunkServerRpc( chunkserver['uuid'], chunkserver['address'] )\n\t\t\n\tdef write_chunks(self, chunkuuids, data):\n\t\tchunks = [ data[x:x+self.master.get_chunksize()] \\\n\t\t\tfor x in range(0, len(data), self.master.get_chunksize()) ]\n\t\t#chunkservers = self.master.get_chunkservers()\n\t\tself.update_chunkservers()\n\t\t#print \"CHUNKSERVERS: \", self.chunkservers\n\t\tfor i in range(0, len(chunkuuids)): # write to each chunkserver\n\t\t\tchunkuuid = chunkuuids[i]\n\t\t\tchunklocs = self.master.get_chunklocs(chunkuuid)\n\t\t\tfor chunkloc in chunklocs:\n\t\t\t\t#print \"chunkloc: \", chunkloc\n\t\t\t\tself.chunkservers[chunkloc].rpc.write(chunkuuid, chunks[i])\n\n\tdef num_chunks(self, size):\n\t\treturn (size // self.master.get_chunksize()) \\\n\t\t\t+ (1 if size % self.master.get_chunksize() > 0 else 0)\n\n\tdef write_append(self, filename, data):\n\t\tif not self.exists(filename):\n\t\t\traise Exception(\"append error, file does not exist: \" + filename)\n\t\tnum_append_chunks = self.num_chunks(len(data))\n\t\tappend_chunkuuids = self.master.alloc_append(filename, \\\n\t\t\tnum_append_chunks)\n\t\tself.write_chunks(append_chunkuuids, data) \n\n\tdef exists(self, filename):\n\t\treturn self.master.exists(filename)\n\t\t\n\tdef read(self, filename): # get metadata, then read chunks direct\n\t\tif not self.exists(filename):\n\t\t\traise Exception(\"read error, file does not exist: \" + filename)\n\t\tchunks = []\n\t\tchunkuuids = self.master.get_chunkuuids(filename)\n\t\t#chunkservers = self.master.get_chunkservers()\n\t\tself.update_chunkservers()\n\t\tfor chunkuuid in chunkuuids:\n\t\t\tchunklocs = self.master.get_chunklocs(chunkuuid)\n\t\t\tdone_chunkserver = []\n\t\t\tchunk = None\n\t\t\tchunk_read = False\n\t\t\twhile not (chunk_read or len(done_chunkserver)==len(chunklocs)):\n\t\t\t\tchunkidrnd = random.randint(0, len(chunklocs)-1)\n\t\t\t\twhile chunkidrnd not in done_chunkserver and 
len(done_chunkserver)>0:\n\t\t\t\t\tchunkidrnd = random.randint(0, len(chunklocs)-1)\n\t\t\t\tchunkloc = chunklocs[chunkidrnd]\n\t\t\t\tprint \"Select chunkloc %s from %d choices\" % (chunkloc, len(chunklocs))\n\t\t\t\ttry:\n\t\t\t\t\tchunk = self.chunkservers[chunkloc].rpc.read(chunkuuid)\n\t\t\t\t\tchunk_read = True\n\t\t\t\t\tdone_chunkserver.append(chunkidrnd)\n\t\t\t\texcept:\n\t\t\t\t\tprint \"Chunkserver %d failed\" % chunkidrnd\n\t\t\tif not chunk_read:\n\t\t\t\traise Exception(\"read error, chunkserver unavailable: \" + filename)\n\t\t\tchunks.append(chunk)\n\t\tdata = reduce(lambda x, y: x + y, chunks) # reassemble in order\n\t\treturn data\n\n\tdef delete(self, filename):\n\t\tself.master.delete(filename)\n\ndef main():\n\tparser = argparse.ArgumentParser(description='EAFS Simple Client')\n\tparser.add_argument('--master', dest='master', default='localhost:6799', help='Master server address')\n\targs = parser.parse_args()\n\tmaster = 'http://' + args.master\n\t\n\tclient = EAFSClient(master)\n\t\n\t# test write, exist, read\n\tprint \"\\nWriting...\"\n\t#try:\n\tif False:\n\t\tclient.write(\"/usr/python/readme.txt\", \"\"\"\n\t\tThis file tells you all about python that you ever wanted to know.\n\t\tNot every README is as informative as this one, but we aim to please.\n\t\tNever yet has there been so much information in so little space.\n\t\t\"\"\")\n\t#except:\n\t# print client.master.dump_metadata()\n\tprint \"File exists? \", client.exists(\"/usr/python/readme.txt\")\n\tprint client.read(\"/usr/python/readme.txt\")\n\t# show structure of the filesystem\n\tprint \"\\nMetadata Dump...\" \n\tprint client.master.dump_metadata()\n\nif __name__ == \"__main__\":\n\tmain()\n\n\"\"\"\n\t# test append, read after append\n\t#print \"\\nAppending...\"\n\t#client.write_append(\"/usr/python/readme.txt\", \\\n\t# \"I'm a little sentence that just snuck in at the end.\\n\")\n\t#print client.read(\"/usr/python/readme.txt\")\n\n\t# test delete\n\t#print \"\\nDeleting...\"\n\t#client.delete(\"/usr/python/readme.txt\")\n\t#print \"File exists? \", client.exists(\"/usr/python/readme.txt\")\n\t\n\t# test exceptions\n\t#print \"\\nTesting Exceptions...\"\n\t#try:\n\t# client.read(\"/usr/python/readme.txt\")\n\t#except Exception as e:\n\t# print \"This exception should be thrown:\", e\n\t#try:\n\t# client.write_append(\"/usr/python/readme.txt\", \"foo\")\n\t#except Exception as e:\n\t# print \"This exception should be thrown:\", e\n\"\"\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats


def fit(x, iters=1000, eps=1e-6):
"""
Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.
:param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.
:param iters: Maximum number of iterations
    :param eps: Stopping criterion. Fit is stopped if the change within two iterations is smaller than eps.
:return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.
Impossible fits may be due to 0-values in x.
"""
# fit k via MLE
ln_x = np.log(x)
k = 1.
k_t_1 = k
for t in range(iters):
x_k = x ** k
x_k_ln_x = x_k * ln_x
ff = np.sum(x_k_ln_x)
fg = np.sum(x_k)
f = ff / fg - np.mean(ln_x) - (1. / k)
# Calculate second derivative d^2f/dk^2
ff_prime = np.sum(x_k_ln_x * ln_x)
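        # d/dk of fg = sum(x^k) is sum(x^k * ln x), which is exactly ff computed above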
fg_prime = ff
f_prime = (ff_prime / fg - (ff / fg * fg_prime / fg)) + (
1. / (k * k))
# Newton-Raphson method k = k - f(k;x)/f'(k;x)
k -= f / f_prime
if np.isnan(f):
return np.nan, np.nan
if abs(k - k_t_1) < eps:
break
k_t_1 = k
lam = np.mean(x ** k) ** (1.0 / k)
return k, lam


def my_test():
weibull = np.random.weibull(2.0, 100000)
x = 2 * weibull
mle_shape, mle_scale = fit(x)
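    # np.random.weibull(2.0) scaled by 2 samples Weibull(shape=2, scale=2), so with
    # 100k draws both estimates should land near 2.0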
x.sort()
print(mle_shape)
print(mle_scale)
# p0, p1, p2 = stats.weibull_min.fit(x, floc=0)
# print(p0, p1, p2)
ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,
mle_scale)
plt.plot(np.linspace(0, x.max(), 10), ydata, '-')
    plt.hist(x, bins=np.linspace(0, x.max(), 10), density=True, alpha=0.5)  # density=True replaces the removed 'normed' kwarg
plt.show()


if __name__ == '__main__':
my_test()
|
normal
|
{
"blob_id": "b10d3d8d0ded0d2055c1abdaf40a97abd4cb2cb8",
"index": 1631,
"step-1": "<mask token>\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\ndef my_test():\n weibull = np.random.weibull(2.0, 100000)\n x = 2 * weibull\n mle_shape, mle_scale = fit(x)\n x.sort()\n print(mle_shape)\n print(mle_scale)\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\n mle_scale)\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\n plt.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\ndef my_test():\n weibull = np.random.weibull(2.0, 100000)\n x = 2 * weibull\n mle_shape, mle_scale = fit(x)\n x.sort()\n print(mle_shape)\n print(mle_scale)\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\n mle_scale)\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\n plt.show()\n\n\nif __name__ == '__main__':\n my_test()\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy import stats\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\ndef my_test():\n weibull = np.random.weibull(2.0, 100000)\n x = 2 * weibull\n mle_shape, mle_scale = fit(x)\n x.sort()\n print(mle_shape)\n print(mle_scale)\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\n mle_scale)\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\n plt.show()\n\n\nif __name__ == '__main__':\n my_test()\n",
"step-5": "import numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom scipy import stats\r\n\r\n\r\ndef fit(x, iters=1000, eps=1e-6):\r\n \"\"\"\r\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\r\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\r\n :param iters: Maximum number of iterations\r\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\r\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\r\n Impossible fits may be due to 0-values in x.\r\n \"\"\"\r\n # fit k via MLE\r\n ln_x = np.log(x)\r\n k = 1.\r\n k_t_1 = k\r\n\r\n for t in range(iters):\r\n x_k = x ** k\r\n x_k_ln_x = x_k * ln_x\r\n ff = np.sum(x_k_ln_x)\r\n fg = np.sum(x_k)\r\n f = ff / fg - np.mean(ln_x) - (1. / k)\r\n\r\n # Calculate second derivative d^2f/dk^2\r\n ff_prime = np.sum(x_k_ln_x * ln_x)\r\n fg_prime = ff\r\n f_prime = (ff_prime / fg - (ff / fg * fg_prime / fg)) + (\r\n 1. / (k * k))\r\n\r\n # Newton-Raphson method k = k - f(k;x)/f'(k;x)\r\n k -= f / f_prime\r\n\r\n if np.isnan(f):\r\n return np.nan, np.nan\r\n if abs(k - k_t_1) < eps:\r\n break\r\n\r\n k_t_1 = k\r\n\r\n lam = np.mean(x ** k) ** (1.0 / k)\r\n\r\n return k, lam\r\n\r\n\r\ndef my_test():\r\n weibull = np.random.weibull(2.0, 100000)\r\n x = 2 * weibull\r\n mle_shape, mle_scale = fit(x)\r\n x.sort()\r\n print(mle_shape)\r\n print(mle_scale)\r\n # p0, p1, p2 = stats.weibull_min.fit(x, floc=0)\r\n # print(p0, p1, p2)\r\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\r\n mle_scale)\r\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\r\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\r\n plt.show()\r\n\r\n\r\nif __name__ == '__main__':\r\n my_test()\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import json
import os
import numpy as np
import pandas as pd
import py4design.py2radiance as py2radiance
import py4design.py3dmodel.calculate as calculate
from py4design import py3dmodel
__author__ = "Jimeno A. Fonseca"
__copyright__ = "Copyright 2017, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Jimeno A. Fonseca", "Kian Wee Chen"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "[email protected]"
__status__ = "Production"

from cea.constants import HOURS_IN_YEAR
from cea.resources.radiation_daysim.geometry_generator import BuildingGeometry
from cea import suppress_3rd_party_debug_loggers

suppress_3rd_party_debug_loggers()


def create_sensor_input_file(rad, chunk_n):
sensor_file_path = os.path.join(rad.data_folder_path, "points_" + str(chunk_n) + ".pts")
sensor_file = open(sensor_file_path, "w")
sensor_pts_data = py2radiance.write_rad.sensor_file(rad.sensor_positions, rad.sensor_normals)
sensor_file.write(sensor_pts_data)
sensor_file.close()
rad.sensor_file_path = sensor_file_path


def generate_sensor_surfaces(occface, wall_dim, roof_dim, srf_type, orientation, normal, intersection):
mid_pt = py3dmodel.calculate.face_midpt(occface)
location_pt = py3dmodel.modify.move_pt(mid_pt, normal, 0.01)
moved_oface = py3dmodel.fetch.topo2topotype(py3dmodel.modify.move(mid_pt, location_pt, occface))
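    # offset the face 0.01 m along its normal, presumably so sensor patches sit just off the surface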
if srf_type == 'roofs':
xdim = ydim = roof_dim
else:
xdim = ydim = wall_dim
# put it into occ and subdivide surfaces
sensor_surfaces = py3dmodel.construct.grid_face(moved_oface, xdim, ydim)
# calculate list of properties per surface
sensor_intersection = [intersection for x in sensor_surfaces]
sensor_dir = [normal for x in sensor_surfaces]
sensor_cord = [py3dmodel.calculate.face_midpt(x) for x in sensor_surfaces]
sensor_type = [srf_type for x in sensor_surfaces]
sensor_orientation = [orientation for x in sensor_surfaces]
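    # net sensor area: gross patch area scaled down by the intersected fraction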
sensor_area = [calculate.face_area(x) * (1.0 - scalar) for x, scalar in zip(sensor_surfaces, sensor_intersection)]
return sensor_dir, sensor_cord, sensor_type, sensor_area, sensor_orientation, sensor_intersection


def calc_sensors_building(building_geometry, grid_size):
sensor_dir_list = []
sensor_cord_list = []
sensor_type_list = []
sensor_area_list = []
sensor_orientation_list = []
sensor_intersection_list = []
surfaces_types = ['walls', 'windows', 'roofs']
sensor_vertical_grid_dim = grid_size["walls_grid"]
sensor_horizontal_grid_dim = grid_size["roof_grid"]
for srf_type in surfaces_types:
occface_list = getattr(building_geometry, srf_type)
if srf_type == 'roofs':
orientation_list = ['top'] * len(occface_list)
normals_list = [(0.0, 0.0, 1.0)] * len(occface_list)
interesection_list = [0] * len(occface_list)
elif srf_type == 'windows':
orientation_list = getattr(building_geometry, "orientation_{srf_type}".format(srf_type=srf_type))
normals_list = getattr(building_geometry, "normals_{srf_type}".format(srf_type=srf_type))
interesection_list = [0] * len(occface_list)
else:
orientation_list = getattr(building_geometry, "orientation_{srf_type}".format(srf_type=srf_type))
normals_list = getattr(building_geometry, "normals_{srf_type}".format(srf_type=srf_type))
interesection_list = getattr(building_geometry, "intersect_{srf_type}".format(srf_type=srf_type))
for orientation, normal, face, intersection in zip(orientation_list, normals_list, occface_list,
interesection_list):
sensor_dir, \
sensor_cord, \
sensor_type, \
sensor_area, \
sensor_orientation, \
sensor_intersection = generate_sensor_surfaces(face,
sensor_vertical_grid_dim,
sensor_horizontal_grid_dim,
srf_type,
orientation,
normal,
intersection)
sensor_intersection_list.extend(sensor_intersection)
sensor_dir_list.extend(sensor_dir)
sensor_cord_list.extend(sensor_cord)
sensor_type_list.extend(sensor_type)
sensor_area_list.extend(sensor_area)
sensor_orientation_list.extend(sensor_orientation)
return sensor_dir_list, sensor_cord_list, sensor_type_list, sensor_area_list, sensor_orientation_list, sensor_intersection_list


def calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir):
sensors_coords_zone = []
sensors_dir_zone = []
sensors_total_number_list = []
names_zone = []
sensors_code_zone = []
sensor_intersection_zone = []
for building_name in building_names:
building_geometry = BuildingGeometry.load(os.path.join(geometry_pickle_dir, 'zone', building_name))
# get sensors in the building
sensors_dir_building, \
sensors_coords_building, \
sensors_type_building, \
sensors_area_building, \
sensor_orientation_building, \
sensor_intersection_building = calc_sensors_building(building_geometry, grid_size)
# get the total number of sensors and store in lst
sensors_number = len(sensors_coords_building)
sensors_total_number_list.append(sensors_number)
sensors_code = ['srf' + str(x) for x in range(sensors_number)]
sensors_code_zone.append(sensors_code)
# get the total list of coordinates and directions to send to daysim
sensors_coords_zone.extend(sensors_coords_building)
sensors_dir_zone.extend(sensors_dir_building)
# get total list of intersections
sensor_intersection_zone.append(sensor_intersection_building)
# get the name of all buildings
names_zone.append(building_name)
# save sensors geometry result to disk
pd.DataFrame({'BUILDING': building_name,
'SURFACE': sensors_code,
'orientation': sensor_orientation_building,
'intersection': sensor_intersection_building,
'Xcoor': [x[0] for x in sensors_coords_building],
'Ycoor': [x[1] for x in sensors_coords_building],
'Zcoor': [x[2] for x in sensors_coords_building],
'Xdir': [x[0] for x in sensors_dir_building],
'Ydir': [x[1] for x in sensors_dir_building],
'Zdir': [x[2] for x in sensors_dir_building],
'AREA_m2': sensors_area_building,
'TYPE': sensors_type_building}).to_csv(locator.get_radiation_metadata(building_name), index=None)
return sensors_coords_zone, sensors_dir_zone, sensors_total_number_list, names_zone, sensors_code_zone, sensor_intersection_zone


def isolation_daysim(chunk_n, cea_daysim, building_names, locator, radiance_parameters, write_sensor_data, grid_size,
max_global, weatherfile, geometry_pickle_dir):
# initialize daysim project
daysim_project = cea_daysim.initialize_daysim_project('chunk_{n}'.format(n=chunk_n))
print('Creating daysim project in: {daysim_dir}'.format(daysim_dir=daysim_project.project_path))
# calculate sensors
print("Calculating and sending sensor points")
sensors_coords_zone, \
sensors_dir_zone, \
sensors_number_zone, \
names_zone, \
sensors_code_zone, \
sensor_intersection_zone = calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir)
num_sensors = sum(sensors_number_zone)
daysim_project.create_sensor_input_file(sensors_coords_zone, sensors_dir_zone, num_sensors, "w/m2")
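    # sensors are registered with 'w/m2' units, so the Daysim results should come back as irradiance per sensor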
print("Starting Daysim simulation for buildings: {buildings}".format(buildings=names_zone))
print("Total number of sensors: {num_sensors}".format(num_sensors=num_sensors))
print('Writing radiance parameters')
daysim_project.write_radiance_parameters(radiance_parameters["rad_ab"], radiance_parameters["rad_ad"],
radiance_parameters["rad_as"], radiance_parameters["rad_ar"],
radiance_parameters["rad_aa"], radiance_parameters["rad_lr"],
radiance_parameters["rad_st"], radiance_parameters["rad_sj"],
radiance_parameters["rad_lw"], radiance_parameters["rad_dj"],
radiance_parameters["rad_ds"], radiance_parameters["rad_dr"],
radiance_parameters["rad_dp"])
    print('Executing hourly solar insolation calculation')
daysim_project.execute_gen_dc()
daysim_project.execute_ds_illum()
print('Reading results...')
solar_res = daysim_project.eval_ill()
# check inconsistencies and replace by max value of weather file
print('Fixing inconsistencies, if any')
solar_res = np.clip(solar_res, a_min=0.0, a_max=max_global)
# Check if leap year and remove extra day
if solar_res.shape[1] == HOURS_IN_YEAR + 24:
print('Removing leap day')
leap_day_hours = range(1416, 1440)
solar_res = np.delete(solar_res, leap_day_hours, axis=1)
print("Writing results to disk")
index = 0
for building_name, \
sensors_number_building, \
sensor_code_building, \
sensor_intersection_building in zip(names_zone,
sensors_number_zone,
sensors_code_zone,
sensor_intersection_zone):
# select sensors data
selection_of_results = solar_res[index:index + sensors_number_building]
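        # sensors flagged as fully intersected with neighbouring geometry contribute no radiation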
selection_of_results[np.array(sensor_intersection_building) == 1] = 0
items_sensor_name_and_result = dict(zip(sensor_code_building, selection_of_results.tolist()))
index = index + sensors_number_building
# create summary and save to disk
write_aggregated_results(building_name, items_sensor_name_and_result, locator, weatherfile)
if write_sensor_data:
write_sensor_results(building_name, items_sensor_name_and_result, locator)
# erase daysim folder to avoid conflicts after every iteration
print('Removing results folder')
daysim_project.cleanup_project()


def write_sensor_results(building_name, items_sensor_name_and_result, locator):
with open(locator.get_radiation_building_sensors(building_name), 'w') as outfile:
json.dump(items_sensor_name_and_result, outfile)


def write_aggregated_results(building_name, items_sensor_name_and_result, locator, weatherfile):
geometry = pd.read_csv(locator.get_radiation_metadata(building_name))
geometry['code'] = geometry['TYPE'] + '_' + geometry['orientation'] + '_kW'
solar_analysis_fields = ['windows_east_kW',
'windows_west_kW',
'windows_south_kW',
'windows_north_kW',
'walls_east_kW',
'walls_west_kW',
'walls_south_kW',
'walls_north_kW',
'roofs_top_kW']
solar_analysis_fields_area = ['windows_east_m2',
'windows_west_m2',
'windows_south_m2',
'windows_north_m2',
'walls_east_m2',
'walls_west_m2',
'walls_south_m2',
'walls_north_m2',
'roofs_top_m2']
dict_not_aggregated = {}
for field, field_area in zip(solar_analysis_fields, solar_analysis_fields_area):
select_sensors = geometry.loc[geometry['code'] == field].set_index('SURFACE')
area_m2 = select_sensors['AREA_m2'].sum()
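        # area-weighted sum over this surface class: W/m2 per sensor times its area in m2 gives W per hour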
array_field = np.array([select_sensors.loc[surface, 'AREA_m2'] *
np.array(items_sensor_name_and_result[surface])
for surface in select_sensors.index]).sum(axis=0)
dict_not_aggregated[field] = array_field / 1000 # in kWh
dict_not_aggregated[field_area] = area_m2
data_aggregated_kW = (pd.DataFrame(dict_not_aggregated)).round(2)
data_aggregated_kW["Date"] = weatherfile["date"]
data_aggregated_kW.set_index('Date', inplace=True)
data_aggregated_kW.to_csv(locator.get_radiation_building(building_name))
|
normal
|
{
"blob_id": "164b0afde225119a8fbd4ccfccbbbc3550aa75fe",
"index": 2634,
"step-1": "<mask token>\n\n\ndef create_sensor_input_file(rad, chunk_n):\n sensor_file_path = os.path.join(rad.data_folder_path, 'points_' + str(\n chunk_n) + '.pts')\n sensor_file = open(sensor_file_path, 'w')\n sensor_pts_data = py2radiance.write_rad.sensor_file(rad.\n sensor_positions, rad.sensor_normals)\n sensor_file.write(sensor_pts_data)\n sensor_file.close()\n rad.sensor_file_path = sensor_file_path\n\n\ndef generate_sensor_surfaces(occface, wall_dim, roof_dim, srf_type,\n orientation, normal, intersection):\n mid_pt = py3dmodel.calculate.face_midpt(occface)\n location_pt = py3dmodel.modify.move_pt(mid_pt, normal, 0.01)\n moved_oface = py3dmodel.fetch.topo2topotype(py3dmodel.modify.move(\n mid_pt, location_pt, occface))\n if srf_type == 'roofs':\n xdim = ydim = roof_dim\n else:\n xdim = ydim = wall_dim\n sensor_surfaces = py3dmodel.construct.grid_face(moved_oface, xdim, ydim)\n sensor_intersection = [intersection for x in sensor_surfaces]\n sensor_dir = [normal for x in sensor_surfaces]\n sensor_cord = [py3dmodel.calculate.face_midpt(x) for x in sensor_surfaces]\n sensor_type = [srf_type for x in sensor_surfaces]\n sensor_orientation = [orientation for x in sensor_surfaces]\n sensor_area = [(calculate.face_area(x) * (1.0 - scalar)) for x, scalar in\n zip(sensor_surfaces, sensor_intersection)]\n return (sensor_dir, sensor_cord, sensor_type, sensor_area,\n sensor_orientation, sensor_intersection)\n\n\n<mask token>\n\n\ndef calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir):\n sensors_coords_zone = []\n sensors_dir_zone = []\n sensors_total_number_list = []\n names_zone = []\n sensors_code_zone = []\n sensor_intersection_zone = []\n for building_name in building_names:\n building_geometry = BuildingGeometry.load(os.path.join(\n geometry_pickle_dir, 'zone', building_name))\n (sensors_dir_building, sensors_coords_building,\n sensors_type_building, sensors_area_building,\n sensor_orientation_building, sensor_intersection_building\n ) = calc_sensors_building(building_geometry, grid_size)\n sensors_number = len(sensors_coords_building)\n sensors_total_number_list.append(sensors_number)\n sensors_code = [('srf' + str(x)) for x in range(sensors_number)]\n sensors_code_zone.append(sensors_code)\n sensors_coords_zone.extend(sensors_coords_building)\n sensors_dir_zone.extend(sensors_dir_building)\n sensor_intersection_zone.append(sensor_intersection_building)\n names_zone.append(building_name)\n pd.DataFrame({'BUILDING': building_name, 'SURFACE': sensors_code,\n 'orientation': sensor_orientation_building, 'intersection':\n sensor_intersection_building, 'Xcoor': [x[0] for x in\n sensors_coords_building], 'Ycoor': [x[1] for x in\n sensors_coords_building], 'Zcoor': [x[2] for x in\n sensors_coords_building], 'Xdir': [x[0] for x in\n sensors_dir_building], 'Ydir': [x[1] for x in\n sensors_dir_building], 'Zdir': [x[2] for x in\n sensors_dir_building], 'AREA_m2': sensors_area_building, 'TYPE':\n sensors_type_building}).to_csv(locator.get_radiation_metadata(\n building_name), index=None)\n return (sensors_coords_zone, sensors_dir_zone,\n sensors_total_number_list, names_zone, sensors_code_zone,\n sensor_intersection_zone)\n\n\ndef isolation_daysim(chunk_n, cea_daysim, building_names, locator,\n radiance_parameters, write_sensor_data, grid_size, max_global,\n weatherfile, geometry_pickle_dir):\n daysim_project = cea_daysim.initialize_daysim_project('chunk_{n}'.\n format(n=chunk_n))\n print('Creating daysim project in: {daysim_dir}'.format(daysim_dir=\n 
daysim_project.project_path))\n print('Calculating and sending sensor points')\n (sensors_coords_zone, sensors_dir_zone, sensors_number_zone, names_zone,\n sensors_code_zone, sensor_intersection_zone) = (calc_sensors_zone(\n building_names, locator, grid_size, geometry_pickle_dir))\n num_sensors = sum(sensors_number_zone)\n daysim_project.create_sensor_input_file(sensors_coords_zone,\n sensors_dir_zone, num_sensors, 'w/m2')\n print('Starting Daysim simulation for buildings: {buildings}'.format(\n buildings=names_zone))\n print('Total number of sensors: {num_sensors}'.format(num_sensors=\n num_sensors))\n print('Writing radiance parameters')\n daysim_project.write_radiance_parameters(radiance_parameters['rad_ab'],\n radiance_parameters['rad_ad'], radiance_parameters['rad_as'],\n radiance_parameters['rad_ar'], radiance_parameters['rad_aa'],\n radiance_parameters['rad_lr'], radiance_parameters['rad_st'],\n radiance_parameters['rad_sj'], radiance_parameters['rad_lw'],\n radiance_parameters['rad_dj'], radiance_parameters['rad_ds'],\n radiance_parameters['rad_dr'], radiance_parameters['rad_dp'])\n print('Executing hourly solar isolation calculation')\n daysim_project.execute_gen_dc()\n daysim_project.execute_ds_illum()\n print('Reading results...')\n solar_res = daysim_project.eval_ill()\n print('Fixing inconsistencies, if any')\n solar_res = np.clip(solar_res, a_min=0.0, a_max=max_global)\n if solar_res.shape[1] == HOURS_IN_YEAR + 24:\n print('Removing leap day')\n leap_day_hours = range(1416, 1440)\n solar_res = np.delete(solar_res, leap_day_hours, axis=1)\n print('Writing results to disk')\n index = 0\n for building_name, sensors_number_building, sensor_code_building, sensor_intersection_building in zip(\n names_zone, sensors_number_zone, sensors_code_zone,\n sensor_intersection_zone):\n selection_of_results = solar_res[index:index + sensors_number_building]\n selection_of_results[np.array(sensor_intersection_building) == 1] = 0\n items_sensor_name_and_result = dict(zip(sensor_code_building,\n selection_of_results.tolist()))\n index = index + sensors_number_building\n write_aggregated_results(building_name,\n items_sensor_name_and_result, locator, weatherfile)\n if write_sensor_data:\n write_sensor_results(building_name,\n items_sensor_name_and_result, locator)\n print('Removing results folder')\n daysim_project.cleanup_project()\n\n\ndef write_sensor_results(building_name, items_sensor_name_and_result, locator):\n with open(locator.get_radiation_building_sensors(building_name), 'w'\n ) as outfile:\n json.dump(items_sensor_name_and_result, outfile)\n\n\ndef write_aggregated_results(building_name, items_sensor_name_and_result,\n locator, weatherfile):\n geometry = pd.read_csv(locator.get_radiation_metadata(building_name))\n geometry['code'] = geometry['TYPE'] + '_' + geometry['orientation'] + '_kW'\n solar_analysis_fields = ['windows_east_kW', 'windows_west_kW',\n 'windows_south_kW', 'windows_north_kW', 'walls_east_kW',\n 'walls_west_kW', 'walls_south_kW', 'walls_north_kW', 'roofs_top_kW']\n solar_analysis_fields_area = ['windows_east_m2', 'windows_west_m2',\n 'windows_south_m2', 'windows_north_m2', 'walls_east_m2',\n 'walls_west_m2', 'walls_south_m2', 'walls_north_m2', 'roofs_top_m2']\n dict_not_aggregated = {}\n for field, field_area in zip(solar_analysis_fields,\n solar_analysis_fields_area):\n select_sensors = geometry.loc[geometry['code'] == field].set_index(\n 'SURFACE')\n area_m2 = select_sensors['AREA_m2'].sum()\n array_field = np.array([(select_sensors.loc[surface, 'AREA_m2'] *\n 
np.array(items_sensor_name_and_result[surface])) for surface in\n select_sensors.index]).sum(axis=0)\n dict_not_aggregated[field] = array_field / 1000\n dict_not_aggregated[field_area] = area_m2\n data_aggregated_kW = pd.DataFrame(dict_not_aggregated).round(2)\n data_aggregated_kW['Date'] = weatherfile['date']\n data_aggregated_kW.set_index('Date', inplace=True)\n data_aggregated_kW.to_csv(locator.get_radiation_building(building_name))\n",
"step-2": "<mask token>\n\n\ndef create_sensor_input_file(rad, chunk_n):\n sensor_file_path = os.path.join(rad.data_folder_path, 'points_' + str(\n chunk_n) + '.pts')\n sensor_file = open(sensor_file_path, 'w')\n sensor_pts_data = py2radiance.write_rad.sensor_file(rad.\n sensor_positions, rad.sensor_normals)\n sensor_file.write(sensor_pts_data)\n sensor_file.close()\n rad.sensor_file_path = sensor_file_path\n\n\ndef generate_sensor_surfaces(occface, wall_dim, roof_dim, srf_type,\n orientation, normal, intersection):\n mid_pt = py3dmodel.calculate.face_midpt(occface)\n location_pt = py3dmodel.modify.move_pt(mid_pt, normal, 0.01)\n moved_oface = py3dmodel.fetch.topo2topotype(py3dmodel.modify.move(\n mid_pt, location_pt, occface))\n if srf_type == 'roofs':\n xdim = ydim = roof_dim\n else:\n xdim = ydim = wall_dim\n sensor_surfaces = py3dmodel.construct.grid_face(moved_oface, xdim, ydim)\n sensor_intersection = [intersection for x in sensor_surfaces]\n sensor_dir = [normal for x in sensor_surfaces]\n sensor_cord = [py3dmodel.calculate.face_midpt(x) for x in sensor_surfaces]\n sensor_type = [srf_type for x in sensor_surfaces]\n sensor_orientation = [orientation for x in sensor_surfaces]\n sensor_area = [(calculate.face_area(x) * (1.0 - scalar)) for x, scalar in\n zip(sensor_surfaces, sensor_intersection)]\n return (sensor_dir, sensor_cord, sensor_type, sensor_area,\n sensor_orientation, sensor_intersection)\n\n\ndef calc_sensors_building(building_geometry, grid_size):\n sensor_dir_list = []\n sensor_cord_list = []\n sensor_type_list = []\n sensor_area_list = []\n sensor_orientation_list = []\n sensor_intersection_list = []\n surfaces_types = ['walls', 'windows', 'roofs']\n sensor_vertical_grid_dim = grid_size['walls_grid']\n sensor_horizontal_grid_dim = grid_size['roof_grid']\n for srf_type in surfaces_types:\n occface_list = getattr(building_geometry, srf_type)\n if srf_type == 'roofs':\n orientation_list = ['top'] * len(occface_list)\n normals_list = [(0.0, 0.0, 1.0)] * len(occface_list)\n interesection_list = [0] * len(occface_list)\n elif srf_type == 'windows':\n orientation_list = getattr(building_geometry,\n 'orientation_{srf_type}'.format(srf_type=srf_type))\n normals_list = getattr(building_geometry, 'normals_{srf_type}'.\n format(srf_type=srf_type))\n interesection_list = [0] * len(occface_list)\n else:\n orientation_list = getattr(building_geometry,\n 'orientation_{srf_type}'.format(srf_type=srf_type))\n normals_list = getattr(building_geometry, 'normals_{srf_type}'.\n format(srf_type=srf_type))\n interesection_list = getattr(building_geometry,\n 'intersect_{srf_type}'.format(srf_type=srf_type))\n for orientation, normal, face, intersection in zip(orientation_list,\n normals_list, occface_list, interesection_list):\n (sensor_dir, sensor_cord, sensor_type, sensor_area,\n sensor_orientation, sensor_intersection) = (\n generate_sensor_surfaces(face, sensor_vertical_grid_dim,\n sensor_horizontal_grid_dim, srf_type, orientation, normal,\n intersection))\n sensor_intersection_list.extend(sensor_intersection)\n sensor_dir_list.extend(sensor_dir)\n sensor_cord_list.extend(sensor_cord)\n sensor_type_list.extend(sensor_type)\n sensor_area_list.extend(sensor_area)\n sensor_orientation_list.extend(sensor_orientation)\n return (sensor_dir_list, sensor_cord_list, sensor_type_list,\n sensor_area_list, sensor_orientation_list, sensor_intersection_list)\n\n\ndef calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir):\n sensors_coords_zone = []\n sensors_dir_zone = []\n 
sensors_total_number_list = []\n names_zone = []\n sensors_code_zone = []\n sensor_intersection_zone = []\n for building_name in building_names:\n building_geometry = BuildingGeometry.load(os.path.join(\n geometry_pickle_dir, 'zone', building_name))\n (sensors_dir_building, sensors_coords_building,\n sensors_type_building, sensors_area_building,\n sensor_orientation_building, sensor_intersection_building\n ) = calc_sensors_building(building_geometry, grid_size)\n sensors_number = len(sensors_coords_building)\n sensors_total_number_list.append(sensors_number)\n sensors_code = [('srf' + str(x)) for x in range(sensors_number)]\n sensors_code_zone.append(sensors_code)\n sensors_coords_zone.extend(sensors_coords_building)\n sensors_dir_zone.extend(sensors_dir_building)\n sensor_intersection_zone.append(sensor_intersection_building)\n names_zone.append(building_name)\n pd.DataFrame({'BUILDING': building_name, 'SURFACE': sensors_code,\n 'orientation': sensor_orientation_building, 'intersection':\n sensor_intersection_building, 'Xcoor': [x[0] for x in\n sensors_coords_building], 'Ycoor': [x[1] for x in\n sensors_coords_building], 'Zcoor': [x[2] for x in\n sensors_coords_building], 'Xdir': [x[0] for x in\n sensors_dir_building], 'Ydir': [x[1] for x in\n sensors_dir_building], 'Zdir': [x[2] for x in\n sensors_dir_building], 'AREA_m2': sensors_area_building, 'TYPE':\n sensors_type_building}).to_csv(locator.get_radiation_metadata(\n building_name), index=None)\n return (sensors_coords_zone, sensors_dir_zone,\n sensors_total_number_list, names_zone, sensors_code_zone,\n sensor_intersection_zone)\n\n\ndef isolation_daysim(chunk_n, cea_daysim, building_names, locator,\n radiance_parameters, write_sensor_data, grid_size, max_global,\n weatherfile, geometry_pickle_dir):\n daysim_project = cea_daysim.initialize_daysim_project('chunk_{n}'.\n format(n=chunk_n))\n print('Creating daysim project in: {daysim_dir}'.format(daysim_dir=\n daysim_project.project_path))\n print('Calculating and sending sensor points')\n (sensors_coords_zone, sensors_dir_zone, sensors_number_zone, names_zone,\n sensors_code_zone, sensor_intersection_zone) = (calc_sensors_zone(\n building_names, locator, grid_size, geometry_pickle_dir))\n num_sensors = sum(sensors_number_zone)\n daysim_project.create_sensor_input_file(sensors_coords_zone,\n sensors_dir_zone, num_sensors, 'w/m2')\n print('Starting Daysim simulation for buildings: {buildings}'.format(\n buildings=names_zone))\n print('Total number of sensors: {num_sensors}'.format(num_sensors=\n num_sensors))\n print('Writing radiance parameters')\n daysim_project.write_radiance_parameters(radiance_parameters['rad_ab'],\n radiance_parameters['rad_ad'], radiance_parameters['rad_as'],\n radiance_parameters['rad_ar'], radiance_parameters['rad_aa'],\n radiance_parameters['rad_lr'], radiance_parameters['rad_st'],\n radiance_parameters['rad_sj'], radiance_parameters['rad_lw'],\n radiance_parameters['rad_dj'], radiance_parameters['rad_ds'],\n radiance_parameters['rad_dr'], radiance_parameters['rad_dp'])\n print('Executing hourly solar isolation calculation')\n daysim_project.execute_gen_dc()\n daysim_project.execute_ds_illum()\n print('Reading results...')\n solar_res = daysim_project.eval_ill()\n print('Fixing inconsistencies, if any')\n solar_res = np.clip(solar_res, a_min=0.0, a_max=max_global)\n if solar_res.shape[1] == HOURS_IN_YEAR + 24:\n print('Removing leap day')\n leap_day_hours = range(1416, 1440)\n solar_res = np.delete(solar_res, leap_day_hours, axis=1)\n print('Writing results to 
disk')\n index = 0\n for building_name, sensors_number_building, sensor_code_building, sensor_intersection_building in zip(\n names_zone, sensors_number_zone, sensors_code_zone,\n sensor_intersection_zone):\n selection_of_results = solar_res[index:index + sensors_number_building]\n selection_of_results[np.array(sensor_intersection_building) == 1] = 0\n items_sensor_name_and_result = dict(zip(sensor_code_building,\n selection_of_results.tolist()))\n index = index + sensors_number_building\n write_aggregated_results(building_name,\n items_sensor_name_and_result, locator, weatherfile)\n if write_sensor_data:\n write_sensor_results(building_name,\n items_sensor_name_and_result, locator)\n print('Removing results folder')\n daysim_project.cleanup_project()\n\n\ndef write_sensor_results(building_name, items_sensor_name_and_result, locator):\n with open(locator.get_radiation_building_sensors(building_name), 'w'\n ) as outfile:\n json.dump(items_sensor_name_and_result, outfile)\n\n\ndef write_aggregated_results(building_name, items_sensor_name_and_result,\n locator, weatherfile):\n geometry = pd.read_csv(locator.get_radiation_metadata(building_name))\n geometry['code'] = geometry['TYPE'] + '_' + geometry['orientation'] + '_kW'\n solar_analysis_fields = ['windows_east_kW', 'windows_west_kW',\n 'windows_south_kW', 'windows_north_kW', 'walls_east_kW',\n 'walls_west_kW', 'walls_south_kW', 'walls_north_kW', 'roofs_top_kW']\n solar_analysis_fields_area = ['windows_east_m2', 'windows_west_m2',\n 'windows_south_m2', 'windows_north_m2', 'walls_east_m2',\n 'walls_west_m2', 'walls_south_m2', 'walls_north_m2', 'roofs_top_m2']\n dict_not_aggregated = {}\n for field, field_area in zip(solar_analysis_fields,\n solar_analysis_fields_area):\n select_sensors = geometry.loc[geometry['code'] == field].set_index(\n 'SURFACE')\n area_m2 = select_sensors['AREA_m2'].sum()\n array_field = np.array([(select_sensors.loc[surface, 'AREA_m2'] *\n np.array(items_sensor_name_and_result[surface])) for surface in\n select_sensors.index]).sum(axis=0)\n dict_not_aggregated[field] = array_field / 1000\n dict_not_aggregated[field_area] = area_m2\n data_aggregated_kW = pd.DataFrame(dict_not_aggregated).round(2)\n data_aggregated_kW['Date'] = weatherfile['date']\n data_aggregated_kW.set_index('Date', inplace=True)\n data_aggregated_kW.to_csv(locator.get_radiation_building(building_name))\n",
"step-3": "<mask token>\n__author__ = 'Jimeno A. Fonseca'\n__copyright__ = (\n 'Copyright 2017, Architecture and Building Systems - ETH Zurich')\n__credits__ = ['Jimeno A. Fonseca', 'Kian Wee Chen']\n__license__ = 'MIT'\n__version__ = '0.1'\n__maintainer__ = 'Daren Thomas'\n__email__ = '[email protected]'\n__status__ = 'Production'\n<mask token>\nsuppress_3rd_party_debug_loggers()\n\n\ndef create_sensor_input_file(rad, chunk_n):\n sensor_file_path = os.path.join(rad.data_folder_path, 'points_' + str(\n chunk_n) + '.pts')\n sensor_file = open(sensor_file_path, 'w')\n sensor_pts_data = py2radiance.write_rad.sensor_file(rad.\n sensor_positions, rad.sensor_normals)\n sensor_file.write(sensor_pts_data)\n sensor_file.close()\n rad.sensor_file_path = sensor_file_path\n\n\ndef generate_sensor_surfaces(occface, wall_dim, roof_dim, srf_type,\n orientation, normal, intersection):\n mid_pt = py3dmodel.calculate.face_midpt(occface)\n location_pt = py3dmodel.modify.move_pt(mid_pt, normal, 0.01)\n moved_oface = py3dmodel.fetch.topo2topotype(py3dmodel.modify.move(\n mid_pt, location_pt, occface))\n if srf_type == 'roofs':\n xdim = ydim = roof_dim\n else:\n xdim = ydim = wall_dim\n sensor_surfaces = py3dmodel.construct.grid_face(moved_oface, xdim, ydim)\n sensor_intersection = [intersection for x in sensor_surfaces]\n sensor_dir = [normal for x in sensor_surfaces]\n sensor_cord = [py3dmodel.calculate.face_midpt(x) for x in sensor_surfaces]\n sensor_type = [srf_type for x in sensor_surfaces]\n sensor_orientation = [orientation for x in sensor_surfaces]\n sensor_area = [(calculate.face_area(x) * (1.0 - scalar)) for x, scalar in\n zip(sensor_surfaces, sensor_intersection)]\n return (sensor_dir, sensor_cord, sensor_type, sensor_area,\n sensor_orientation, sensor_intersection)\n\n\ndef calc_sensors_building(building_geometry, grid_size):\n sensor_dir_list = []\n sensor_cord_list = []\n sensor_type_list = []\n sensor_area_list = []\n sensor_orientation_list = []\n sensor_intersection_list = []\n surfaces_types = ['walls', 'windows', 'roofs']\n sensor_vertical_grid_dim = grid_size['walls_grid']\n sensor_horizontal_grid_dim = grid_size['roof_grid']\n for srf_type in surfaces_types:\n occface_list = getattr(building_geometry, srf_type)\n if srf_type == 'roofs':\n orientation_list = ['top'] * len(occface_list)\n normals_list = [(0.0, 0.0, 1.0)] * len(occface_list)\n interesection_list = [0] * len(occface_list)\n elif srf_type == 'windows':\n orientation_list = getattr(building_geometry,\n 'orientation_{srf_type}'.format(srf_type=srf_type))\n normals_list = getattr(building_geometry, 'normals_{srf_type}'.\n format(srf_type=srf_type))\n interesection_list = [0] * len(occface_list)\n else:\n orientation_list = getattr(building_geometry,\n 'orientation_{srf_type}'.format(srf_type=srf_type))\n normals_list = getattr(building_geometry, 'normals_{srf_type}'.\n format(srf_type=srf_type))\n interesection_list = getattr(building_geometry,\n 'intersect_{srf_type}'.format(srf_type=srf_type))\n for orientation, normal, face, intersection in zip(orientation_list,\n normals_list, occface_list, interesection_list):\n (sensor_dir, sensor_cord, sensor_type, sensor_area,\n sensor_orientation, sensor_intersection) = (\n generate_sensor_surfaces(face, sensor_vertical_grid_dim,\n sensor_horizontal_grid_dim, srf_type, orientation, normal,\n intersection))\n sensor_intersection_list.extend(sensor_intersection)\n sensor_dir_list.extend(sensor_dir)\n sensor_cord_list.extend(sensor_cord)\n sensor_type_list.extend(sensor_type)\n 
sensor_area_list.extend(sensor_area)\n sensor_orientation_list.extend(sensor_orientation)\n return (sensor_dir_list, sensor_cord_list, sensor_type_list,\n sensor_area_list, sensor_orientation_list, sensor_intersection_list)\n\n\ndef calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir):\n sensors_coords_zone = []\n sensors_dir_zone = []\n sensors_total_number_list = []\n names_zone = []\n sensors_code_zone = []\n sensor_intersection_zone = []\n for building_name in building_names:\n building_geometry = BuildingGeometry.load(os.path.join(\n geometry_pickle_dir, 'zone', building_name))\n (sensors_dir_building, sensors_coords_building,\n sensors_type_building, sensors_area_building,\n sensor_orientation_building, sensor_intersection_building\n ) = calc_sensors_building(building_geometry, grid_size)\n sensors_number = len(sensors_coords_building)\n sensors_total_number_list.append(sensors_number)\n sensors_code = [('srf' + str(x)) for x in range(sensors_number)]\n sensors_code_zone.append(sensors_code)\n sensors_coords_zone.extend(sensors_coords_building)\n sensors_dir_zone.extend(sensors_dir_building)\n sensor_intersection_zone.append(sensor_intersection_building)\n names_zone.append(building_name)\n pd.DataFrame({'BUILDING': building_name, 'SURFACE': sensors_code,\n 'orientation': sensor_orientation_building, 'intersection':\n sensor_intersection_building, 'Xcoor': [x[0] for x in\n sensors_coords_building], 'Ycoor': [x[1] for x in\n sensors_coords_building], 'Zcoor': [x[2] for x in\n sensors_coords_building], 'Xdir': [x[0] for x in\n sensors_dir_building], 'Ydir': [x[1] for x in\n sensors_dir_building], 'Zdir': [x[2] for x in\n sensors_dir_building], 'AREA_m2': sensors_area_building, 'TYPE':\n sensors_type_building}).to_csv(locator.get_radiation_metadata(\n building_name), index=None)\n return (sensors_coords_zone, sensors_dir_zone,\n sensors_total_number_list, names_zone, sensors_code_zone,\n sensor_intersection_zone)\n\n\ndef isolation_daysim(chunk_n, cea_daysim, building_names, locator,\n radiance_parameters, write_sensor_data, grid_size, max_global,\n weatherfile, geometry_pickle_dir):\n daysim_project = cea_daysim.initialize_daysim_project('chunk_{n}'.\n format(n=chunk_n))\n print('Creating daysim project in: {daysim_dir}'.format(daysim_dir=\n daysim_project.project_path))\n print('Calculating and sending sensor points')\n (sensors_coords_zone, sensors_dir_zone, sensors_number_zone, names_zone,\n sensors_code_zone, sensor_intersection_zone) = (calc_sensors_zone(\n building_names, locator, grid_size, geometry_pickle_dir))\n num_sensors = sum(sensors_number_zone)\n daysim_project.create_sensor_input_file(sensors_coords_zone,\n sensors_dir_zone, num_sensors, 'w/m2')\n print('Starting Daysim simulation for buildings: {buildings}'.format(\n buildings=names_zone))\n print('Total number of sensors: {num_sensors}'.format(num_sensors=\n num_sensors))\n print('Writing radiance parameters')\n daysim_project.write_radiance_parameters(radiance_parameters['rad_ab'],\n radiance_parameters['rad_ad'], radiance_parameters['rad_as'],\n radiance_parameters['rad_ar'], radiance_parameters['rad_aa'],\n radiance_parameters['rad_lr'], radiance_parameters['rad_st'],\n radiance_parameters['rad_sj'], radiance_parameters['rad_lw'],\n radiance_parameters['rad_dj'], radiance_parameters['rad_ds'],\n radiance_parameters['rad_dr'], radiance_parameters['rad_dp'])\n print('Executing hourly solar isolation calculation')\n daysim_project.execute_gen_dc()\n daysim_project.execute_ds_illum()\n 
print('Reading results...')\n solar_res = daysim_project.eval_ill()\n print('Fixing inconsistencies, if any')\n solar_res = np.clip(solar_res, a_min=0.0, a_max=max_global)\n if solar_res.shape[1] == HOURS_IN_YEAR + 24:\n print('Removing leap day')\n leap_day_hours = range(1416, 1440)\n solar_res = np.delete(solar_res, leap_day_hours, axis=1)\n print('Writing results to disk')\n index = 0\n for building_name, sensors_number_building, sensor_code_building, sensor_intersection_building in zip(\n names_zone, sensors_number_zone, sensors_code_zone,\n sensor_intersection_zone):\n selection_of_results = solar_res[index:index + sensors_number_building]\n selection_of_results[np.array(sensor_intersection_building) == 1] = 0\n items_sensor_name_and_result = dict(zip(sensor_code_building,\n selection_of_results.tolist()))\n index = index + sensors_number_building\n write_aggregated_results(building_name,\n items_sensor_name_and_result, locator, weatherfile)\n if write_sensor_data:\n write_sensor_results(building_name,\n items_sensor_name_and_result, locator)\n print('Removing results folder')\n daysim_project.cleanup_project()\n\n\ndef write_sensor_results(building_name, items_sensor_name_and_result, locator):\n with open(locator.get_radiation_building_sensors(building_name), 'w'\n ) as outfile:\n json.dump(items_sensor_name_and_result, outfile)\n\n\ndef write_aggregated_results(building_name, items_sensor_name_and_result,\n locator, weatherfile):\n geometry = pd.read_csv(locator.get_radiation_metadata(building_name))\n geometry['code'] = geometry['TYPE'] + '_' + geometry['orientation'] + '_kW'\n solar_analysis_fields = ['windows_east_kW', 'windows_west_kW',\n 'windows_south_kW', 'windows_north_kW', 'walls_east_kW',\n 'walls_west_kW', 'walls_south_kW', 'walls_north_kW', 'roofs_top_kW']\n solar_analysis_fields_area = ['windows_east_m2', 'windows_west_m2',\n 'windows_south_m2', 'windows_north_m2', 'walls_east_m2',\n 'walls_west_m2', 'walls_south_m2', 'walls_north_m2', 'roofs_top_m2']\n dict_not_aggregated = {}\n for field, field_area in zip(solar_analysis_fields,\n solar_analysis_fields_area):\n select_sensors = geometry.loc[geometry['code'] == field].set_index(\n 'SURFACE')\n area_m2 = select_sensors['AREA_m2'].sum()\n array_field = np.array([(select_sensors.loc[surface, 'AREA_m2'] *\n np.array(items_sensor_name_and_result[surface])) for surface in\n select_sensors.index]).sum(axis=0)\n dict_not_aggregated[field] = array_field / 1000\n dict_not_aggregated[field_area] = area_m2\n data_aggregated_kW = pd.DataFrame(dict_not_aggregated).round(2)\n data_aggregated_kW['Date'] = weatherfile['date']\n data_aggregated_kW.set_index('Date', inplace=True)\n data_aggregated_kW.to_csv(locator.get_radiation_building(building_name))\n",
"step-4": "import json\nimport os\nimport numpy as np\nimport pandas as pd\nimport py4design.py2radiance as py2radiance\nimport py4design.py3dmodel.calculate as calculate\nfrom py4design import py3dmodel\n__author__ = 'Jimeno A. Fonseca'\n__copyright__ = (\n 'Copyright 2017, Architecture and Building Systems - ETH Zurich')\n__credits__ = ['Jimeno A. Fonseca', 'Kian Wee Chen']\n__license__ = 'MIT'\n__version__ = '0.1'\n__maintainer__ = 'Daren Thomas'\n__email__ = '[email protected]'\n__status__ = 'Production'\nfrom cea.constants import HOURS_IN_YEAR\nfrom cea.resources.radiation_daysim.geometry_generator import BuildingGeometry\nfrom cea import suppress_3rd_party_debug_loggers\nsuppress_3rd_party_debug_loggers()\n\n\ndef create_sensor_input_file(rad, chunk_n):\n sensor_file_path = os.path.join(rad.data_folder_path, 'points_' + str(\n chunk_n) + '.pts')\n sensor_file = open(sensor_file_path, 'w')\n sensor_pts_data = py2radiance.write_rad.sensor_file(rad.\n sensor_positions, rad.sensor_normals)\n sensor_file.write(sensor_pts_data)\n sensor_file.close()\n rad.sensor_file_path = sensor_file_path\n\n\ndef generate_sensor_surfaces(occface, wall_dim, roof_dim, srf_type,\n orientation, normal, intersection):\n mid_pt = py3dmodel.calculate.face_midpt(occface)\n location_pt = py3dmodel.modify.move_pt(mid_pt, normal, 0.01)\n moved_oface = py3dmodel.fetch.topo2topotype(py3dmodel.modify.move(\n mid_pt, location_pt, occface))\n if srf_type == 'roofs':\n xdim = ydim = roof_dim\n else:\n xdim = ydim = wall_dim\n sensor_surfaces = py3dmodel.construct.grid_face(moved_oface, xdim, ydim)\n sensor_intersection = [intersection for x in sensor_surfaces]\n sensor_dir = [normal for x in sensor_surfaces]\n sensor_cord = [py3dmodel.calculate.face_midpt(x) for x in sensor_surfaces]\n sensor_type = [srf_type for x in sensor_surfaces]\n sensor_orientation = [orientation for x in sensor_surfaces]\n sensor_area = [(calculate.face_area(x) * (1.0 - scalar)) for x, scalar in\n zip(sensor_surfaces, sensor_intersection)]\n return (sensor_dir, sensor_cord, sensor_type, sensor_area,\n sensor_orientation, sensor_intersection)\n\n\ndef calc_sensors_building(building_geometry, grid_size):\n sensor_dir_list = []\n sensor_cord_list = []\n sensor_type_list = []\n sensor_area_list = []\n sensor_orientation_list = []\n sensor_intersection_list = []\n surfaces_types = ['walls', 'windows', 'roofs']\n sensor_vertical_grid_dim = grid_size['walls_grid']\n sensor_horizontal_grid_dim = grid_size['roof_grid']\n for srf_type in surfaces_types:\n occface_list = getattr(building_geometry, srf_type)\n if srf_type == 'roofs':\n orientation_list = ['top'] * len(occface_list)\n normals_list = [(0.0, 0.0, 1.0)] * len(occface_list)\n interesection_list = [0] * len(occface_list)\n elif srf_type == 'windows':\n orientation_list = getattr(building_geometry,\n 'orientation_{srf_type}'.format(srf_type=srf_type))\n normals_list = getattr(building_geometry, 'normals_{srf_type}'.\n format(srf_type=srf_type))\n interesection_list = [0] * len(occface_list)\n else:\n orientation_list = getattr(building_geometry,\n 'orientation_{srf_type}'.format(srf_type=srf_type))\n normals_list = getattr(building_geometry, 'normals_{srf_type}'.\n format(srf_type=srf_type))\n interesection_list = getattr(building_geometry,\n 'intersect_{srf_type}'.format(srf_type=srf_type))\n for orientation, normal, face, intersection in zip(orientation_list,\n normals_list, occface_list, interesection_list):\n (sensor_dir, sensor_cord, sensor_type, sensor_area,\n sensor_orientation, 
sensor_intersection) = (\n generate_sensor_surfaces(face, sensor_vertical_grid_dim,\n sensor_horizontal_grid_dim, srf_type, orientation, normal,\n intersection))\n sensor_intersection_list.extend(sensor_intersection)\n sensor_dir_list.extend(sensor_dir)\n sensor_cord_list.extend(sensor_cord)\n sensor_type_list.extend(sensor_type)\n sensor_area_list.extend(sensor_area)\n sensor_orientation_list.extend(sensor_orientation)\n return (sensor_dir_list, sensor_cord_list, sensor_type_list,\n sensor_area_list, sensor_orientation_list, sensor_intersection_list)\n\n\ndef calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir):\n sensors_coords_zone = []\n sensors_dir_zone = []\n sensors_total_number_list = []\n names_zone = []\n sensors_code_zone = []\n sensor_intersection_zone = []\n for building_name in building_names:\n building_geometry = BuildingGeometry.load(os.path.join(\n geometry_pickle_dir, 'zone', building_name))\n (sensors_dir_building, sensors_coords_building,\n sensors_type_building, sensors_area_building,\n sensor_orientation_building, sensor_intersection_building\n ) = calc_sensors_building(building_geometry, grid_size)\n sensors_number = len(sensors_coords_building)\n sensors_total_number_list.append(sensors_number)\n sensors_code = [('srf' + str(x)) for x in range(sensors_number)]\n sensors_code_zone.append(sensors_code)\n sensors_coords_zone.extend(sensors_coords_building)\n sensors_dir_zone.extend(sensors_dir_building)\n sensor_intersection_zone.append(sensor_intersection_building)\n names_zone.append(building_name)\n pd.DataFrame({'BUILDING': building_name, 'SURFACE': sensors_code,\n 'orientation': sensor_orientation_building, 'intersection':\n sensor_intersection_building, 'Xcoor': [x[0] for x in\n sensors_coords_building], 'Ycoor': [x[1] for x in\n sensors_coords_building], 'Zcoor': [x[2] for x in\n sensors_coords_building], 'Xdir': [x[0] for x in\n sensors_dir_building], 'Ydir': [x[1] for x in\n sensors_dir_building], 'Zdir': [x[2] for x in\n sensors_dir_building], 'AREA_m2': sensors_area_building, 'TYPE':\n sensors_type_building}).to_csv(locator.get_radiation_metadata(\n building_name), index=None)\n return (sensors_coords_zone, sensors_dir_zone,\n sensors_total_number_list, names_zone, sensors_code_zone,\n sensor_intersection_zone)\n\n\ndef isolation_daysim(chunk_n, cea_daysim, building_names, locator,\n radiance_parameters, write_sensor_data, grid_size, max_global,\n weatherfile, geometry_pickle_dir):\n daysim_project = cea_daysim.initialize_daysim_project('chunk_{n}'.\n format(n=chunk_n))\n print('Creating daysim project in: {daysim_dir}'.format(daysim_dir=\n daysim_project.project_path))\n print('Calculating and sending sensor points')\n (sensors_coords_zone, sensors_dir_zone, sensors_number_zone, names_zone,\n sensors_code_zone, sensor_intersection_zone) = (calc_sensors_zone(\n building_names, locator, grid_size, geometry_pickle_dir))\n num_sensors = sum(sensors_number_zone)\n daysim_project.create_sensor_input_file(sensors_coords_zone,\n sensors_dir_zone, num_sensors, 'w/m2')\n print('Starting Daysim simulation for buildings: {buildings}'.format(\n buildings=names_zone))\n print('Total number of sensors: {num_sensors}'.format(num_sensors=\n num_sensors))\n print('Writing radiance parameters')\n daysim_project.write_radiance_parameters(radiance_parameters['rad_ab'],\n radiance_parameters['rad_ad'], radiance_parameters['rad_as'],\n radiance_parameters['rad_ar'], radiance_parameters['rad_aa'],\n radiance_parameters['rad_lr'], 
radiance_parameters['rad_st'],\n radiance_parameters['rad_sj'], radiance_parameters['rad_lw'],\n radiance_parameters['rad_dj'], radiance_parameters['rad_ds'],\n radiance_parameters['rad_dr'], radiance_parameters['rad_dp'])\n print('Executing hourly solar isolation calculation')\n daysim_project.execute_gen_dc()\n daysim_project.execute_ds_illum()\n print('Reading results...')\n solar_res = daysim_project.eval_ill()\n print('Fixing inconsistencies, if any')\n solar_res = np.clip(solar_res, a_min=0.0, a_max=max_global)\n if solar_res.shape[1] == HOURS_IN_YEAR + 24:\n print('Removing leap day')\n leap_day_hours = range(1416, 1440)\n solar_res = np.delete(solar_res, leap_day_hours, axis=1)\n print('Writing results to disk')\n index = 0\n for building_name, sensors_number_building, sensor_code_building, sensor_intersection_building in zip(\n names_zone, sensors_number_zone, sensors_code_zone,\n sensor_intersection_zone):\n selection_of_results = solar_res[index:index + sensors_number_building]\n selection_of_results[np.array(sensor_intersection_building) == 1] = 0\n items_sensor_name_and_result = dict(zip(sensor_code_building,\n selection_of_results.tolist()))\n index = index + sensors_number_building\n write_aggregated_results(building_name,\n items_sensor_name_and_result, locator, weatherfile)\n if write_sensor_data:\n write_sensor_results(building_name,\n items_sensor_name_and_result, locator)\n print('Removing results folder')\n daysim_project.cleanup_project()\n\n\ndef write_sensor_results(building_name, items_sensor_name_and_result, locator):\n with open(locator.get_radiation_building_sensors(building_name), 'w'\n ) as outfile:\n json.dump(items_sensor_name_and_result, outfile)\n\n\ndef write_aggregated_results(building_name, items_sensor_name_and_result,\n locator, weatherfile):\n geometry = pd.read_csv(locator.get_radiation_metadata(building_name))\n geometry['code'] = geometry['TYPE'] + '_' + geometry['orientation'] + '_kW'\n solar_analysis_fields = ['windows_east_kW', 'windows_west_kW',\n 'windows_south_kW', 'windows_north_kW', 'walls_east_kW',\n 'walls_west_kW', 'walls_south_kW', 'walls_north_kW', 'roofs_top_kW']\n solar_analysis_fields_area = ['windows_east_m2', 'windows_west_m2',\n 'windows_south_m2', 'windows_north_m2', 'walls_east_m2',\n 'walls_west_m2', 'walls_south_m2', 'walls_north_m2', 'roofs_top_m2']\n dict_not_aggregated = {}\n for field, field_area in zip(solar_analysis_fields,\n solar_analysis_fields_area):\n select_sensors = geometry.loc[geometry['code'] == field].set_index(\n 'SURFACE')\n area_m2 = select_sensors['AREA_m2'].sum()\n array_field = np.array([(select_sensors.loc[surface, 'AREA_m2'] *\n np.array(items_sensor_name_and_result[surface])) for surface in\n select_sensors.index]).sum(axis=0)\n dict_not_aggregated[field] = array_field / 1000\n dict_not_aggregated[field_area] = area_m2\n data_aggregated_kW = pd.DataFrame(dict_not_aggregated).round(2)\n data_aggregated_kW['Date'] = weatherfile['date']\n data_aggregated_kW.set_index('Date', inplace=True)\n data_aggregated_kW.to_csv(locator.get_radiation_building(building_name))\n",
"step-5": "import json\nimport os\n\nimport numpy as np\nimport pandas as pd\nimport py4design.py2radiance as py2radiance\nimport py4design.py3dmodel.calculate as calculate\nfrom py4design import py3dmodel\n\n__author__ = \"Jimeno A. Fonseca\"\n__copyright__ = \"Copyright 2017, Architecture and Building Systems - ETH Zurich\"\n__credits__ = [\"Jimeno A. Fonseca\", \"Kian Wee Chen\"]\n__license__ = \"MIT\"\n__version__ = \"0.1\"\n__maintainer__ = \"Daren Thomas\"\n__email__ = \"[email protected]\"\n__status__ = \"Production\"\n\nfrom cea.constants import HOURS_IN_YEAR\nfrom cea.resources.radiation_daysim.geometry_generator import BuildingGeometry\nfrom cea import suppress_3rd_party_debug_loggers\n\nsuppress_3rd_party_debug_loggers()\n\n\ndef create_sensor_input_file(rad, chunk_n):\n sensor_file_path = os.path.join(rad.data_folder_path, \"points_\" + str(chunk_n) + \".pts\")\n sensor_file = open(sensor_file_path, \"w\")\n sensor_pts_data = py2radiance.write_rad.sensor_file(rad.sensor_positions, rad.sensor_normals)\n sensor_file.write(sensor_pts_data)\n sensor_file.close()\n rad.sensor_file_path = sensor_file_path\n\n\ndef generate_sensor_surfaces(occface, wall_dim, roof_dim, srf_type, orientation, normal, intersection):\n mid_pt = py3dmodel.calculate.face_midpt(occface)\n location_pt = py3dmodel.modify.move_pt(mid_pt, normal, 0.01)\n moved_oface = py3dmodel.fetch.topo2topotype(py3dmodel.modify.move(mid_pt, location_pt, occface))\n if srf_type == 'roofs':\n xdim = ydim = roof_dim\n else:\n xdim = ydim = wall_dim\n # put it into occ and subdivide surfaces\n sensor_surfaces = py3dmodel.construct.grid_face(moved_oface, xdim, ydim)\n\n # calculate list of properties per surface\n sensor_intersection = [intersection for x in sensor_surfaces]\n sensor_dir = [normal for x in sensor_surfaces]\n sensor_cord = [py3dmodel.calculate.face_midpt(x) for x in sensor_surfaces]\n sensor_type = [srf_type for x in sensor_surfaces]\n sensor_orientation = [orientation for x in sensor_surfaces]\n sensor_area = [calculate.face_area(x) * (1.0 - scalar) for x, scalar in zip(sensor_surfaces, sensor_intersection)]\n\n return sensor_dir, sensor_cord, sensor_type, sensor_area, sensor_orientation, sensor_intersection\n\n\ndef calc_sensors_building(building_geometry, grid_size):\n sensor_dir_list = []\n sensor_cord_list = []\n sensor_type_list = []\n sensor_area_list = []\n sensor_orientation_list = []\n sensor_intersection_list = []\n surfaces_types = ['walls', 'windows', 'roofs']\n sensor_vertical_grid_dim = grid_size[\"walls_grid\"]\n sensor_horizontal_grid_dim = grid_size[\"roof_grid\"]\n for srf_type in surfaces_types:\n occface_list = getattr(building_geometry, srf_type)\n if srf_type == 'roofs':\n orientation_list = ['top'] * len(occface_list)\n normals_list = [(0.0, 0.0, 1.0)] * len(occface_list)\n interesection_list = [0] * len(occface_list)\n elif srf_type == 'windows':\n orientation_list = getattr(building_geometry, \"orientation_{srf_type}\".format(srf_type=srf_type))\n normals_list = getattr(building_geometry, \"normals_{srf_type}\".format(srf_type=srf_type))\n interesection_list = [0] * len(occface_list)\n else:\n orientation_list = getattr(building_geometry, \"orientation_{srf_type}\".format(srf_type=srf_type))\n normals_list = getattr(building_geometry, \"normals_{srf_type}\".format(srf_type=srf_type))\n interesection_list = getattr(building_geometry, \"intersect_{srf_type}\".format(srf_type=srf_type))\n for orientation, normal, face, intersection in zip(orientation_list, normals_list, occface_list,\n 
interesection_list):\n sensor_dir, \\\n sensor_cord, \\\n sensor_type, \\\n sensor_area, \\\n sensor_orientation, \\\n sensor_intersection = generate_sensor_surfaces(face,\n sensor_vertical_grid_dim,\n sensor_horizontal_grid_dim,\n srf_type,\n orientation,\n normal,\n intersection)\n sensor_intersection_list.extend(sensor_intersection)\n sensor_dir_list.extend(sensor_dir)\n sensor_cord_list.extend(sensor_cord)\n sensor_type_list.extend(sensor_type)\n sensor_area_list.extend(sensor_area)\n sensor_orientation_list.extend(sensor_orientation)\n\n return sensor_dir_list, sensor_cord_list, sensor_type_list, sensor_area_list, sensor_orientation_list, sensor_intersection_list\n\n\ndef calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir):\n sensors_coords_zone = []\n sensors_dir_zone = []\n sensors_total_number_list = []\n names_zone = []\n sensors_code_zone = []\n sensor_intersection_zone = []\n for building_name in building_names:\n building_geometry = BuildingGeometry.load(os.path.join(geometry_pickle_dir, 'zone', building_name))\n # get sensors in the building\n sensors_dir_building, \\\n sensors_coords_building, \\\n sensors_type_building, \\\n sensors_area_building, \\\n sensor_orientation_building, \\\n sensor_intersection_building = calc_sensors_building(building_geometry, grid_size)\n\n # get the total number of sensors and store in lst\n sensors_number = len(sensors_coords_building)\n sensors_total_number_list.append(sensors_number)\n\n sensors_code = ['srf' + str(x) for x in range(sensors_number)]\n sensors_code_zone.append(sensors_code)\n\n # get the total list of coordinates and directions to send to daysim\n sensors_coords_zone.extend(sensors_coords_building)\n sensors_dir_zone.extend(sensors_dir_building)\n\n # get total list of intersections\n sensor_intersection_zone.append(sensor_intersection_building)\n\n # get the name of all buildings\n names_zone.append(building_name)\n\n # save sensors geometry result to disk\n pd.DataFrame({'BUILDING': building_name,\n 'SURFACE': sensors_code,\n 'orientation': sensor_orientation_building,\n 'intersection': sensor_intersection_building,\n 'Xcoor': [x[0] for x in sensors_coords_building],\n 'Ycoor': [x[1] for x in sensors_coords_building],\n 'Zcoor': [x[2] for x in sensors_coords_building],\n 'Xdir': [x[0] for x in sensors_dir_building],\n 'Ydir': [x[1] for x in sensors_dir_building],\n 'Zdir': [x[2] for x in sensors_dir_building],\n 'AREA_m2': sensors_area_building,\n 'TYPE': sensors_type_building}).to_csv(locator.get_radiation_metadata(building_name), index=None)\n\n return sensors_coords_zone, sensors_dir_zone, sensors_total_number_list, names_zone, sensors_code_zone, sensor_intersection_zone\n\n\ndef isolation_daysim(chunk_n, cea_daysim, building_names, locator, radiance_parameters, write_sensor_data, grid_size,\n max_global, weatherfile, geometry_pickle_dir):\n # initialize daysim project\n daysim_project = cea_daysim.initialize_daysim_project('chunk_{n}'.format(n=chunk_n))\n print('Creating daysim project in: {daysim_dir}'.format(daysim_dir=daysim_project.project_path))\n\n # calculate sensors\n print(\"Calculating and sending sensor points\")\n sensors_coords_zone, \\\n sensors_dir_zone, \\\n sensors_number_zone, \\\n names_zone, \\\n sensors_code_zone, \\\n sensor_intersection_zone = calc_sensors_zone(building_names, locator, grid_size, geometry_pickle_dir)\n\n num_sensors = sum(sensors_number_zone)\n daysim_project.create_sensor_input_file(sensors_coords_zone, sensors_dir_zone, num_sensors, \"w/m2\")\n\n 
print(\"Starting Daysim simulation for buildings: {buildings}\".format(buildings=names_zone))\n print(\"Total number of sensors: {num_sensors}\".format(num_sensors=num_sensors))\n\n print('Writing radiance parameters')\n daysim_project.write_radiance_parameters(radiance_parameters[\"rad_ab\"], radiance_parameters[\"rad_ad\"],\n radiance_parameters[\"rad_as\"], radiance_parameters[\"rad_ar\"],\n radiance_parameters[\"rad_aa\"], radiance_parameters[\"rad_lr\"],\n radiance_parameters[\"rad_st\"], radiance_parameters[\"rad_sj\"],\n radiance_parameters[\"rad_lw\"], radiance_parameters[\"rad_dj\"],\n radiance_parameters[\"rad_ds\"], radiance_parameters[\"rad_dr\"],\n radiance_parameters[\"rad_dp\"])\n\n print('Executing hourly solar isolation calculation')\n daysim_project.execute_gen_dc()\n daysim_project.execute_ds_illum()\n\n print('Reading results...')\n solar_res = daysim_project.eval_ill()\n\n # check inconsistencies and replace by max value of weather file\n print('Fixing inconsistencies, if any')\n solar_res = np.clip(solar_res, a_min=0.0, a_max=max_global)\n\n # Check if leap year and remove extra day\n if solar_res.shape[1] == HOURS_IN_YEAR + 24:\n print('Removing leap day')\n leap_day_hours = range(1416, 1440)\n solar_res = np.delete(solar_res, leap_day_hours, axis=1)\n\n print(\"Writing results to disk\")\n index = 0\n for building_name, \\\n sensors_number_building, \\\n sensor_code_building, \\\n sensor_intersection_building in zip(names_zone,\n sensors_number_zone,\n sensors_code_zone,\n sensor_intersection_zone):\n # select sensors data\n selection_of_results = solar_res[index:index + sensors_number_building]\n selection_of_results[np.array(sensor_intersection_building) == 1] = 0\n items_sensor_name_and_result = dict(zip(sensor_code_building, selection_of_results.tolist()))\n index = index + sensors_number_building\n\n # create summary and save to disk\n write_aggregated_results(building_name, items_sensor_name_and_result, locator, weatherfile)\n\n if write_sensor_data:\n write_sensor_results(building_name, items_sensor_name_and_result, locator)\n\n # erase daysim folder to avoid conflicts after every iteration\n print('Removing results folder')\n daysim_project.cleanup_project()\n\n\ndef write_sensor_results(building_name, items_sensor_name_and_result, locator):\n with open(locator.get_radiation_building_sensors(building_name), 'w') as outfile:\n json.dump(items_sensor_name_and_result, outfile)\n\n\ndef write_aggregated_results(building_name, items_sensor_name_and_result, locator, weatherfile):\n geometry = pd.read_csv(locator.get_radiation_metadata(building_name))\n geometry['code'] = geometry['TYPE'] + '_' + geometry['orientation'] + '_kW'\n solar_analysis_fields = ['windows_east_kW',\n 'windows_west_kW',\n 'windows_south_kW',\n 'windows_north_kW',\n 'walls_east_kW',\n 'walls_west_kW',\n 'walls_south_kW',\n 'walls_north_kW',\n 'roofs_top_kW']\n solar_analysis_fields_area = ['windows_east_m2',\n 'windows_west_m2',\n 'windows_south_m2',\n 'windows_north_m2',\n 'walls_east_m2',\n 'walls_west_m2',\n 'walls_south_m2',\n 'walls_north_m2',\n 'roofs_top_m2']\n dict_not_aggregated = {}\n\n for field, field_area in zip(solar_analysis_fields, solar_analysis_fields_area):\n select_sensors = geometry.loc[geometry['code'] == field].set_index('SURFACE')\n area_m2 = select_sensors['AREA_m2'].sum()\n array_field = np.array([select_sensors.loc[surface, 'AREA_m2'] *\n np.array(items_sensor_name_and_result[surface])\n for surface in select_sensors.index]).sum(axis=0)\n dict_not_aggregated[field] 
= array_field / 1000 # in kWh\n dict_not_aggregated[field_area] = area_m2\n\n data_aggregated_kW = (pd.DataFrame(dict_not_aggregated)).round(2)\n data_aggregated_kW[\"Date\"] = weatherfile[\"date\"]\n data_aggregated_kW.set_index('Date', inplace=True)\n data_aggregated_kW.to_csv(locator.get_radiation_building(building_name))\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
#
# @lc app=leetcode.cn id=15 lang=python3
#
# [15] 3Sum
#
# https://leetcode-cn.com/problems/3sum/description/
#
# algorithms
# Medium (25.76%)
# Likes: 1904
# Dislikes: 0
# Total Accepted: 176.6K
# Total Submissions: 679K
# Testcase Example: '[-1,0,1,2,-1,-4]'
#
# Given an array nums of n integers, determine whether nums contains three
# elements a, b, c such that a + b + c = 0. Find all unique triplets that
# satisfy this condition.
#
# Note: the answer must not contain duplicate triplets.
#
#
#
# Example:
#
# Given the array nums = [-1, 0, 1, 2, -1, -4],
#
# the set of triplets satisfying the requirement is:
# [
# [-1, 0, 1],
# [-1, -1, 2]
# ]
#
# 1. Brute force with three nested loops
# 2. Two-pointer approach
# 3. Hashmap approach
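
# For reference, a sketch of the hashmap variant (approach 3; the code below
# submits approach 2). Fix nums[k], scan j > k, and look up the complement
# -(nums[k] + nums[j]) among values already seen (dedup omitted):
#
#   seen = set()
#   for j in range(k + 1, len(nums)):
#       if -(nums[k] + nums[j]) in seen:
#           res.append((nums[k], -(nums[k] + nums[j]), nums[j]))
#       seen.add(nums[j])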
# @lc code=start
from typing import List


class Solution:
def threeSum(self, nums: List[int]) -> List[List[int]]:
res = []
nums.sort()
for k in range(len(nums) - 2):
if k > 0 and nums[k] == nums[k-1]:
                continue
if nums[k] > 0:
break
L, R = k+1, len(nums) - 1
while L < R:
s = nums[k] + nums[L] + nums[R]
if s < 0:
L += 1
elif s > 0:
R -= 1
else:
res.append((nums[k], nums[L], nums[R]))
while L < R and nums[L] == nums[L+1]:
L += 1
while L < R and nums[R] == nums[R-1]:
R -= 1
L += 1
R -= 1
return res
# @lc code=end
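
# A minimal smoke test, kept outside the @lc markers so it is not part of the
# LeetCode submission (a sketch; note that the solution appends tuples):
if __name__ == "__main__":
    print(Solution().threeSum([-1, 0, 1, 2, -1, -4]))
    # -> [(-1, -1, 2), (-1, 0, 1)]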
|
normal
|
{
"blob_id": "ccf3ada9a2bedf29820170f2e8184fc16f1b7aea",
"index": 9580,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def threeSum(self, nums: List[int]) ->List[List[int]]:\n res = []\n nums.sort()\n for k in range(len(nums) - 2):\n if k > 0 and nums[k] == nums[k - 1]:\n continuere\n if nums[k] > 0:\n break\n L, R = k + 1, len(nums) - 1\n while L < R:\n s = nums[k] + nums[L] + nums[R]\n if s < 0:\n L += 1\n elif s > 0:\n R -= 1\n else:\n res.append((nums[k], nums[L], nums[R]))\n while L < R and nums[L] == nums[L + 1]:\n L += 1\n while L < R and nums[R] == nums[R - 1]:\n R -= 1\n L += 1\n R -= 1\n return res\n",
"step-4": "#\n# @lc app=leetcode.cn id=15 lang=python3\n#\n# [15] 三数之和\n#\n# https://leetcode-cn.com/problems/3sum/description/\n#\n# algorithms\n# Medium (25.76%)\n# Likes: 1904\n# Dislikes: 0\n# Total Accepted: 176.6K\n# Total Submissions: 679K\n# Testcase Example: '[-1,0,1,2,-1,-4]'\n#\n# 给你一个包含 n 个整数的数组 nums,判断 nums 中是否存在三个元素 a,b,c ,使得 a + b + c = 0\n# ?请你找出所有满足条件且不重复的三元组。\n#\n# 注意:答案中不可以包含重复的三元组。\n#\n#\n#\n# 示例:\n#\n# 给定数组 nums = [-1, 0, 1, 2, -1, -4],\n#\n# 满足要求的三元组集合为:\n# [\n# [-1, 0, 1],\n# [-1, -1, 2]\n# ]\n#\n# 1. 三层循环暴力求解\n# 2. 双指针求解\n# 3. hashmap 求解\n\n# @lc code=start\n\n\nclass Solution:\n def threeSum(self, nums: List[int]) -> List[List[int]]:\n res = []\n nums.sort()\n for k in range(len(nums) - 2):\n if k > 0 and nums[k] == nums[k-1]:\n continuere\n if nums[k] > 0:\n break\n L, R = k+1, len(nums) - 1\n while L < R:\n s = nums[k] + nums[L] + nums[R]\n if s < 0:\n L += 1\n elif s > 0:\n R -= 1\n else:\n res.append((nums[k], nums[L], nums[R]))\n while L < R and nums[L] == nums[L+1]:\n L += 1\n while L < R and nums[R] == nums[R-1]:\n R -= 1\n L += 1\n R -= 1\n return res\n# @lc code=end\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import pyaudio
import numpy as np
from collections import OrderedDict
import utils
class MasterPlayer(object):
def __init__(self, volume=1., samplesPerSecond=44100):
self.p = pyaudio.PyAudio()
self.volume = volume
self.samplesPerSecond = samplesPerSecond
self.individual_callbacks = OrderedDict()
self.volumes = {}
def __del__(self):
self.p.terminate()
def play(self):
self.offset = 0
def callback(in_data, frame_count, time_info, status):
total_stereo = np.zeros((frame_count*2))
time = self.offset / float(self.samplesPerSecond)
for ic in self.individual_callbacks:
left, right = ic(self.offset, time, frame_count)
if left is None: # dead voice
continue
stereo = utils.to_stereo(left, right)
# Accumulate
total_stereo += stereo * self.volumes[ic]
self.offset += frame_count
output = utils.np_to_frames(total_stereo * self.volume)
return (output, pyaudio.paContinue)
self.stream = self.p.open(format=self.p.get_format_from_width(2),
channels=2,
rate=self.samplesPerSecond,
output=True,
stream_callback=callback)
self.stream.start_stream()
def stop(self):
self.stream.stop_stream()
def register(self, callback):
self.individual_callbacks[callback] = {}
self.volumes[callback] = 1.
def unregister(self, callback):
if callback in self.individual_callbacks:
del self.individual_callbacks[callback]
del self.volumes[callback]
def set_volume(self, callback, volume):
self.volumes[callback] = volume
MAXVOLUME = 32767.
def sawtooth(x):
return np.mod(x / (2*np.pi), 1.)
class ADSR(object):
def __init__(self, a=0.01, d=0.1, s=0.8, r=0.5, mode='linear'):
self.a = a
self.d = d
self.s = s
self.r = r
assert mode == 'linear'
def get_envelope_pressed(self, delta):
'''
:param delta: time after pressed
:return: envelope (between 0 and 1)
'''
delta = delta.astype(float)
#assert delta>0.
envelope = np.zeros(len(delta))
# attack
attack = delta < self.a
envelope[attack] = delta[attack] / self.a
# decay
decay = (delta < self.a + self.d) & (delta >= self.a)
envelope[decay] = 1 - (1 - self.s) * (delta[decay] - self.a) / self.d
# sustain
sustain = (delta >= self.a + self.d)
envelope[sustain] = self.s
return envelope
def get_envelope_released(self, delta):
'''
:param delta: time after released
:return: envelope (between 0 and 1)
'''
delta = delta.astype(float)
envelope = np.zeros(len(delta))
# release
release = delta < self.r
envelope[release] = self.s * (self.r - delta[release]) / self.r
# dead
dead = delta >= self.r
all_dead = np.all(dead)
return envelope, all_dead
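
# A quick numeric sanity check of the envelope (a sketch, using the defaults
# a=0.01, d=0.1, s=0.8, r=0.5; commented out so importing this module stays
# side-effect free):
#
#   env = ADSR()
#   t = np.linspace(0., 0.7, 8)
#   env.get_envelope_pressed(t)   # ramps to 1 in the attack, settles at s=0.8
#   env.get_envelope_released(t)  # -> (envelope, all_dead); all_dead only when every t >= r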
class SineWavePlayer(object):
def __init__(self, freq, samplerate, adsr, motherwave=None):
self.freq = freq
self.samplerate = samplerate
self.pressed = False
self.volume = 0.3
#self.wave = np.sin
if motherwave is None:
            motherwave = sawtooth  # the wave function itself; sawtooth() here would raise a TypeError
self.wave = motherwave
self.adsr = adsr
self.dead = True
def __call__(self, offset, time, frame_count):
# Find out which state we are in
# Dead/NewPress/Pressed/NewRelease/Released/Dead
if self.pressed:
if self.new_press:
# Initialize phase to prevent clicking
self.onset = time
self.new_press = False
# Relative time after press
time_after_press = (time + np.arange(frame_count, dtype=float) / self.samplerate - self.onset)
left = self.volume * MAXVOLUME * self.wave(time_after_press * 2*np.pi * self.freq)
envelope = self.adsr.get_envelope_pressed(time_after_press)
left *= envelope
right = left
elif not self.dead:
if self.new_release:
self.new_release = False
self.release_time = time
# Relative time after release
time_after_press = (time + np.arange(frame_count, dtype=float) / self.samplerate - self.onset)
time_after_release = (time + np.arange(frame_count, dtype=float) / self.samplerate - self.release_time)
left = self.volume * MAXVOLUME * self.wave(time_after_press * 2*np.pi * self.freq)
envelope, self.dead = self.adsr.get_envelope_released(time_after_release)
left *= envelope
right = left
else:
left = right = None
return left, right
def press(self):
self.pressed = True
self.new_press = True
self.dead = False
def release(self):
self.pressed = False
self.new_release = True
def note_to_freq(note):
reference_a = 45
return np.exp(np.log(440) + (note - reference_a) / 12. * np.log(2))
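
# Sanity check for the tuning (equal temperament, A4 pinned to index 45):
#   note_to_freq(45) -> 440.0
#   note_to_freq(57) -> 880.0  # one octave up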
class NaivePoly(object):
def __init__(self, octaves, samplerate, adsr, motherwave):
self.voices = []
self.octaves = octaves
for note in xrange(self.octaves*12):
# Compute frequency -> 440hz is note 45
freq = note_to_freq(note)
# Initialize voice
self.voices.append(SineWavePlayer(freq, samplerate, adsr, motherwave))
print 'note {} freq {}'.format(note, freq)
def register(self, master):
for voice in self.voices:
master.register(voice)
def unregister(self, master):
for voice in self.voices:
master.unregister(voice)
def press(self, key):
self.voices[key].press()
def release(self, key):
self.voices[key].release()
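
# A minimal wiring sketch (commented out; note this module is Python 2 code,
# as the print statement and xrange above show, and audio output is a side
# effect):
#
#   master = MasterPlayer()
#   poly = NaivePoly(octaves=3, samplerate=44100, adsr=ADSR(), motherwave=sawtooth)
#   poly.register(master)
#   master.play()
#   poly.press(45)    # A4 = 440 Hz
#   poly.release(45)
#   master.stop()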
|
normal
|
{
"blob_id": "c4e4e54ac93c2acdbd3a1cd22b200341a6e45688",
"index": 224,
"step-1": "import pyaudio\nimport numpy as np\nfrom collections import OrderedDict\nimport utils\n\n\nclass MasterPlayer(object):\n def __init__(self, volume=1., samplesPerSecond=44100):\n self.p = pyaudio.PyAudio()\n self.volume = volume\n self.samplesPerSecond = samplesPerSecond\n self.individual_callbacks = OrderedDict()\n self.volumes = {}\n\n def __del__(self):\n self.p.terminate()\n\n def play(self):\n\n self.offset = 0\n def callback(in_data, frame_count, time_info, status):\n total_stereo = np.zeros((frame_count*2))\n time = self.offset / float(self.samplesPerSecond)\n\n for ic in self.individual_callbacks:\n left, right = ic(self.offset, time, frame_count)\n if left is None: # dead voice\n continue\n stereo = utils.to_stereo(left, right)\n # Accumulate\n total_stereo += stereo * self.volumes[ic]\n\n self.offset += frame_count\n output = utils.np_to_frames(total_stereo * self.volume)\n return (output, pyaudio.paContinue)\n\n self.stream = self.p.open(format=self.p.get_format_from_width(2),\n channels=2,\n rate=self.samplesPerSecond,\n output=True,\n stream_callback=callback)\n self.stream.start_stream()\n\n def stop(self):\n self.stream.stop_stream()\n\n def register(self, callback):\n self.individual_callbacks[callback] = {}\n self.volumes[callback] = 1.\n\n def unregister(self, callback):\n if callback in self.individual_callbacks:\n del self.individual_callbacks[callback]\n del self.volumes[callback]\n\n def set_volume(self, callback, volume):\n self.volumes[callback] = volume\n\nMAXVOLUME = 32767.\n\n\ndef sawtooth(x):\n return np.mod(x / (2*np.pi), 1.)\n\nclass ADSR(object):\n def __init__(self, a=0.01, d=0.1, s=0.8, r=0.5, mode='linear'):\n self.a = a\n self.d = d\n self.s = s\n self.r = r\n assert mode == 'linear'\n\n def get_envelope_pressed(self, delta):\n '''\n :param delta: time after pressed\n :return: envelope (between 0 and 1)\n '''\n delta = delta.astype(float)\n #assert delta>0.\n envelope = np.zeros(len(delta))\n # attack\n attack = delta < self.a\n envelope[attack] = delta[attack] / self.a\n # decay\n decay = (delta < self.a + self.d) & (delta >= self.a)\n envelope[decay] = 1 - (1 - self.s) * (delta[decay] - self.a) / self.d\n # sustain\n sustain = (delta >= self.a + self.d)\n envelope[sustain] = self.s\n\n return envelope\n\n def get_envelope_released(self, delta):\n '''\n :param delta: time after released\n :return: envelope (between 0 and 1)\n '''\n delta = delta.astype(float)\n envelope = np.zeros(len(delta))\n\n # release\n release = delta < self.r\n envelope[release] = self.s * (self.r - delta[release]) / self.r\n\n # dead\n dead = delta >= self.r\n all_dead = np.all(dead)\n\n return envelope, all_dead\n\n\nclass SineWavePlayer(object):\n def __init__(self, freq, samplerate, adsr, motherwave=None):\n self.freq = freq\n self.samplerate = samplerate\n self.pressed = False\n self.volume = 0.3\n #self.wave = np.sin\n if motherwave is None:\n motherwave = sawtooth()\n self.wave = motherwave\n self.adsr = adsr\n self.dead = True\n\n def __call__(self, offset, time, frame_count):\n\n # Find out which state we are in\n # Dead/NewPress/Pressed/NewRelease/Released/Dead\n if self.pressed:\n if self.new_press:\n # Initialize phase to prevent clicking\n self.onset = time\n self.new_press = False\n # Relative time after press\n time_after_press = (time + np.arange(frame_count, dtype=float) / self.samplerate - self.onset)\n\n left = self.volume * MAXVOLUME * self.wave(time_after_press * 2*np.pi * self.freq)\n envelope = self.adsr.get_envelope_pressed(time_after_press)\n 
left *= envelope\n right = left\n elif not self.dead:\n if self.new_release:\n self.new_release = False\n self.release_time = time\n # Relative time after release\n time_after_press = (time + np.arange(frame_count, dtype=float) / self.samplerate - self.onset)\n time_after_release = (time + np.arange(frame_count, dtype=float) / self.samplerate - self.release_time)\n\n left = self.volume * MAXVOLUME * self.wave(time_after_press * 2*np.pi * self.freq)\n envelope, self.dead = self.adsr.get_envelope_released(time_after_release)\n left *= envelope\n right = left\n else:\n left = right = None\n return left, right\n\n def press(self):\n self.pressed = True\n self.new_press = True\n self.dead = False\n\n def release(self):\n self.pressed = False\n self.new_release = True\n\n\ndef note_to_freq(note):\n reference_a = 45\n return np.exp(np.log(440) + (note - reference_a) / 12. * np.log(2))\n\n\nclass NaivePoly(object):\n def __init__(self, octaves, samplerate, adsr, motherwave):\n self.voices = []\n self.octaves = octaves\n for note in xrange(self.octaves*12):\n # Compute frequency -> 440hz is note 45\n freq = note_to_freq(note)\n # Initialize voice\n self.voices.append(SineWavePlayer(freq, samplerate, adsr, motherwave))\n print 'note {} freq {}'.format(note, freq)\n\n def register(self, master):\n for voice in self.voices:\n master.register(voice)\n\n def unregister(self, master):\n for voice in self.voices:\n master.unregister(voice)\n\n def press(self, key):\n self.voices[key].press()\n\n def release(self, key):\n self.voices[key].release()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 12 20:29:49 2019
@author: kzx789
"""
from PIL import Image
import os, glob, numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
import cv2
import pymysql
import MySQLdb as mysql
"""
# Read the CSV and print the nutrition info
def get_Nutrition(str) :
nutrition = pd.read_csv('C:/식품영양정보/영양정보.csv')
print(nutrition[nutrition['음식명'] == str])
"""
# Display every image that was used
def drawing_plt():
thisImg = os.listdir(caltech_dir)
row = 4
    cols = int(math.ceil(len(thisImg)/4)) # round up
fig = plt.figure()
i = 1
    for image in glob.glob("C:/cnnTest/*.jpg"): # use glob to fetch the files used for the test
img = cv2.imread(image)
subplot = fig.add_subplot(row, cols, i)
        subplot.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)) # original colors (BGR -> RGB)
        subplot.set_title(thisImg[i-1]) # attach the title
subplot.axis("off")
i += 1
print('\t',"전체 이미지 리스트 ")
plt.show()
# Display the individual image that matches the condition
def get_Image(str):
imgPath = 'C:/cnnTest/'
image = cv2.imread(imgPath+str)
image = cv2.cvtColor(image,cv2.COLOR_BGR2RGB)
plt.imshow(image)
plt.xticks([])
plt.yticks([])
plt.show()
# Fetch the nutrient info from the database
def get_DB_Nutrition(str):
db = pymysql.connect(host="localhost", user = "yeha", password="", db="nutrition")
    cur = db.cursor() # create a Cursor from the Connection
sql = "SELECT * FROM NUTRITION_INFO WHERE FOODNAME LIKE '음식명' OR FOODNAME LIKE %s"
cur.execute(sql,(str))
    data = cur.fetchall() # fetch every row
df = pd.Series(data[0],data[1])
print(df)
db.close()
caltech_dir = "C:/cnnTest"
# Resize the test data to 128*128
image_w = 128
image_h = 128
pixels = image_h * image_w * 3 # pixel count
X = []
#filenames = []
files = os.listdir(caltech_dir) # list the files in the directory
#print(files) # check the image list
for i in range(len(files)):
files[i]=caltech_dir+'/'+ files[i]
#print(files)
for f in files:
img = Image.open(f)
img = img.convert("RGB")
img = img.resize((image_w, image_h))
data = np.asarray(img)
# filenames.append(f)
X.append(data)
X = np.array(X)
#print(X)
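
# X now has shape (num_images, 128, 128, 3); this is assumed to match the
# input layer of the saved model loaded below, which is not defined here.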
# Load the trained model
from keras.models import load_model
model = load_model("C:/image/train/model/multi_img_classification.model")
prediction = model.predict(X)
#print(prediction)
np.set_printoptions(formatter={'float': lambda x: "{0:0.3f}".format(x)})
print('프로그램을 실행합니다..')
print('\n')
thisImg = os.listdir(caltech_dir)
cnt = 0
for i in prediction:
    pre_ans = i.argmax() # predicted label // index of the largest value
#print(i)
#print(pre_ans)
pre_ans_str = ''
    if pre_ans == 0: pre_ans_str = "연어회"  # salmon sashimi
    elif pre_ans == 1: pre_ans_str = "쌀국수"  # rice noodles
    elif pre_ans == 2: pre_ans_str = "샌드위치"  # sandwich
    else: pre_ans_str = "새우튀김"  # fried shrimp
if i[0] >= 0.8 :
get_Image(thisImg[cnt])
print(thisImg[cnt]+" 이미지는 "+pre_ans_str+"(으)로 추정됩니다.")
#get_Nutrition(pre_ans_str)
get_DB_Nutrition(pre_ans_str)
if i[1] >= 0.8:
get_Image(thisImg[cnt])
print(thisImg[cnt]+" 이미지는 "+pre_ans_str+"(으)로 추정됩니다.")
#get_Nutrition(pre_ans_str)
get_DB_Nutrition(pre_ans_str)
if i[2] >= 0.8:
get_Image(thisImg[cnt])
print(thisImg[cnt]+" 이미지는 "+pre_ans_str+"(으)로 추정됩니다.")
#get_Nutrition(pre_ans_str)
get_DB_Nutrition(pre_ans_str)
if i[3] >= 0.8:
get_Image(thisImg[cnt])
print(thisImg[cnt]+" 이미지는 "+pre_ans_str+"(으)로 추정됩니다.")
#get_Nutrition(pre_ans_str)
get_DB_Nutrition(pre_ans_str)
cnt += 1
drawing_plt()
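
# The four threshold branches in the loop above are equivalent to one check,
# since a softmax output lets at most one probability reach 0.8 (a sketch):
#
#   if any(p >= 0.8 for p in i):
#       get_Image(thisImg[cnt])
#       print(thisImg[cnt]+" 이미지는 "+pre_ans_str+"(으)로 추정됩니다.")
#       get_DB_Nutrition(pre_ans_str)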
|
normal
|
{
"blob_id": "1255a9df2fbe11d92991f3f0f7054b92cb017628",
"index": 2941,
"step-1": "<mask token>\n\n\ndef drawing_plt():\n thisImg = os.listdir(caltech_dir)\n row = 4\n cols = int(math.ceil(len(thisImg) / 4))\n fig = plt.figure()\n i = 1\n for image in glob.glob('C:/cnnTest/*.jpg'):\n img = cv2.imread(image)\n subplot = fig.add_subplot(row, cols, i)\n subplot.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n subplot.set_title(thisImg[i - 1])\n subplot.axis('off')\n i += 1\n print('\\t', '전체 이미지 리스트 ')\n plt.show()\n\n\ndef get_Image(str):\n imgPath = 'C:/cnnTest/'\n image = cv2.imread(imgPath + str)\n image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n plt.imshow(image)\n plt.xticks([])\n plt.yticks([])\n plt.show()\n\n\ndef get_DB_Nutrition(str):\n db = pymysql.connect(host='localhost', user='yeha', password='', db=\n 'nutrition')\n cur = db.cursor()\n sql = (\n \"SELECT * FROM NUTRITION_INFO WHERE FOODNAME LIKE '음식명' OR FOODNAME LIKE %s\"\n )\n cur.execute(sql, str)\n data = cur.fetchall()\n df = pd.Series(data[0], data[1])\n print(df)\n db.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef drawing_plt():\n thisImg = os.listdir(caltech_dir)\n row = 4\n cols = int(math.ceil(len(thisImg) / 4))\n fig = plt.figure()\n i = 1\n for image in glob.glob('C:/cnnTest/*.jpg'):\n img = cv2.imread(image)\n subplot = fig.add_subplot(row, cols, i)\n subplot.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n subplot.set_title(thisImg[i - 1])\n subplot.axis('off')\n i += 1\n print('\\t', '전체 이미지 리스트 ')\n plt.show()\n\n\ndef get_Image(str):\n imgPath = 'C:/cnnTest/'\n image = cv2.imread(imgPath + str)\n image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n plt.imshow(image)\n plt.xticks([])\n plt.yticks([])\n plt.show()\n\n\ndef get_DB_Nutrition(str):\n db = pymysql.connect(host='localhost', user='yeha', password='', db=\n 'nutrition')\n cur = db.cursor()\n sql = (\n \"SELECT * FROM NUTRITION_INFO WHERE FOODNAME LIKE '음식명' OR FOODNAME LIKE %s\"\n )\n cur.execute(sql, str)\n data = cur.fetchall()\n df = pd.Series(data[0], data[1])\n print(df)\n db.close()\n\n\n<mask token>\nfor i in range(len(files)):\n files[i] = caltech_dir + '/' + files[i]\nfor f in files:\n img = Image.open(f)\n img = img.convert('RGB')\n img = img.resize((image_w, image_h))\n data = np.asarray(img)\n X.append(data)\n<mask token>\nnp.set_printoptions(formatter={'float': lambda x: '{0:0.3f}'.format(x)})\nprint('프로그램을 실행합니다..')\nprint('\\n')\n<mask token>\nfor i in prediction:\n pre_ans = i.argmax()\n pre_ans_str = ''\n if pre_ans == 0:\n pre_ans_str = '연어회'\n elif pre_ans == 1:\n pre_ans_str = '쌀국수'\n elif pre_ans == 2:\n pre_ans_str = '샌드위치'\n else:\n pre_ans_str = '새우튀김'\n if i[0] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[1] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[2] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[3] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n cnt += 1\ndrawing_plt()\n",
"step-3": "<mask token>\n\n\ndef drawing_plt():\n thisImg = os.listdir(caltech_dir)\n row = 4\n cols = int(math.ceil(len(thisImg) / 4))\n fig = plt.figure()\n i = 1\n for image in glob.glob('C:/cnnTest/*.jpg'):\n img = cv2.imread(image)\n subplot = fig.add_subplot(row, cols, i)\n subplot.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n subplot.set_title(thisImg[i - 1])\n subplot.axis('off')\n i += 1\n print('\\t', '전체 이미지 리스트 ')\n plt.show()\n\n\ndef get_Image(str):\n imgPath = 'C:/cnnTest/'\n image = cv2.imread(imgPath + str)\n image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n plt.imshow(image)\n plt.xticks([])\n plt.yticks([])\n plt.show()\n\n\ndef get_DB_Nutrition(str):\n db = pymysql.connect(host='localhost', user='yeha', password='', db=\n 'nutrition')\n cur = db.cursor()\n sql = (\n \"SELECT * FROM NUTRITION_INFO WHERE FOODNAME LIKE '음식명' OR FOODNAME LIKE %s\"\n )\n cur.execute(sql, str)\n data = cur.fetchall()\n df = pd.Series(data[0], data[1])\n print(df)\n db.close()\n\n\ncaltech_dir = 'C:/cnnTest'\nimage_w = 128\nimage_h = 128\npixels = image_h * image_w * 3\nX = []\nfiles = os.listdir(caltech_dir)\nfor i in range(len(files)):\n files[i] = caltech_dir + '/' + files[i]\nfor f in files:\n img = Image.open(f)\n img = img.convert('RGB')\n img = img.resize((image_w, image_h))\n data = np.asarray(img)\n X.append(data)\nX = np.array(X)\n<mask token>\nmodel = load_model('C:/image/train/model/multi_img_classification.model')\nprediction = model.predict(X)\nnp.set_printoptions(formatter={'float': lambda x: '{0:0.3f}'.format(x)})\nprint('프로그램을 실행합니다..')\nprint('\\n')\nthisImg = os.listdir(caltech_dir)\ncnt = 0\nfor i in prediction:\n pre_ans = i.argmax()\n pre_ans_str = ''\n if pre_ans == 0:\n pre_ans_str = '연어회'\n elif pre_ans == 1:\n pre_ans_str = '쌀국수'\n elif pre_ans == 2:\n pre_ans_str = '샌드위치'\n else:\n pre_ans_str = '새우튀김'\n if i[0] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[1] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[2] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[3] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n cnt += 1\ndrawing_plt()\n",
"step-4": "<mask token>\nfrom PIL import Image\nimport os, glob, numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport math\nimport cv2\nimport pymysql\nimport MySQLdb as mysql\n<mask token>\n\n\ndef drawing_plt():\n thisImg = os.listdir(caltech_dir)\n row = 4\n cols = int(math.ceil(len(thisImg) / 4))\n fig = plt.figure()\n i = 1\n for image in glob.glob('C:/cnnTest/*.jpg'):\n img = cv2.imread(image)\n subplot = fig.add_subplot(row, cols, i)\n subplot.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n subplot.set_title(thisImg[i - 1])\n subplot.axis('off')\n i += 1\n print('\\t', '전체 이미지 리스트 ')\n plt.show()\n\n\ndef get_Image(str):\n imgPath = 'C:/cnnTest/'\n image = cv2.imread(imgPath + str)\n image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n plt.imshow(image)\n plt.xticks([])\n plt.yticks([])\n plt.show()\n\n\ndef get_DB_Nutrition(str):\n db = pymysql.connect(host='localhost', user='yeha', password='', db=\n 'nutrition')\n cur = db.cursor()\n sql = (\n \"SELECT * FROM NUTRITION_INFO WHERE FOODNAME LIKE '음식명' OR FOODNAME LIKE %s\"\n )\n cur.execute(sql, str)\n data = cur.fetchall()\n df = pd.Series(data[0], data[1])\n print(df)\n db.close()\n\n\ncaltech_dir = 'C:/cnnTest'\nimage_w = 128\nimage_h = 128\npixels = image_h * image_w * 3\nX = []\nfiles = os.listdir(caltech_dir)\nfor i in range(len(files)):\n files[i] = caltech_dir + '/' + files[i]\nfor f in files:\n img = Image.open(f)\n img = img.convert('RGB')\n img = img.resize((image_w, image_h))\n data = np.asarray(img)\n X.append(data)\nX = np.array(X)\nfrom keras.models import load_model\nmodel = load_model('C:/image/train/model/multi_img_classification.model')\nprediction = model.predict(X)\nnp.set_printoptions(formatter={'float': lambda x: '{0:0.3f}'.format(x)})\nprint('프로그램을 실행합니다..')\nprint('\\n')\nthisImg = os.listdir(caltech_dir)\ncnt = 0\nfor i in prediction:\n pre_ans = i.argmax()\n pre_ans_str = ''\n if pre_ans == 0:\n pre_ans_str = '연어회'\n elif pre_ans == 1:\n pre_ans_str = '쌀국수'\n elif pre_ans == 2:\n pre_ans_str = '샌드위치'\n else:\n pre_ans_str = '새우튀김'\n if i[0] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[1] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[2] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n if i[3] >= 0.8:\n get_Image(thisImg[cnt])\n print(thisImg[cnt] + ' 이미지는 ' + pre_ans_str + '(으)로 추정됩니다.')\n get_DB_Nutrition(pre_ans_str)\n cnt += 1\ndrawing_plt()\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Mar 12 20:29:49 2019\n\n@author: kzx789\n\"\"\"\n\nfrom PIL import Image\nimport os, glob, numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport math\nimport cv2\nimport pymysql\nimport MySQLdb as mysql\n\n\"\"\"\n#csv를 읽어서 영양정보 출력\ndef get_Nutrition(str) :\n nutrition = pd.read_csv('C:/식품영양정보/영양정보.csv') \n print(nutrition[nutrition['음식명'] == str])\n\"\"\" \n#사용된 전체 이미지 출력\ndef drawing_plt():\n thisImg = os.listdir(caltech_dir)\n row = 4\n cols = int(math.ceil(len(thisImg)/4)) #반올림\n fig = plt.figure()\n i = 1\n \n for image in glob.glob(\"C:/cnnTest/*.jpg\"): #glob를 사용해서 Test로 사용된 파일 가져오기\n img = cv2.imread(image)\n subplot = fig.add_subplot(row, cols, i)\n subplot.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)) #기본컬러\n subplot.set_title(thisImg[i-1]) #타이틀 붙이기\n subplot.axis(\"off\") \n i += 1\n print('\\t',\"전체 이미지 리스트 \")\n plt.show()\n\n#조건에 맞는 개별 이미지 출력\ndef get_Image(str):\n imgPath = 'C:/cnnTest/'\n image = cv2.imread(imgPath+str)\n image = cv2.cvtColor(image,cv2.COLOR_BGR2RGB)\n plt.imshow(image)\n plt.xticks([])\n plt.yticks([])\n plt.show()\n\n#데이터베이스에서 영양소 정보 가지고 오기\ndef get_DB_Nutrition(str):\n db = pymysql.connect(host=\"localhost\", user = \"yeha\", password=\"\", db=\"nutrition\")\n cur = db.cursor() #Connection에서 Cursor생성\n sql = \"SELECT * FROM NUTRITION_INFO WHERE FOODNAME LIKE '음식명' OR FOODNAME LIKE %s\"\n cur.execute(sql,(str))\n data = cur.fetchall() #정보 전부 가져오기\n df = pd.Series(data[0],data[1])\n print(df)\n db.close()\n\n\ncaltech_dir = \"C:/cnnTest\"\n\n#테스트할 데이터들을 128*128로 지정\nimage_w = 128\nimage_h = 128\npixels = image_h * image_w * 3 #픽셀 지정\n\nX = []\n#filenames = []\n\nfiles = os.listdir(caltech_dir) #하위 디렉터리 파일 리스트 구하기\n\n#print(files) #이미지 목록 확인 \n\nfor i in range(len(files)):\n files[i]=caltech_dir+'/'+ files[i]\n#print(files) \n\nfor f in files:\n img = Image.open(f)\n img = img.convert(\"RGB\")\n img = img.resize((image_w, image_h))\n data = np.asarray(img)\n # filenames.append(f)\n X.append(data)\n\nX = np.array(X)\n#print(X)\n\n#모델 불러오기\nfrom keras.models import load_model\n\nmodel = load_model(\"C:/image/train/model/multi_img_classification.model\")\nprediction = model.predict(X)\n#print(prediction)\n\nnp.set_printoptions(formatter={'float': lambda x: \"{0:0.3f}\".format(x)})\n\n\nprint('프로그램을 실행합니다..')\nprint('\\n')\nthisImg = os.listdir(caltech_dir)\ncnt = 0\n\nfor i in prediction:\n pre_ans = i.argmax() # 예측 레이블//가장 큰 번째 수\n #print(i)\n #print(pre_ans)\n pre_ans_str = ''\n if pre_ans == 0: pre_ans_str = \"연어회\"\n elif pre_ans == 1: pre_ans_str = \"쌀국수\"\n elif pre_ans == 2: pre_ans_str = \"샌드위치\"\n else: pre_ans_str = \"새우튀김\"\n\n if i[0] >= 0.8 : \n get_Image(thisImg[cnt])\n print(thisImg[cnt]+\" 이미지는 \"+pre_ans_str+\"(으)로 추정됩니다.\")\n #get_Nutrition(pre_ans_str) \n get_DB_Nutrition(pre_ans_str)\n\n if i[1] >= 0.8: \n get_Image(thisImg[cnt])\n print(thisImg[cnt]+\" 이미지는 \"+pre_ans_str+\"(으)로 추정됩니다.\")\n #get_Nutrition(pre_ans_str) \n get_DB_Nutrition(pre_ans_str)\n\n\n if i[2] >= 0.8: \n get_Image(thisImg[cnt])\n print(thisImg[cnt]+\" 이미지는 \"+pre_ans_str+\"(으)로 추정됩니다.\")\n #get_Nutrition(pre_ans_str) \n get_DB_Nutrition(pre_ans_str)\n\n if i[3] >= 0.8: \n get_Image(thisImg[cnt])\n print(thisImg[cnt]+\" 이미지는 \"+pre_ans_str+\"(으)로 추정됩니다.\")\n #get_Nutrition(pre_ans_str) \n get_DB_Nutrition(pre_ans_str)\n cnt += 1\n \ndrawing_plt()\n\n ",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from app import create_app
from app.config import Config
app = create_app(Config)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=True)
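
# Launch sketch (assumes this file is the project entry point, e.g. run.py):
#   $ python run.py
#   # the app then listens on http://0.0.0.0:5000/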
|
normal
|
{
"blob_id": "bea90bbcd4d34b64c21f022b6f3af2bee2d978e4",
"index": 1123,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=5000, debug=True)\n",
"step-3": "<mask token>\napp = create_app(Config)\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=5000, debug=True)\n",
"step-4": "from app import create_app\nfrom app.config import Config\napp = create_app(Config)\nif __name__ == '__main__':\n app.run(host='0.0.0.0', port=5000, debug=True)\n",
"step-5": "from app import create_app\nfrom app.config import Config\n\n\napp = create_app(Config)\n\n\nif __name__ == \"__main__\":\n app.run(host=\"0.0.0.0\", port=5000, debug=True)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from more_itertools import ilen
from my.body import weight, shower, food, water
def test_body() -> None:
for func in (weight, shower, food, water):
assert ilen(func()) >= 1
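
# Typically collected by pytest (an assumption about the surrounding repo):
#   $ pytest -k test_body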
|
normal
|
{
"blob_id": "e06b740f27e41b9f120c962fd76a38a29d54af3c",
"index": 973,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_body() ->None:\n for func in (weight, shower, food, water):\n assert ilen(func()) >= 1\n",
"step-3": "from more_itertools import ilen\nfrom my.body import weight, shower, food, water\n\n\ndef test_body() ->None:\n for func in (weight, shower, food, water):\n assert ilen(func()) >= 1\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import numpy as np
import random
with open("./roc.txt", "r") as fin:
with open("./roc_shuffle.txt", "w") as fout:
tmp = []
for k, line in enumerate(fin):
i = k + 1
if i % 6 == 0:
idx = [0] + np.random.permutation(range(1,5)).tolist()
for sen in np.take(tmp, idx).tolist():
fout.write(sen+"\n")
tmp = []
fout.write(line.strip()+"\n")
else:
tmp.append(line.strip())
with open("./roc.txt", "r") as fin:
with open("./roc_repeat.txt", "w") as fout:
tmp = []
for k, line in enumerate(fin):
i = k + 1
if i % 6 == 0:
idx = random.randint(1,4)
tmp[idx] = tmp[idx][:-1] + tmp[idx]
for sen in tmp:
fout.write(sen+"\n")
tmp = []
fout.write(line.strip()+"\n")
else:
tmp.append(line.strip())
with open("./roc.txt", "r") as fin:
with open("./roc_replace.txt", "w") as fout:
post, tmp = [], []
for k, line in enumerate(fin):
i = k + 1
if i % 6 == 0:
post.append(tmp)
tmp = []
else:
tmp.append(line.strip().split())
data = {"1":[], "2":[], "3":[], "4":[], "5":[]}
for p in post:
for i in range(5):
data["%d"%(i+1)].append(p[i])
random_data = data.copy()
for i in range(5):
random_data["%d"%(i+1)] = np.random.permutation(random_data["%d"%(i+1)])
for k in range(len(post)):
idx = np.random.permutation(range(1,5))[0]
for i in range(5):
if i == idx:
fout.write(' '.join(random_data["%d"%(i+1)][k])+"\n")
else:
fout.write(' '.join(data["%d"%(i+1)][k])+"\n")
fout.write("------\n")
|
normal
|
{
"blob_id": "2aec0581413d4fb0ffb4090231fde0fed974bf18",
"index": 27,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_shuffle.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = [0] + np.random.permutation(range(1, 5)).tolist()\n for sen in np.take(tmp, idx).tolist():\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_repeat.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = random.randint(1, 4)\n tmp[idx] = tmp[idx][:-1] + tmp[idx]\n for sen in tmp:\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_replace.txt', 'w') as fout:\n post, tmp = [], []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n post.append(tmp)\n tmp = []\n else:\n tmp.append(line.strip().split())\n data = {'1': [], '2': [], '3': [], '4': [], '5': []}\n for p in post:\n for i in range(5):\n data['%d' % (i + 1)].append(p[i])\n random_data = data.copy()\n for i in range(5):\n random_data['%d' % (i + 1)] = np.random.permutation(random_data\n ['%d' % (i + 1)])\n for k in range(len(post)):\n idx = np.random.permutation(range(1, 5))[0]\n for i in range(5):\n if i == idx:\n fout.write(' '.join(random_data['%d' % (i + 1)][k]) + '\\n')\n else:\n fout.write(' '.join(data['%d' % (i + 1)][k]) + '\\n')\n fout.write('------\\n')\n",
"step-3": "import numpy as np\nimport random\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_shuffle.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = [0] + np.random.permutation(range(1, 5)).tolist()\n for sen in np.take(tmp, idx).tolist():\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_repeat.txt', 'w') as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = random.randint(1, 4)\n tmp[idx] = tmp[idx][:-1] + tmp[idx]\n for sen in tmp:\n fout.write(sen + '\\n')\n tmp = []\n fout.write(line.strip() + '\\n')\n else:\n tmp.append(line.strip())\nwith open('./roc.txt', 'r') as fin:\n with open('./roc_replace.txt', 'w') as fout:\n post, tmp = [], []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n post.append(tmp)\n tmp = []\n else:\n tmp.append(line.strip().split())\n data = {'1': [], '2': [], '3': [], '4': [], '5': []}\n for p in post:\n for i in range(5):\n data['%d' % (i + 1)].append(p[i])\n random_data = data.copy()\n for i in range(5):\n random_data['%d' % (i + 1)] = np.random.permutation(random_data\n ['%d' % (i + 1)])\n for k in range(len(post)):\n idx = np.random.permutation(range(1, 5))[0]\n for i in range(5):\n if i == idx:\n fout.write(' '.join(random_data['%d' % (i + 1)][k]) + '\\n')\n else:\n fout.write(' '.join(data['%d' % (i + 1)][k]) + '\\n')\n fout.write('------\\n')\n",
"step-4": "import numpy as np\nimport random\n\nwith open(\"./roc.txt\", \"r\") as fin:\n with open(\"./roc_shuffle.txt\", \"w\") as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = [0] + np.random.permutation(range(1,5)).tolist()\n for sen in np.take(tmp, idx).tolist():\n fout.write(sen+\"\\n\")\n tmp = []\n fout.write(line.strip()+\"\\n\")\n else:\n tmp.append(line.strip())\nwith open(\"./roc.txt\", \"r\") as fin:\n with open(\"./roc_repeat.txt\", \"w\") as fout:\n tmp = []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n idx = random.randint(1,4)\n tmp[idx] = tmp[idx][:-1] + tmp[idx]\n for sen in tmp:\n fout.write(sen+\"\\n\")\n tmp = []\n fout.write(line.strip()+\"\\n\")\n else:\n tmp.append(line.strip())\nwith open(\"./roc.txt\", \"r\") as fin:\n with open(\"./roc_replace.txt\", \"w\") as fout:\n post, tmp = [], []\n for k, line in enumerate(fin):\n i = k + 1\n if i % 6 == 0:\n post.append(tmp)\n tmp = []\n else:\n tmp.append(line.strip().split())\n data = {\"1\":[], \"2\":[], \"3\":[], \"4\":[], \"5\":[]}\n for p in post:\n for i in range(5):\n data[\"%d\"%(i+1)].append(p[i])\n random_data = data.copy()\n for i in range(5):\n random_data[\"%d\"%(i+1)] = np.random.permutation(random_data[\"%d\"%(i+1)])\n\n for k in range(len(post)):\n idx = np.random.permutation(range(1,5))[0]\n for i in range(5):\n if i == idx:\n fout.write(' '.join(random_data[\"%d\"%(i+1)][k])+\"\\n\")\n else:\n fout.write(' '.join(data[\"%d\"%(i+1)][k])+\"\\n\")\n fout.write(\"------\\n\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
import asyncio
import bs4
import itertools
import logging
import sys
import os
import zipfile
from asyncio import TimeoutError
from aiohttp import ClientSession, ClientConnectionError
from aiohttp.client_exceptions import ContentTypeError, ServerDisconnectedError
from bs4 import BeautifulSoup
ROOT_URL = 'https://ulrichsweb.serialssolutions.com/titleDetails/{}'
DEFAULT_START_ID = 12515
DEFAULT_END_ID = 835018
DEFAULT_RANGE_1 = range(DEFAULT_START_ID, DEFAULT_END_ID)
DEFAULT_RANGE_2 = range(15793473, 15798807)
DEFAULT_RANGE_IDS = itertools.chain(DEFAULT_RANGE_1, DEFAULT_RANGE_2)
DEFAULT_DIR_HTML = 'data/ulrich/html/'
DEFAULT_MAX_ATTEMPTS = 5
DEFAULT_MODE = 'collect'
DEFAULT_NUM_THREADS = 4
DEFAULT_SEMAPHORE_LIMIT = 2
DEFAULT_ATTRS = {'bd_Title', 'bd_ISSN', 'bd_Format', 'bd_Frequency', 'bd_Country'}
def _find_all_tr_pairs(key: str, title_details, profile_id):
try:
return title_details.find('div', {'id': key}).find('table', {'class': 'resultsTable'}).find_all('tr')
except AttributeError:
logging.warning('ID %s (KEY) %s doest not have resultsTable' % (profile_id, key))
def _split_journal_attrs(attrs):
if attrs:
return [t.text.replace(':', '').strip().split('\n') for t in
[k for k in attrs if isinstance(k, bs4.element.Tag)]]
return []
def _get_title_history(history_attrs):
all_td = []
if history_attrs:
for h in history_attrs:
all_td.extend(h.find_all('td'))
if len(all_td) > 0:
return '#'.join([''.join([a.strip() for a in k.text.split('\n')]) for k in all_td if isinstance(k, bs4.element.Tag)])
return ''
def _get_pair_key_values(splitted_attrs, prefix: str):
tmp_dict = {}
for j in splitted_attrs:
tmp_dict[prefix + j[0].replace('\t', ' ')] = '#'.join(
[k.strip().replace('\t', ' ').replace('#', ' ') for k in j[1:] if k.strip() != ''])
return tmp_dict
def html2dict(path_zip_file: str):
"""
Open, reads and converts a zipped html into a dict.
:param path_zip_file: path of the zip file
:return: a dict where each key is the profile id and the value is its key-value pairs (attrs)
"""
profile_id = path_zip_file.split('/')[-1].split('.')[0]
inner_html_path = 'data/ulrich/html/' + profile_id + '.html'
html_content = zipfile.ZipFile(path_zip_file).open(inner_html_path).read()
parsed_data = [profile_id]
soupped_html = BeautifulSoup(html_content, 'html.parser')
title_details = soupped_html.find('div', {'id': 'resultPane'})
basic_description_attrs = _find_all_tr_pairs('basicDescriptionContainer', title_details, profile_id)
title_history_attrs = _find_all_tr_pairs('titleHistoryContainer', title_details, profile_id)
bd_splitted = _split_journal_attrs(basic_description_attrs)
dict_bd = _get_pair_key_values(bd_splitted, 'bd_')
title_history = _get_title_history(title_history_attrs)
for k in sorted(DEFAULT_ATTRS):
parsed_data.append(dict_bd.get(k, ''))
parsed_data.append(title_history)
return parsed_data
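
# Shape of the returned list (field order follows sorted(DEFAULT_ATTRS)):
#   [profile_id, bd_Country, bd_Format, bd_Frequency, bd_ISSN, bd_Title,
#    title_history]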
def save_tsv_file(parsed_data):
"""
Save a parsed journal to a tsv file
:param parsed_data: a list of dictionaries where the only main key is a profile_id and its value is the pairs of journal's attributes
"""
result_file.write('\t'.join(parsed_data) + '\n')
def save_into_html_file(path_html_file: str, response):
"""
Receives a response (in text format).
Saves the document into a html file.
"""
html_file = open(path_html_file, 'w')
html_file.writelines(response)
html_file.close()
with zipfile.ZipFile(path_html_file.replace('.html', '.zip'), 'w') as zf:
zf.write(path_html_file, compress_type=zipfile.ZIP_DEFLATED)
zf.close()
os.remove(path_html_file)
async def fetch(url, session):
"""
Fetches the url.
Calls the method save_into_html_file with the response as a parameter (in text format).
"""
try:
async with session.get(url) as response:
profile_id = url.split('/')[-1]
print('COLLECTING %s' % profile_id)
for attempt in range(DEFAULT_MAX_ATTEMPTS):
try:
if response.status == 200:
response = await response.text(errors='ignore')
save_into_html_file(DEFAULT_DIR_HTML + profile_id + '.html', response)
logging.info('COLLECTED: %s' % profile_id)
break
                    elif response.status == 500 and attempt == DEFAULT_MAX_ATTEMPTS - 1:  # range() stops at MAX - 1
logging.info('RESPONSE_ERROR_500: %s' % profile_id)
elif response.status == 404:
logging.info('RESPONSE_ERROR_404: %s' % profile_id)
except ServerDisconnectedError:
logging.info('SERVER_DISCONNECTED_ERROR: %s' % profile_id)
except TimeoutError:
logging.info('TIMEOUT_ERROR: %s' % profile_id)
except ContentTypeError:
logging.info('CONTENT_TYPE_ERROR: %s' % profile_id)
except TimeoutError:
logging.info('GENERALIZED_TIMEOUT_ERROR')
except ClientConnectionError:
logging.info('GENERALIZED_CLIENT_CONNECTION_ERROR')
except ServerDisconnectedError:
logging.info('GENERALIZED_SERVER_DISCONNECTED_ERROR')
except ContentTypeError:
logging.info('GENERALIZED_CONTENT_TYPE_ERROR')
async def bound_fetch(sem, url, session):
"""
Limits the collecting task to a semaphore.
"""
async with sem:
await fetch(url, session)
async def run():
"""
Creates tasks to get the html file with respect to a list composed by htmls.
"""
sem = asyncio.Semaphore(DEFAULT_SEMAPHORE_LIMIT)
tasks = []
async with ClientSession() as session:
for u in [ROOT_URL.format(jid) for jid in DEFAULT_RANGE_IDS]:
task = asyncio.ensure_future(bound_fetch(sem, u, session))
tasks.append(task)
responses = asyncio.gather(*tasks)
await responses
if __name__ == "__main__":
logging.basicConfig(filename='ulrich.log', level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
MODE = sys.argv[1]
DIR_HTML = sys.argv[2]
if MODE == 'collect':
DEFAULT_DIR_HTML = DIR_HTML
os.makedirs(DEFAULT_DIR_HTML, exist_ok=True)
if len(sys.argv) == 4:
start_id = int(sys.argv[3])
DEFAULT_RANGE_IDS = itertools.chain(range(start_id, DEFAULT_END_ID), DEFAULT_RANGE_2)
loop = asyncio.get_event_loop()
future = asyncio.ensure_future(run())
loop.run_until_complete(future)
elif MODE == 'parse':
DEFAULT_DIR_HTML = DIR_HTML
START = int(sys.argv[3])
END = int(sys.argv[4])
if END > len(os.listdir(DEFAULT_DIR_HTML)):
END = len(os.listdir(DEFAULT_DIR_HTML))
htmls = sorted([DEFAULT_DIR_HTML + h for h in os.listdir(DIR_HTML)])[START:END]
result_file = open(DEFAULT_DIR_HTML + '../' + str(START) + '.tsv', 'w')
result_file.write('\t'.join(['Profile Identifier'] + sorted(DEFAULT_ATTRS) + ['title_history']) + '\n')
for i, h in enumerate(sorted(htmls)):
print('\r%d / %d' % (i + 1 + START, START + len(htmls)), end='')
parsed = html2dict(h)
save_tsv_file(parsed)
result_file.close()
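
# Invocation sketch (modes taken from the __main__ block above; the script
# file name is assumed):
#   $ python ulrich.py collect data/ulrich/html/ [START_ID]
#   $ python ulrich.py parse data/ulrich/html/ START END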
|
normal
|
{
"blob_id": "002f65fd77ce5043d1a0495ed13c15e3b4d2fb76",
"index": 7244,
"step-1": "<mask token>\n\n\ndef _split_journal_attrs(attrs):\n if attrs:\n return [t.text.replace(':', '').strip().split('\\n') for t in [k for\n k in attrs if isinstance(k, bs4.element.Tag)]]\n return []\n\n\ndef _get_title_history(history_attrs):\n all_td = []\n if history_attrs:\n for h in history_attrs:\n all_td.extend(h.find_all('td'))\n if len(all_td) > 0:\n return '#'.join([''.join([a.strip() for a in k.text.split('\\n')]) for\n k in all_td if isinstance(k, bs4.element.Tag)])\n return ''\n\n\ndef _get_pair_key_values(splitted_attrs, prefix: str):\n tmp_dict = {}\n for j in splitted_attrs:\n tmp_dict[prefix + j[0].replace('\\t', ' ')] = '#'.join([k.strip().\n replace('\\t', ' ').replace('#', ' ') for k in j[1:] if k.strip(\n ) != ''])\n return tmp_dict\n\n\ndef html2dict(path_zip_file: str):\n \"\"\"\n Open, reads and converts a zipped html into a dict.\n :param path_zip_file: path of the zip file\n :return: a dict where each key is the profile id and the value is its key-value pairs (attrs)\n \"\"\"\n profile_id = path_zip_file.split('/')[-1].split('.')[0]\n inner_html_path = 'data/ulrich/html/' + profile_id + '.html'\n html_content = zipfile.ZipFile(path_zip_file).open(inner_html_path).read()\n parsed_data = [profile_id]\n soupped_html = BeautifulSoup(html_content, 'html.parser')\n title_details = soupped_html.find('div', {'id': 'resultPane'})\n basic_description_attrs = _find_all_tr_pairs('basicDescriptionContainer',\n title_details, profile_id)\n title_history_attrs = _find_all_tr_pairs('titleHistoryContainer',\n title_details, profile_id)\n bd_splitted = _split_journal_attrs(basic_description_attrs)\n dict_bd = _get_pair_key_values(bd_splitted, 'bd_')\n title_history = _get_title_history(title_history_attrs)\n for k in sorted(DEFAULT_ATTRS):\n parsed_data.append(dict_bd.get(k, ''))\n parsed_data.append(title_history)\n return parsed_data\n\n\ndef save_tsv_file(parsed_data):\n \"\"\"\n Save a parsed journal to a tsv file\n :param parsed_data: a list of dictionaries where the only main key is a profile_id and its value is the pairs of journal's attributes\n \"\"\"\n result_file.write('\\t'.join(parsed_data) + '\\n')\n\n\ndef save_into_html_file(path_html_file: str, response):\n \"\"\"\n Receives a response (in text format).\n Saves the document into a html file.\n \"\"\"\n html_file = open(path_html_file, 'w')\n html_file.writelines(response)\n html_file.close()\n with zipfile.ZipFile(path_html_file.replace('.html', '.zip'), 'w') as zf:\n zf.write(path_html_file, compress_type=zipfile.ZIP_DEFLATED)\n zf.close()\n os.remove(path_html_file)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _find_all_tr_pairs(key: str, title_details, profile_id):\n try:\n return title_details.find('div', {'id': key}).find('table', {\n 'class': 'resultsTable'}).find_all('tr')\n except AttributeError:\n logging.warning('ID %s (KEY) %s doest not have resultsTable' % (\n profile_id, key))\n\n\ndef _split_journal_attrs(attrs):\n if attrs:\n return [t.text.replace(':', '').strip().split('\\n') for t in [k for\n k in attrs if isinstance(k, bs4.element.Tag)]]\n return []\n\n\ndef _get_title_history(history_attrs):\n all_td = []\n if history_attrs:\n for h in history_attrs:\n all_td.extend(h.find_all('td'))\n if len(all_td) > 0:\n return '#'.join([''.join([a.strip() for a in k.text.split('\\n')]) for\n k in all_td if isinstance(k, bs4.element.Tag)])\n return ''\n\n\ndef _get_pair_key_values(splitted_attrs, prefix: str):\n tmp_dict = {}\n for j in splitted_attrs:\n tmp_dict[prefix + j[0].replace('\\t', ' ')] = '#'.join([k.strip().\n replace('\\t', ' ').replace('#', ' ') for k in j[1:] if k.strip(\n ) != ''])\n return tmp_dict\n\n\ndef html2dict(path_zip_file: str):\n \"\"\"\n Open, reads and converts a zipped html into a dict.\n :param path_zip_file: path of the zip file\n :return: a dict where each key is the profile id and the value is its key-value pairs (attrs)\n \"\"\"\n profile_id = path_zip_file.split('/')[-1].split('.')[0]\n inner_html_path = 'data/ulrich/html/' + profile_id + '.html'\n html_content = zipfile.ZipFile(path_zip_file).open(inner_html_path).read()\n parsed_data = [profile_id]\n soupped_html = BeautifulSoup(html_content, 'html.parser')\n title_details = soupped_html.find('div', {'id': 'resultPane'})\n basic_description_attrs = _find_all_tr_pairs('basicDescriptionContainer',\n title_details, profile_id)\n title_history_attrs = _find_all_tr_pairs('titleHistoryContainer',\n title_details, profile_id)\n bd_splitted = _split_journal_attrs(basic_description_attrs)\n dict_bd = _get_pair_key_values(bd_splitted, 'bd_')\n title_history = _get_title_history(title_history_attrs)\n for k in sorted(DEFAULT_ATTRS):\n parsed_data.append(dict_bd.get(k, ''))\n parsed_data.append(title_history)\n return parsed_data\n\n\ndef save_tsv_file(parsed_data):\n \"\"\"\n Save a parsed journal to a tsv file\n :param parsed_data: a list of dictionaries where the only main key is a profile_id and its value is the pairs of journal's attributes\n \"\"\"\n result_file.write('\\t'.join(parsed_data) + '\\n')\n\n\ndef save_into_html_file(path_html_file: str, response):\n \"\"\"\n Receives a response (in text format).\n Saves the document into a html file.\n \"\"\"\n html_file = open(path_html_file, 'w')\n html_file.writelines(response)\n html_file.close()\n with zipfile.ZipFile(path_html_file.replace('.html', '.zip'), 'w') as zf:\n zf.write(path_html_file, compress_type=zipfile.ZIP_DEFLATED)\n zf.close()\n os.remove(path_html_file)\n\n\nasync def fetch(url, session):\n \"\"\"\n Fetches the url.\n Calls the method save_into_html_file with the response as a parameter (in text format).\n \"\"\"\n try:\n async with session.get(url) as response:\n profile_id = url.split('/')[-1]\n print('COLLECTING %s' % profile_id)\n for attempt in range(DEFAULT_MAX_ATTEMPTS):\n try:\n if response.status == 200:\n response = await response.text(errors='ignore')\n save_into_html_file(DEFAULT_DIR_HTML + profile_id +\n '.html', response)\n logging.info('COLLECTED: %s' % profile_id)\n break\n elif response.status == 500 and attempt == DEFAULT_MAX_ATTEMPTS:\n logging.info('RESPONSE_ERROR_500: %s' % profile_id)\n 
elif response.status == 404:\n logging.info('RESPONSE_ERROR_404: %s' % profile_id)\n except ServerDisconnectedError:\n logging.info('SERVER_DISCONNECTED_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('TIMEOUT_ERROR: %s' % profile_id)\n except ContentTypeError:\n logging.info('CONTENT_TYPE_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('GENERALIZED_TIMEOUT_ERROR')\n except ClientConnectionError:\n logging.info('GENERALIZED_CLIENT_CONNECTION_ERROR')\n except ServerDisconnectedError:\n logging.info('GENERALIZED_SERVER_DISCONNECTED_ERROR')\n except ContentTypeError:\n logging.info('GENERALIZED_CONTENT_TYPE_ERROR')\n\n\nasync def bound_fetch(sem, url, session):\n \"\"\"\n Limits the collecting task to a semaphore.\n \"\"\"\n async with sem:\n await fetch(url, session)\n\n\nasync def run():\n \"\"\"\n Creates tasks to get the html file with respect to a list composed by htmls.\n \"\"\"\n sem = asyncio.Semaphore(DEFAULT_SEMAPHORE_LIMIT)\n tasks = []\n async with ClientSession() as session:\n for u in [ROOT_URL.format(jid) for jid in DEFAULT_RANGE_IDS]:\n task = asyncio.ensure_future(bound_fetch(sem, u, session))\n tasks.append(task)\n responses = asyncio.gather(*tasks)\n await responses\n\n\nif __name__ == '__main__':\n logging.basicConfig(filename='ulrich.log', level=logging.INFO, format=\n '%(asctime)s - %(levelname)s - %(message)s')\n MODE = sys.argv[1]\n DIR_HTML = sys.argv[2]\n if MODE == 'collect':\n DEFAULT_DIR_HTML = DIR_HTML\n os.makedirs(DEFAULT_DIR_HTML, exist_ok=True)\n if len(sys.argv) == 4:\n start_id = int(sys.argv[3])\n DEFAULT_RANGE_IDS = itertools.chain(range(start_id,\n DEFAULT_END_ID), DEFAULT_RANGE_2)\n loop = asyncio.get_event_loop()\n future = asyncio.ensure_future(run())\n loop.run_until_complete(future)\n elif MODE == 'parse':\n DEFAULT_DIR_HTML = DIR_HTML\n START = int(sys.argv[3])\n END = int(sys.argv[4])\n if END > len(os.listdir(DEFAULT_DIR_HTML)):\n END = len(os.listdir(DEFAULT_DIR_HTML))\n htmls = sorted([(DEFAULT_DIR_HTML + h) for h in os.listdir(DIR_HTML)])[\n START:END]\n result_file = open(DEFAULT_DIR_HTML + '../' + str(START) + '.tsv', 'w')\n result_file.write('\\t'.join(['Profile Identifier'] + sorted(\n DEFAULT_ATTRS) + ['title_history']) + '\\n')\n for i, h in enumerate(sorted(htmls)):\n print('\\r%d / %d' % (i + 1 + START, START + len(htmls)), end='')\n parsed = html2dict(h)\n save_tsv_file(parsed)\n result_file.close()\n",
"step-3": "<mask token>\nROOT_URL = 'https://ulrichsweb.serialssolutions.com/titleDetails/{}'\nDEFAULT_START_ID = 12515\nDEFAULT_END_ID = 835018\nDEFAULT_RANGE_1 = range(DEFAULT_START_ID, DEFAULT_END_ID)\nDEFAULT_RANGE_2 = range(15793473, 15798807)\nDEFAULT_RANGE_IDS = itertools.chain(DEFAULT_RANGE_1, DEFAULT_RANGE_2)\nDEFAULT_DIR_HTML = 'data/ulrich/html/'\nDEFAULT_MAX_ATTEMPTS = 5\nDEFAULT_MODE = 'collect'\nDEFAULT_NUM_THREADS = 4\nDEFAULT_SEMAPHORE_LIMIT = 2\nDEFAULT_ATTRS = {'bd_Title', 'bd_ISSN', 'bd_Format', 'bd_Frequency',\n 'bd_Country'}\n\n\ndef _find_all_tr_pairs(key: str, title_details, profile_id):\n try:\n return title_details.find('div', {'id': key}).find('table', {\n 'class': 'resultsTable'}).find_all('tr')\n except AttributeError:\n logging.warning('ID %s (KEY) %s doest not have resultsTable' % (\n profile_id, key))\n\n\ndef _split_journal_attrs(attrs):\n if attrs:\n return [t.text.replace(':', '').strip().split('\\n') for t in [k for\n k in attrs if isinstance(k, bs4.element.Tag)]]\n return []\n\n\ndef _get_title_history(history_attrs):\n all_td = []\n if history_attrs:\n for h in history_attrs:\n all_td.extend(h.find_all('td'))\n if len(all_td) > 0:\n return '#'.join([''.join([a.strip() for a in k.text.split('\\n')]) for\n k in all_td if isinstance(k, bs4.element.Tag)])\n return ''\n\n\ndef _get_pair_key_values(splitted_attrs, prefix: str):\n tmp_dict = {}\n for j in splitted_attrs:\n tmp_dict[prefix + j[0].replace('\\t', ' ')] = '#'.join([k.strip().\n replace('\\t', ' ').replace('#', ' ') for k in j[1:] if k.strip(\n ) != ''])\n return tmp_dict\n\n\ndef html2dict(path_zip_file: str):\n \"\"\"\n Open, reads and converts a zipped html into a dict.\n :param path_zip_file: path of the zip file\n :return: a dict where each key is the profile id and the value is its key-value pairs (attrs)\n \"\"\"\n profile_id = path_zip_file.split('/')[-1].split('.')[0]\n inner_html_path = 'data/ulrich/html/' + profile_id + '.html'\n html_content = zipfile.ZipFile(path_zip_file).open(inner_html_path).read()\n parsed_data = [profile_id]\n soupped_html = BeautifulSoup(html_content, 'html.parser')\n title_details = soupped_html.find('div', {'id': 'resultPane'})\n basic_description_attrs = _find_all_tr_pairs('basicDescriptionContainer',\n title_details, profile_id)\n title_history_attrs = _find_all_tr_pairs('titleHistoryContainer',\n title_details, profile_id)\n bd_splitted = _split_journal_attrs(basic_description_attrs)\n dict_bd = _get_pair_key_values(bd_splitted, 'bd_')\n title_history = _get_title_history(title_history_attrs)\n for k in sorted(DEFAULT_ATTRS):\n parsed_data.append(dict_bd.get(k, ''))\n parsed_data.append(title_history)\n return parsed_data\n\n\ndef save_tsv_file(parsed_data):\n \"\"\"\n Save a parsed journal to a tsv file\n :param parsed_data: a list of dictionaries where the only main key is a profile_id and its value is the pairs of journal's attributes\n \"\"\"\n result_file.write('\\t'.join(parsed_data) + '\\n')\n\n\ndef save_into_html_file(path_html_file: str, response):\n \"\"\"\n Receives a response (in text format).\n Saves the document into a html file.\n \"\"\"\n html_file = open(path_html_file, 'w')\n html_file.writelines(response)\n html_file.close()\n with zipfile.ZipFile(path_html_file.replace('.html', '.zip'), 'w') as zf:\n zf.write(path_html_file, compress_type=zipfile.ZIP_DEFLATED)\n zf.close()\n os.remove(path_html_file)\n\n\nasync def fetch(url, session):\n \"\"\"\n Fetches the url.\n Calls the method save_into_html_file with the response as a parameter (in 
text format).\n \"\"\"\n try:\n async with session.get(url) as response:\n profile_id = url.split('/')[-1]\n print('COLLECTING %s' % profile_id)\n for attempt in range(DEFAULT_MAX_ATTEMPTS):\n try:\n if response.status == 200:\n response = await response.text(errors='ignore')\n save_into_html_file(DEFAULT_DIR_HTML + profile_id +\n '.html', response)\n logging.info('COLLECTED: %s' % profile_id)\n break\n elif response.status == 500 and attempt == DEFAULT_MAX_ATTEMPTS:\n logging.info('RESPONSE_ERROR_500: %s' % profile_id)\n elif response.status == 404:\n logging.info('RESPONSE_ERROR_404: %s' % profile_id)\n except ServerDisconnectedError:\n logging.info('SERVER_DISCONNECTED_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('TIMEOUT_ERROR: %s' % profile_id)\n except ContentTypeError:\n logging.info('CONTENT_TYPE_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('GENERALIZED_TIMEOUT_ERROR')\n except ClientConnectionError:\n logging.info('GENERALIZED_CLIENT_CONNECTION_ERROR')\n except ServerDisconnectedError:\n logging.info('GENERALIZED_SERVER_DISCONNECTED_ERROR')\n except ContentTypeError:\n logging.info('GENERALIZED_CONTENT_TYPE_ERROR')\n\n\nasync def bound_fetch(sem, url, session):\n \"\"\"\n Limits the collecting task to a semaphore.\n \"\"\"\n async with sem:\n await fetch(url, session)\n\n\nasync def run():\n \"\"\"\n Creates tasks to get the html file with respect to a list composed by htmls.\n \"\"\"\n sem = asyncio.Semaphore(DEFAULT_SEMAPHORE_LIMIT)\n tasks = []\n async with ClientSession() as session:\n for u in [ROOT_URL.format(jid) for jid in DEFAULT_RANGE_IDS]:\n task = asyncio.ensure_future(bound_fetch(sem, u, session))\n tasks.append(task)\n responses = asyncio.gather(*tasks)\n await responses\n\n\nif __name__ == '__main__':\n logging.basicConfig(filename='ulrich.log', level=logging.INFO, format=\n '%(asctime)s - %(levelname)s - %(message)s')\n MODE = sys.argv[1]\n DIR_HTML = sys.argv[2]\n if MODE == 'collect':\n DEFAULT_DIR_HTML = DIR_HTML\n os.makedirs(DEFAULT_DIR_HTML, exist_ok=True)\n if len(sys.argv) == 4:\n start_id = int(sys.argv[3])\n DEFAULT_RANGE_IDS = itertools.chain(range(start_id,\n DEFAULT_END_ID), DEFAULT_RANGE_2)\n loop = asyncio.get_event_loop()\n future = asyncio.ensure_future(run())\n loop.run_until_complete(future)\n elif MODE == 'parse':\n DEFAULT_DIR_HTML = DIR_HTML\n START = int(sys.argv[3])\n END = int(sys.argv[4])\n if END > len(os.listdir(DEFAULT_DIR_HTML)):\n END = len(os.listdir(DEFAULT_DIR_HTML))\n htmls = sorted([(DEFAULT_DIR_HTML + h) for h in os.listdir(DIR_HTML)])[\n START:END]\n result_file = open(DEFAULT_DIR_HTML + '../' + str(START) + '.tsv', 'w')\n result_file.write('\\t'.join(['Profile Identifier'] + sorted(\n DEFAULT_ATTRS) + ['title_history']) + '\\n')\n for i, h in enumerate(sorted(htmls)):\n print('\\r%d / %d' % (i + 1 + START, START + len(htmls)), end='')\n parsed = html2dict(h)\n save_tsv_file(parsed)\n result_file.close()\n",
"step-4": "import asyncio\nimport bs4\nimport itertools\nimport logging\nimport sys\nimport os\nimport zipfile\nfrom asyncio import TimeoutError\nfrom aiohttp import ClientSession, ClientConnectionError\nfrom aiohttp.client_exceptions import ContentTypeError, ServerDisconnectedError\nfrom bs4 import BeautifulSoup\nROOT_URL = 'https://ulrichsweb.serialssolutions.com/titleDetails/{}'\nDEFAULT_START_ID = 12515\nDEFAULT_END_ID = 835018\nDEFAULT_RANGE_1 = range(DEFAULT_START_ID, DEFAULT_END_ID)\nDEFAULT_RANGE_2 = range(15793473, 15798807)\nDEFAULT_RANGE_IDS = itertools.chain(DEFAULT_RANGE_1, DEFAULT_RANGE_2)\nDEFAULT_DIR_HTML = 'data/ulrich/html/'\nDEFAULT_MAX_ATTEMPTS = 5\nDEFAULT_MODE = 'collect'\nDEFAULT_NUM_THREADS = 4\nDEFAULT_SEMAPHORE_LIMIT = 2\nDEFAULT_ATTRS = {'bd_Title', 'bd_ISSN', 'bd_Format', 'bd_Frequency',\n 'bd_Country'}\n\n\ndef _find_all_tr_pairs(key: str, title_details, profile_id):\n try:\n return title_details.find('div', {'id': key}).find('table', {\n 'class': 'resultsTable'}).find_all('tr')\n except AttributeError:\n logging.warning('ID %s (KEY) %s doest not have resultsTable' % (\n profile_id, key))\n\n\ndef _split_journal_attrs(attrs):\n if attrs:\n return [t.text.replace(':', '').strip().split('\\n') for t in [k for\n k in attrs if isinstance(k, bs4.element.Tag)]]\n return []\n\n\ndef _get_title_history(history_attrs):\n all_td = []\n if history_attrs:\n for h in history_attrs:\n all_td.extend(h.find_all('td'))\n if len(all_td) > 0:\n return '#'.join([''.join([a.strip() for a in k.text.split('\\n')]) for\n k in all_td if isinstance(k, bs4.element.Tag)])\n return ''\n\n\ndef _get_pair_key_values(splitted_attrs, prefix: str):\n tmp_dict = {}\n for j in splitted_attrs:\n tmp_dict[prefix + j[0].replace('\\t', ' ')] = '#'.join([k.strip().\n replace('\\t', ' ').replace('#', ' ') for k in j[1:] if k.strip(\n ) != ''])\n return tmp_dict\n\n\ndef html2dict(path_zip_file: str):\n \"\"\"\n Open, reads and converts a zipped html into a dict.\n :param path_zip_file: path of the zip file\n :return: a dict where each key is the profile id and the value is its key-value pairs (attrs)\n \"\"\"\n profile_id = path_zip_file.split('/')[-1].split('.')[0]\n inner_html_path = 'data/ulrich/html/' + profile_id + '.html'\n html_content = zipfile.ZipFile(path_zip_file).open(inner_html_path).read()\n parsed_data = [profile_id]\n soupped_html = BeautifulSoup(html_content, 'html.parser')\n title_details = soupped_html.find('div', {'id': 'resultPane'})\n basic_description_attrs = _find_all_tr_pairs('basicDescriptionContainer',\n title_details, profile_id)\n title_history_attrs = _find_all_tr_pairs('titleHistoryContainer',\n title_details, profile_id)\n bd_splitted = _split_journal_attrs(basic_description_attrs)\n dict_bd = _get_pair_key_values(bd_splitted, 'bd_')\n title_history = _get_title_history(title_history_attrs)\n for k in sorted(DEFAULT_ATTRS):\n parsed_data.append(dict_bd.get(k, ''))\n parsed_data.append(title_history)\n return parsed_data\n\n\ndef save_tsv_file(parsed_data):\n \"\"\"\n Save a parsed journal to a tsv file\n :param parsed_data: a list of dictionaries where the only main key is a profile_id and its value is the pairs of journal's attributes\n \"\"\"\n result_file.write('\\t'.join(parsed_data) + '\\n')\n\n\ndef save_into_html_file(path_html_file: str, response):\n \"\"\"\n Receives a response (in text format).\n Saves the document into a html file.\n \"\"\"\n html_file = open(path_html_file, 'w')\n html_file.writelines(response)\n html_file.close()\n with 
zipfile.ZipFile(path_html_file.replace('.html', '.zip'), 'w') as zf:\n zf.write(path_html_file, compress_type=zipfile.ZIP_DEFLATED)\n zf.close()\n os.remove(path_html_file)\n\n\nasync def fetch(url, session):\n \"\"\"\n Fetches the url.\n Calls the method save_into_html_file with the response as a parameter (in text format).\n \"\"\"\n try:\n async with session.get(url) as response:\n profile_id = url.split('/')[-1]\n print('COLLECTING %s' % profile_id)\n for attempt in range(DEFAULT_MAX_ATTEMPTS):\n try:\n if response.status == 200:\n response = await response.text(errors='ignore')\n save_into_html_file(DEFAULT_DIR_HTML + profile_id +\n '.html', response)\n logging.info('COLLECTED: %s' % profile_id)\n break\n elif response.status == 500 and attempt == DEFAULT_MAX_ATTEMPTS:\n logging.info('RESPONSE_ERROR_500: %s' % profile_id)\n elif response.status == 404:\n logging.info('RESPONSE_ERROR_404: %s' % profile_id)\n except ServerDisconnectedError:\n logging.info('SERVER_DISCONNECTED_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('TIMEOUT_ERROR: %s' % profile_id)\n except ContentTypeError:\n logging.info('CONTENT_TYPE_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('GENERALIZED_TIMEOUT_ERROR')\n except ClientConnectionError:\n logging.info('GENERALIZED_CLIENT_CONNECTION_ERROR')\n except ServerDisconnectedError:\n logging.info('GENERALIZED_SERVER_DISCONNECTED_ERROR')\n except ContentTypeError:\n logging.info('GENERALIZED_CONTENT_TYPE_ERROR')\n\n\nasync def bound_fetch(sem, url, session):\n \"\"\"\n Limits the collecting task to a semaphore.\n \"\"\"\n async with sem:\n await fetch(url, session)\n\n\nasync def run():\n \"\"\"\n Creates tasks to get the html file with respect to a list composed by htmls.\n \"\"\"\n sem = asyncio.Semaphore(DEFAULT_SEMAPHORE_LIMIT)\n tasks = []\n async with ClientSession() as session:\n for u in [ROOT_URL.format(jid) for jid in DEFAULT_RANGE_IDS]:\n task = asyncio.ensure_future(bound_fetch(sem, u, session))\n tasks.append(task)\n responses = asyncio.gather(*tasks)\n await responses\n\n\nif __name__ == '__main__':\n logging.basicConfig(filename='ulrich.log', level=logging.INFO, format=\n '%(asctime)s - %(levelname)s - %(message)s')\n MODE = sys.argv[1]\n DIR_HTML = sys.argv[2]\n if MODE == 'collect':\n DEFAULT_DIR_HTML = DIR_HTML\n os.makedirs(DEFAULT_DIR_HTML, exist_ok=True)\n if len(sys.argv) == 4:\n start_id = int(sys.argv[3])\n DEFAULT_RANGE_IDS = itertools.chain(range(start_id,\n DEFAULT_END_ID), DEFAULT_RANGE_2)\n loop = asyncio.get_event_loop()\n future = asyncio.ensure_future(run())\n loop.run_until_complete(future)\n elif MODE == 'parse':\n DEFAULT_DIR_HTML = DIR_HTML\n START = int(sys.argv[3])\n END = int(sys.argv[4])\n if END > len(os.listdir(DEFAULT_DIR_HTML)):\n END = len(os.listdir(DEFAULT_DIR_HTML))\n htmls = sorted([(DEFAULT_DIR_HTML + h) for h in os.listdir(DIR_HTML)])[\n START:END]\n result_file = open(DEFAULT_DIR_HTML + '../' + str(START) + '.tsv', 'w')\n result_file.write('\\t'.join(['Profile Identifier'] + sorted(\n DEFAULT_ATTRS) + ['title_history']) + '\\n')\n for i, h in enumerate(sorted(htmls)):\n print('\\r%d / %d' % (i + 1 + START, START + len(htmls)), end='')\n parsed = html2dict(h)\n save_tsv_file(parsed)\n result_file.close()\n",
"step-5": "#!/usr/bin/env python3\nimport asyncio\n\nimport bs4\nimport itertools\nimport logging\nimport sys\nimport os\nimport zipfile\n\nfrom asyncio import TimeoutError\nfrom aiohttp import ClientSession, ClientConnectionError\nfrom aiohttp.client_exceptions import ContentTypeError, ServerDisconnectedError\nfrom bs4 import BeautifulSoup\n\nROOT_URL = 'https://ulrichsweb.serialssolutions.com/titleDetails/{}'\n\nDEFAULT_START_ID = 12515\nDEFAULT_END_ID = 835018\nDEFAULT_RANGE_1 = range(DEFAULT_START_ID, DEFAULT_END_ID)\nDEFAULT_RANGE_2 = range(15793473, 15798807)\nDEFAULT_RANGE_IDS = itertools.chain(DEFAULT_RANGE_1, DEFAULT_RANGE_2)\n\nDEFAULT_DIR_HTML = 'data/ulrich/html/'\n\nDEFAULT_MAX_ATTEMPTS = 5\nDEFAULT_MODE = 'collect'\nDEFAULT_NUM_THREADS = 4\nDEFAULT_SEMAPHORE_LIMIT = 2\n\nDEFAULT_ATTRS = {'bd_Title', 'bd_ISSN', 'bd_Format', 'bd_Frequency', 'bd_Country'}\n\n\ndef _find_all_tr_pairs(key: str, title_details, profile_id):\n try:\n return title_details.find('div', {'id': key}).find('table', {'class': 'resultsTable'}).find_all('tr')\n except AttributeError:\n logging.warning('ID %s (KEY) %s doest not have resultsTable' % (profile_id, key))\n\n\ndef _split_journal_attrs(attrs):\n if attrs:\n return [t.text.replace(':', '').strip().split('\\n') for t in\n [k for k in attrs if isinstance(k, bs4.element.Tag)]]\n return []\n\n\ndef _get_title_history(history_attrs):\n all_td = []\n if history_attrs:\n for h in history_attrs:\n all_td.extend(h.find_all('td'))\n if len(all_td) > 0:\n return '#'.join([''.join([a.strip() for a in k.text.split('\\n')]) for k in all_td if isinstance(k, bs4.element.Tag)])\n return ''\n\n\ndef _get_pair_key_values(splitted_attrs, prefix: str):\n tmp_dict = {}\n for j in splitted_attrs:\n tmp_dict[prefix + j[0].replace('\\t', ' ')] = '#'.join(\n [k.strip().replace('\\t', ' ').replace('#', ' ') for k in j[1:] if k.strip() != ''])\n return tmp_dict\n\n\ndef html2dict(path_zip_file: str):\n \"\"\"\n Open, reads and converts a zipped html into a dict.\n :param path_zip_file: path of the zip file\n :return: a dict where each key is the profile id and the value is its key-value pairs (attrs)\n \"\"\"\n profile_id = path_zip_file.split('/')[-1].split('.')[0]\n inner_html_path = 'data/ulrich/html/' + profile_id + '.html'\n html_content = zipfile.ZipFile(path_zip_file).open(inner_html_path).read()\n\n parsed_data = [profile_id]\n\n soupped_html = BeautifulSoup(html_content, 'html.parser')\n\n title_details = soupped_html.find('div', {'id': 'resultPane'})\n basic_description_attrs = _find_all_tr_pairs('basicDescriptionContainer', title_details, profile_id)\n title_history_attrs = _find_all_tr_pairs('titleHistoryContainer', title_details, profile_id)\n bd_splitted = _split_journal_attrs(basic_description_attrs)\n dict_bd = _get_pair_key_values(bd_splitted, 'bd_')\n title_history = _get_title_history(title_history_attrs)\n\n for k in sorted(DEFAULT_ATTRS):\n parsed_data.append(dict_bd.get(k, ''))\n\n parsed_data.append(title_history)\n\n return parsed_data\n\n\ndef save_tsv_file(parsed_data):\n \"\"\"\n Save a parsed journal to a tsv file\n :param parsed_data: a list of dictionaries where the only main key is a profile_id and its value is the pairs of journal's attributes\n \"\"\"\n result_file.write('\\t'.join(parsed_data) + '\\n')\n\n\ndef save_into_html_file(path_html_file: str, response):\n \"\"\"\n Receives a response (in text format).\n Saves the document into a html file.\n \"\"\"\n html_file = open(path_html_file, 'w')\n html_file.writelines(response)\n 
html_file.close()\n\n with zipfile.ZipFile(path_html_file.replace('.html', '.zip'), 'w') as zf:\n zf.write(path_html_file, compress_type=zipfile.ZIP_DEFLATED)\n zf.close()\n os.remove(path_html_file)\n\n\nasync def fetch(url, session):\n \"\"\"\n Fetches the url.\n Calls the method save_into_html_file with the response as a parameter (in text format).\n \"\"\"\n try:\n async with session.get(url) as response:\n profile_id = url.split('/')[-1]\n print('COLLECTING %s' % profile_id)\n for attempt in range(DEFAULT_MAX_ATTEMPTS):\n try:\n if response.status == 200:\n response = await response.text(errors='ignore')\n save_into_html_file(DEFAULT_DIR_HTML + profile_id + '.html', response)\n logging.info('COLLECTED: %s' % profile_id)\n break\n elif response.status == 500 and attempt == DEFAULT_MAX_ATTEMPTS:\n logging.info('RESPONSE_ERROR_500: %s' % profile_id)\n elif response.status == 404:\n logging.info('RESPONSE_ERROR_404: %s' % profile_id)\n except ServerDisconnectedError:\n logging.info('SERVER_DISCONNECTED_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('TIMEOUT_ERROR: %s' % profile_id)\n except ContentTypeError:\n logging.info('CONTENT_TYPE_ERROR: %s' % profile_id)\n except TimeoutError:\n logging.info('GENERALIZED_TIMEOUT_ERROR')\n except ClientConnectionError:\n logging.info('GENERALIZED_CLIENT_CONNECTION_ERROR')\n except ServerDisconnectedError:\n logging.info('GENERALIZED_SERVER_DISCONNECTED_ERROR')\n except ContentTypeError:\n logging.info('GENERALIZED_CONTENT_TYPE_ERROR')\n\n\nasync def bound_fetch(sem, url, session):\n \"\"\"\n Limits the collecting task to a semaphore.\n \"\"\"\n async with sem:\n await fetch(url, session)\n\n\nasync def run():\n \"\"\"\n Creates tasks to get the html file with respect to a list composed by htmls.\n \"\"\"\n sem = asyncio.Semaphore(DEFAULT_SEMAPHORE_LIMIT)\n tasks = []\n\n async with ClientSession() as session:\n for u in [ROOT_URL.format(jid) for jid in DEFAULT_RANGE_IDS]:\n task = asyncio.ensure_future(bound_fetch(sem, u, session))\n tasks.append(task)\n responses = asyncio.gather(*tasks)\n await responses\n\n\nif __name__ == \"__main__\":\n logging.basicConfig(filename='ulrich.log', level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')\n\n MODE = sys.argv[1]\n DIR_HTML = sys.argv[2]\n\n if MODE == 'collect':\n DEFAULT_DIR_HTML = DIR_HTML\n os.makedirs(DEFAULT_DIR_HTML, exist_ok=True)\n\n if len(sys.argv) == 4:\n start_id = int(sys.argv[3])\n DEFAULT_RANGE_IDS = itertools.chain(range(start_id, DEFAULT_END_ID), DEFAULT_RANGE_2)\n\n loop = asyncio.get_event_loop()\n future = asyncio.ensure_future(run())\n loop.run_until_complete(future)\n elif MODE == 'parse':\n DEFAULT_DIR_HTML = DIR_HTML\n\n START = int(sys.argv[3])\n END = int(sys.argv[4])\n\n if END > len(os.listdir(DEFAULT_DIR_HTML)):\n END = len(os.listdir(DEFAULT_DIR_HTML))\n\n htmls = sorted([DEFAULT_DIR_HTML + h for h in os.listdir(DIR_HTML)])[START:END]\n\n result_file = open(DEFAULT_DIR_HTML + '../' + str(START) + '.tsv', 'w')\n result_file.write('\\t'.join(['Profile Identifier'] + sorted(DEFAULT_ATTRS) + ['title_history']) + '\\n')\n\n for i, h in enumerate(sorted(htmls)):\n print('\\r%d / %d' % (i + 1 + START, START + len(htmls)), end='')\n parsed = html2dict(h)\n save_tsv_file(parsed)\n result_file.close()\n",
"step-ids": [
6,
8,
9,
10,
11
]
}
|
[
6,
8,
9,
10,
11
] |
from rest_framework import permissions


class AdminUrlUserPermission(permissions.BasePermission):
    """Grants access only to authenticated admins and superusers."""

    def has_permission(self, request, view):
        return (request.user.is_authenticated
                and (request.user.role == 'admin'
                     or request.user.is_superuser))

    def has_object_permission(self, request, view, obj):
        return (request.user.role == 'admin'
                or request.user.is_superuser)


class ReadOnly(permissions.BasePermission):
    """Allows only safe (read-only) HTTP methods: GET, HEAD, OPTIONS."""

    def has_permission(self, request, view):
        return request.method in permissions.SAFE_METHODS


class AuthorModeratorAdminOrReadOnly(permissions.BasePermission):
    """Read access for everyone; writes require the author, a moderator,
    or an admin."""

    def has_permission(self, request, view):
        is_safe = request.method in permissions.SAFE_METHODS
        is_auth = request.user.is_authenticated
        return is_safe or is_auth

    def has_object_permission(self, request, view, obj):
        is_safe = request.method in permissions.SAFE_METHODS
        is_author = obj.author == request.user
        # start from False so the method always returns a boolean,
        # even for anonymous users
        is_privileged = False
        if request.user.is_authenticated:
            is_privileged = request.user.role in ('moderator', 'admin')
        return is_author or is_safe or is_privileged
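

# Usage sketch (an illustrative assumption, not part of the original module):
# DRF permission classes can be composed with the | operator, so a view can
# grant admins full access while everyone else falls back to read-only.
# "ExampleViewSet" is a hypothetical name used only for this example.
from rest_framework import viewsets


class ExampleViewSet(viewsets.ViewSet):
    permission_classes = [AdminUrlUserPermission | ReadOnly]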
|
normal
|
{
"blob_id": "4549f26cf8051535f9d3486d111fc7afe7514dea",
"index": 5674,
"step-1": "<mask token>\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n <mask token>\n <mask token>\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-2": "<mask token>\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.user.is_authenticated and (request.user.role ==\n 'admin' or request.user.is_superuser)\n <mask token>\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-3": "<mask token>\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.user.is_authenticated and (request.user.role ==\n 'admin' or request.user.is_superuser)\n\n def has_object_permission(self, request, view, obj):\n return request.user.role == 'admin' or request.user.is_superuser\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-4": "from rest_framework import permissions\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.user.is_authenticated and (request.user.role ==\n 'admin' or request.user.is_superuser)\n\n def has_object_permission(self, request, view, obj):\n return request.user.role == 'admin' or request.user.is_superuser\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-5": "from rest_framework import permissions\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n def has_permission(self, request, view):\n return (request.user.is_authenticated\n and (request.user.role == 'admin'\n or request.user.is_superuser))\n\n def has_object_permission(self, request, view, obj):\n return (request.user.role == 'admin'\n or request.user.is_superuser)\n\n\nclass ReadOnly(permissions.BasePermission):\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
import pandas as pd
import matplotlib.pyplot as plt
from netCDF4 import Dataset
from cftime import num2date
import numpy as np
from datetime import datetime, timedelta


def plot_temperatures_by_country(values, country, start, end):
    """
    Shows a plot of temperature values for a country from a start date to an
    end date, together with a fixed 1973-1974 reference series
    """
    filtered = values.loc[(values['Country'] == country) &
                          (values['dt'] >= start) &
                          (values['dt'] <= end)]

    # x axis values
    x1 = filtered['dt']
    # corresponding y axis values
    y1 = filtered['AverageTemperature']

    # plotting the points for the requested period
    plt.plot(x1, y1, label=start + ' to ' + end)

    # a fixed 1973-1974 slice is plotted as a reference series for comparison
    filtered = values.loc[(values['Country'] == country) &
                          (values['dt'] >= '1973-01-01') &
                          (values['dt'] <= '1974-01-01')]

    # x axis values
    x2 = filtered['dt']
    # corresponding y axis values
    y2 = filtered['AverageTemperature']

    # plotting the points for the reference period
    plt.plot(x2, y2, label='1973 to 1974')

    # naming the x axis
    plt.xlabel('x - axis - date')
    # naming the y axis
    plt.ylabel('y - axis - temperature')

    plt.title('Temperatures from ' + start + ' to ' + end + ' for ' + country)

    # show the legend so the two labelled lines can be told apart
    plt.legend()
    # function to show the plot
    plt.show()


def temperatures_by_city_till2013():
"""
Info for dataset, temperatures by city part 1 - from 1743 to 2013
"""
# Columns: dt,AverageTemperature,AverageTemperatureUncertainty,City,Country,Latitude,Longitude
temperatures = pd.read_csv("GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv")
# 8 599 212 rows
print(len(temperatures))
countries = temperatures['Country'].unique()
print(len(countries))
print(sorted(countries))


def temperatures_by_country_till2013():
"""
Info for dataset, temperatures by country part 1 - from 1743 to 2013
"""
# Columns: dt, AverageTemperature, AverageTemperatureUncertainty, Country
temperatures = pd.read_csv("GlobalLandTemperatures/GlobalLandTemperaturesByCountry.csv")
# 577 462 rows
print(len(temperatures))
countries = temperatures['Country'].unique()
print(len(countries))
print(sorted(countries))


def plot_co2_by_country(values, country, start, end):
    """
    Shows a plot of CO2 values for a country from a start year to an end year
    """
filtered = values.loc[(values['Country'] == country) &
(values['Year'] >= start) &
(values['Year'] <= end)]
# x axis values
x1 = filtered['Year']
# corresponding y axis values
y1 = filtered['CO2']
    # plotting the points (no label needed since there is a single series)
    plt.plot(x1, y1)
# naming the x axis
plt.xlabel('x - axis - year')
# naming the y axis
plt.ylabel('y - axis - co2')
# giving a title to my graph
plt.title('CO2 from ' + start + ' to ' + end + ' for ' + country)
# function to show the plot
plt.show()


def co2_by_country_till2019():
    """
    Reshapes the wide CO2 emissions table (one column per year, 1751 to 2017)
    into a tidy Country/Year/CO2 CSV
    """
co2_messy = pd.read_csv("CO2/emission data.csv")
co2 = pd.melt(co2_messy, id_vars=["Country"], var_name="Year", value_name="CO2")
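    # melt reshapes wide to long: instead of one column per year, each row
    # becomes a single (Country, Year, CO2) observation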
df = pd.DataFrame()
df['Country'] = co2['Country']
df['Year'] = co2['Year']
df['CO2'] = co2['CO2']
df.to_csv(r'C:\Users\stoja\Desktop\EmissionCO2.csv', index=False)


def get_lat_lon():
    """
    Returns arrays for latitudes, longitudes, cities and countries
    from dataset, temperatures by city part 1, from 1743 to 2013
    """
# Columns: dt,AverageTemperature,AverageTemperatureUncertainty,City,Country,Latitude,Longitude
temperatures = pd.read_csv("GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv")
Latitude = temperatures['Latitude']
Longitude = temperatures['Longitude']
City = temperatures['City']
Country = temperatures['Country']
lat_array = []
long_array = []
cities_array = []
countries_array = []
tuples = []
for i, j, city, country in zip(Latitude, Longitude, City, Country):
if (i, j) not in tuples:
tuples.append((i, j))
            # the coordinate strings end in a hemisphere letter; strip it and
            # apply the sign it encodes (S and W are negative)
            lat_array.append(float(i[:-1]) * (-1 if i.endswith('S') else 1))
            long_array.append(float(j[:-1]) * (-1 if j.endswith('W') else 1))
cities_array.append(city)
countries_array.append(country)
return lat_array, long_array, cities_array, countries_array


def make_dataset_temperatures(filename, points):
    """
    Reads gridded temperatures from a netCDF4 file and writes one CSV row per
    (city, date) pair, matching each city to its nearest grid cell
    """
ds = Dataset(filename)
lats, lons, cities, countries = get_lat_lon()
# total lat,lon pairs: 1366
print('The number of rows is ' + str(len(lats)*points))
lon = ds.variables['longitude']
lat = ds.variables['latitude']
time = ds.variables['date_number']
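    # slicing with [:] materializes each netCDF variable as an in-memory array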
lon_array = lon[:]
lat_array = lat[:]
time_array = time[:]
temperature = ds.variables['temperature']
dates = []
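    # 'date_number' stores fractional years (e.g. 1900.5 is roughly mid-1900);
    # the loop adds the fractional part of the year, converted to seconds,
    # to January 1st of that year to recover a calendar date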
for time in time_array[:]:
year = int(time)
rem = time - year
base = datetime(year, 1, 1)
dates.append((base + timedelta(seconds=(base.replace(year=base.year + 1) - base).total_seconds() * rem)).date())
# second approach
# for t in time_array[:]:
# dates.append(num2date(t, units=time.units))
dateResult = []
temperatureResult = []
latitudeResult = []
longitudeResult = []
cityResult = []
countryResult = []
for latitude, longitude, city, country in zip(lats, lons, cities, countries):
# We want to find data for latitude, longitude
# We first need to find the indexes
i = np.abs(lon_array - longitude).argmin()
j = np.abs(lat_array - latitude).argmin()
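        # argmin over the absolute differences picks the indexes of the grid
        # node closest to the city's coordinates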
for d in dates:
dateResult.append(d)
resultTemperature = temperature[:, j, i]
for t in resultTemperature:
temperatureResult.append(t)
        # np.float was removed from NumPy; the builtin float is equivalent here
        resultLatitudes = np.full(
            shape=points,
            fill_value=latitude,
            dtype=float
        )
        for l in resultLatitudes:
            latitudeResult.append(l)
        resultLongitudes = np.full(
            shape=points,
            fill_value=longitude,
            dtype=float
        )
for l in resultLongitudes:
longitudeResult.append(l)
resultCities = np.full(
shape=points,
fill_value=city
)
for c in resultCities:
cityResult.append(c)
resultCountries = np.full(
shape=points,
fill_value=country
)
for c in resultCountries:
countryResult.append(c)
        # progress report; i is the longitude grid index of the current city
        print('iteration no:' + str(i))
df = pd.DataFrame()
df['date'] = dateResult
df['temperature'] = temperatureResult
df['latitude'] = latitudeResult
df['longitude'] = longitudeResult
df['city'] = cityResult
df['country'] = countryResult
df.to_csv(r'C:\Users\stoja\Desktop\Temperatures.csv', index=False)
return df


def model():
    """
    Prints basic info for the air.mon.mean.v501.nc netCDF4 file
    (1416 monthly time steps)
    """
    ds = Dataset('air.mon.mean.v501.nc')
print(ds)
time = ds.variables['time']
print(time.units)
time_array = time[:]
for t in time_array[:]:
print(num2date(t, units=time.units))


if __name__ == '__main__':
print('Start')
# Making the CO2 dataset
co2_by_country_till2019()
# Making the temperatures dataset
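    # note: model() above reads this file's time axis via the 'time' variable,
    # while make_dataset_temperatures expects 'date_number'; the file must
    # expose Berkeley-Earth-style variable names for this call to work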
df1 = make_dataset_temperatures('air.mon.mean.v501.nc', 1416)
print(df1.head())
# Making the temperatures anomalies dataset
df2 = make_dataset_temperatures('Complete_TAVG_Daily_LatLong1_2010.nc', 3652)
print(df2.head())
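
    # Illustrative follow-up (an assumption, not part of the original script):
    # once Temperatures.csv exists, one city's series could be plotted like
    # this; the column names match those written by make_dataset_temperatures.
    # temps = pd.read_csv(r'C:\Users\stoja\Desktop\Temperatures.csv')
    # one_city = temps.loc[temps['city'] == 'Skopje']
    # plt.plot(one_city['date'], one_city['temperature'])
    # plt.show()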
|
normal
|
{
"blob_id": "2b579c3def4c2d02d365f019518e8e0b25664460",
"index": 7436,
"step-1": "<mask token>\n\n\ndef plot_temperatures_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for temperature values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n start) & (values['dt'] <= end)]\n x1 = filtered['dt']\n y1 = filtered['AverageTemperature']\n plt.plot(x1, y1, label='line 1')\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n '1973-01-01') & (values['dt'] <= '1974-01-01')]\n x2 = filtered['dt']\n y2 = filtered['AverageTemperature']\n plt.plot(x2, y2, label='line 2')\n plt.xlabel('x - axis - date')\n plt.ylabel('y - axis - temperature')\n plt.title('Temperatures from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\ndef temperatures_by_city_till2013():\n \"\"\"\n Info for dataset, temperatures by city part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef temperatures_by_country_till2013():\n \"\"\"\n Info for dataset, temperatures by country part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCountry.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef plot_co2_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for co2 values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['Year'] >=\n start) & (values['Year'] <= end)]\n x1 = filtered['Year']\n y1 = filtered['CO2']\n plt.plot(x1, y1, label='line 1')\n plt.xlabel('x - axis - year')\n plt.ylabel('y - axis - co2')\n plt.title('CO2 from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\n<mask token>\n\n\ndef get_lat_lon():\n \"\"\"\n Returns arrays for latitudes, longitudes, cities and countries\n from dataset, temperatures by country part 1, from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n Latitude = temperatures['Latitude']\n Longitude = temperatures['Longitude']\n City = temperatures['City']\n Country = temperatures['Country']\n lat_array = []\n long_array = []\n cities_array = []\n countries_array = []\n tuples = []\n for i, j, city, country in zip(Latitude, Longitude, City, Country):\n if (i, j) not in tuples:\n tuples.append((i, j))\n lat_array.append(float(i[:-1]))\n long_array.append(float(j[:-1]))\n cities_array.append(city)\n countries_array.append(country)\n return lat_array, long_array, cities_array, countries_array\n\n\ndef make_dataset_temperatures(filename, points):\n \"\"\"\n From netCDF4 file to CSV file\n \"\"\"\n ds = Dataset(filename)\n lats, lons, cities, countries = get_lat_lon()\n print('The number of rows is ' + str(len(lats) * points))\n lon = ds.variables['longitude']\n lat = ds.variables['latitude']\n time = ds.variables['date_number']\n lon_array = lon[:]\n lat_array = lat[:]\n time_array = time[:]\n temperature = ds.variables['temperature']\n dates = []\n for time in time_array[:]:\n year = int(time)\n rem = time - year\n base = datetime(year, 1, 1)\n dates.append((base + timedelta(seconds=(base.replace(year=base.year +\n 1) - base).total_seconds() * rem)).date())\n dateResult = []\n temperatureResult = []\n latitudeResult = 
[]\n longitudeResult = []\n cityResult = []\n countryResult = []\n for latitude, longitude, city, country in zip(lats, lons, cities, countries\n ):\n i = np.abs(lon_array - longitude).argmin()\n j = np.abs(lat_array - latitude).argmin()\n for d in dates:\n dateResult.append(d)\n resultTemperature = temperature[:, j, i]\n for t in resultTemperature:\n temperatureResult.append(t)\n resultLatitues = np.full(shape=points, fill_value=latitude, dtype=\n np.float)\n for l in resultLatitues:\n latitudeResult.append(l)\n resultLongitudes = np.full(shape=points, fill_value=longitude,\n dtype=np.float)\n for l in resultLongitudes:\n longitudeResult.append(l)\n resultCities = np.full(shape=points, fill_value=city)\n for c in resultCities:\n cityResult.append(c)\n resultCountries = np.full(shape=points, fill_value=country)\n for c in resultCountries:\n countryResult.append(c)\n print('iteration no:' + str(i))\n df = pd.DataFrame()\n df['date'] = dateResult\n df['temperature'] = temperatureResult\n df['latitude'] = latitudeResult\n df['longitude'] = longitudeResult\n df['city'] = cityResult\n df['country'] = countryResult\n df.to_csv('C:\\\\Users\\\\stoja\\\\Desktop\\\\Temperatures.csv', index=False)\n return df\n\n\ndef model():\n ds = Dataset('air.mon.mean.v501.nc')\n print(ds)\n time = ds.variables['time']\n print(time.units)\n time_array = time[:]\n for t in time_array[:]:\n print(num2date(t, units=time.units))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef plot_temperatures_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for temperature values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n start) & (values['dt'] <= end)]\n x1 = filtered['dt']\n y1 = filtered['AverageTemperature']\n plt.plot(x1, y1, label='line 1')\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n '1973-01-01') & (values['dt'] <= '1974-01-01')]\n x2 = filtered['dt']\n y2 = filtered['AverageTemperature']\n plt.plot(x2, y2, label='line 2')\n plt.xlabel('x - axis - date')\n plt.ylabel('y - axis - temperature')\n plt.title('Temperatures from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\ndef temperatures_by_city_till2013():\n \"\"\"\n Info for dataset, temperatures by city part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef temperatures_by_country_till2013():\n \"\"\"\n Info for dataset, temperatures by country part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCountry.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef plot_co2_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for co2 values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['Year'] >=\n start) & (values['Year'] <= end)]\n x1 = filtered['Year']\n y1 = filtered['CO2']\n plt.plot(x1, y1, label='line 1')\n plt.xlabel('x - axis - year')\n plt.ylabel('y - axis - co2')\n plt.title('CO2 from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\ndef co2_by_country_till2019():\n \"\"\"\n Info for dataset, co2 by country part 1 - from 1751 to 2017\n \"\"\"\n co2_messy = pd.read_csv('CO2/emission data.csv')\n co2 = pd.melt(co2_messy, id_vars=['Country'], var_name='Year',\n value_name='CO2')\n df = pd.DataFrame()\n df['Country'] = co2['Country']\n df['Year'] = co2['Year']\n df['CO2'] = co2['CO2']\n df.to_csv('C:\\\\Users\\\\stoja\\\\Desktop\\\\EmissionCO2.csv', index=False)\n\n\ndef get_lat_lon():\n \"\"\"\n Returns arrays for latitudes, longitudes, cities and countries\n from dataset, temperatures by country part 1, from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n Latitude = temperatures['Latitude']\n Longitude = temperatures['Longitude']\n City = temperatures['City']\n Country = temperatures['Country']\n lat_array = []\n long_array = []\n cities_array = []\n countries_array = []\n tuples = []\n for i, j, city, country in zip(Latitude, Longitude, City, Country):\n if (i, j) not in tuples:\n tuples.append((i, j))\n lat_array.append(float(i[:-1]))\n long_array.append(float(j[:-1]))\n cities_array.append(city)\n countries_array.append(country)\n return lat_array, long_array, cities_array, countries_array\n\n\ndef make_dataset_temperatures(filename, points):\n \"\"\"\n From netCDF4 file to CSV file\n \"\"\"\n ds = Dataset(filename)\n lats, lons, cities, countries = get_lat_lon()\n print('The number of rows is ' + str(len(lats) * points))\n lon = ds.variables['longitude']\n lat = ds.variables['latitude']\n time = 
ds.variables['date_number']\n lon_array = lon[:]\n lat_array = lat[:]\n time_array = time[:]\n temperature = ds.variables['temperature']\n dates = []\n for time in time_array[:]:\n year = int(time)\n rem = time - year\n base = datetime(year, 1, 1)\n dates.append((base + timedelta(seconds=(base.replace(year=base.year +\n 1) - base).total_seconds() * rem)).date())\n dateResult = []\n temperatureResult = []\n latitudeResult = []\n longitudeResult = []\n cityResult = []\n countryResult = []\n for latitude, longitude, city, country in zip(lats, lons, cities, countries\n ):\n i = np.abs(lon_array - longitude).argmin()\n j = np.abs(lat_array - latitude).argmin()\n for d in dates:\n dateResult.append(d)\n resultTemperature = temperature[:, j, i]\n for t in resultTemperature:\n temperatureResult.append(t)\n resultLatitues = np.full(shape=points, fill_value=latitude, dtype=\n np.float)\n for l in resultLatitues:\n latitudeResult.append(l)\n resultLongitudes = np.full(shape=points, fill_value=longitude,\n dtype=np.float)\n for l in resultLongitudes:\n longitudeResult.append(l)\n resultCities = np.full(shape=points, fill_value=city)\n for c in resultCities:\n cityResult.append(c)\n resultCountries = np.full(shape=points, fill_value=country)\n for c in resultCountries:\n countryResult.append(c)\n print('iteration no:' + str(i))\n df = pd.DataFrame()\n df['date'] = dateResult\n df['temperature'] = temperatureResult\n df['latitude'] = latitudeResult\n df['longitude'] = longitudeResult\n df['city'] = cityResult\n df['country'] = countryResult\n df.to_csv('C:\\\\Users\\\\stoja\\\\Desktop\\\\Temperatures.csv', index=False)\n return df\n\n\ndef model():\n ds = Dataset('air.mon.mean.v501.nc')\n print(ds)\n time = ds.variables['time']\n print(time.units)\n time_array = time[:]\n for t in time_array[:]:\n print(num2date(t, units=time.units))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef plot_temperatures_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for temperature values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n start) & (values['dt'] <= end)]\n x1 = filtered['dt']\n y1 = filtered['AverageTemperature']\n plt.plot(x1, y1, label='line 1')\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n '1973-01-01') & (values['dt'] <= '1974-01-01')]\n x2 = filtered['dt']\n y2 = filtered['AverageTemperature']\n plt.plot(x2, y2, label='line 2')\n plt.xlabel('x - axis - date')\n plt.ylabel('y - axis - temperature')\n plt.title('Temperatures from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\ndef temperatures_by_city_till2013():\n \"\"\"\n Info for dataset, temperatures by city part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef temperatures_by_country_till2013():\n \"\"\"\n Info for dataset, temperatures by country part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCountry.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef plot_co2_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for co2 values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['Year'] >=\n start) & (values['Year'] <= end)]\n x1 = filtered['Year']\n y1 = filtered['CO2']\n plt.plot(x1, y1, label='line 1')\n plt.xlabel('x - axis - year')\n plt.ylabel('y - axis - co2')\n plt.title('CO2 from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\ndef co2_by_country_till2019():\n \"\"\"\n Info for dataset, co2 by country part 1 - from 1751 to 2017\n \"\"\"\n co2_messy = pd.read_csv('CO2/emission data.csv')\n co2 = pd.melt(co2_messy, id_vars=['Country'], var_name='Year',\n value_name='CO2')\n df = pd.DataFrame()\n df['Country'] = co2['Country']\n df['Year'] = co2['Year']\n df['CO2'] = co2['CO2']\n df.to_csv('C:\\\\Users\\\\stoja\\\\Desktop\\\\EmissionCO2.csv', index=False)\n\n\ndef get_lat_lon():\n \"\"\"\n Returns arrays for latitudes, longitudes, cities and countries\n from dataset, temperatures by country part 1, from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n Latitude = temperatures['Latitude']\n Longitude = temperatures['Longitude']\n City = temperatures['City']\n Country = temperatures['Country']\n lat_array = []\n long_array = []\n cities_array = []\n countries_array = []\n tuples = []\n for i, j, city, country in zip(Latitude, Longitude, City, Country):\n if (i, j) not in tuples:\n tuples.append((i, j))\n lat_array.append(float(i[:-1]))\n long_array.append(float(j[:-1]))\n cities_array.append(city)\n countries_array.append(country)\n return lat_array, long_array, cities_array, countries_array\n\n\ndef make_dataset_temperatures(filename, points):\n \"\"\"\n From netCDF4 file to CSV file\n \"\"\"\n ds = Dataset(filename)\n lats, lons, cities, countries = get_lat_lon()\n print('The number of rows is ' + str(len(lats) * points))\n lon = ds.variables['longitude']\n lat = ds.variables['latitude']\n time = 
ds.variables['date_number']\n lon_array = lon[:]\n lat_array = lat[:]\n time_array = time[:]\n temperature = ds.variables['temperature']\n dates = []\n for time in time_array[:]:\n year = int(time)\n rem = time - year\n base = datetime(year, 1, 1)\n dates.append((base + timedelta(seconds=(base.replace(year=base.year +\n 1) - base).total_seconds() * rem)).date())\n dateResult = []\n temperatureResult = []\n latitudeResult = []\n longitudeResult = []\n cityResult = []\n countryResult = []\n for latitude, longitude, city, country in zip(lats, lons, cities, countries\n ):\n i = np.abs(lon_array - longitude).argmin()\n j = np.abs(lat_array - latitude).argmin()\n for d in dates:\n dateResult.append(d)\n resultTemperature = temperature[:, j, i]\n for t in resultTemperature:\n temperatureResult.append(t)\n resultLatitues = np.full(shape=points, fill_value=latitude, dtype=\n np.float)\n for l in resultLatitues:\n latitudeResult.append(l)\n resultLongitudes = np.full(shape=points, fill_value=longitude,\n dtype=np.float)\n for l in resultLongitudes:\n longitudeResult.append(l)\n resultCities = np.full(shape=points, fill_value=city)\n for c in resultCities:\n cityResult.append(c)\n resultCountries = np.full(shape=points, fill_value=country)\n for c in resultCountries:\n countryResult.append(c)\n print('iteration no:' + str(i))\n df = pd.DataFrame()\n df['date'] = dateResult\n df['temperature'] = temperatureResult\n df['latitude'] = latitudeResult\n df['longitude'] = longitudeResult\n df['city'] = cityResult\n df['country'] = countryResult\n df.to_csv('C:\\\\Users\\\\stoja\\\\Desktop\\\\Temperatures.csv', index=False)\n return df\n\n\ndef model():\n ds = Dataset('air.mon.mean.v501.nc')\n print(ds)\n time = ds.variables['time']\n print(time.units)\n time_array = time[:]\n for t in time_array[:]:\n print(num2date(t, units=time.units))\n\n\nif __name__ == '__main__':\n print('Start')\n co2_by_country_till2019()\n df1 = make_dataset_temperatures('air.mon.mean.v501.nc', 1416)\n print(df1.head())\n df2 = make_dataset_temperatures('Complete_TAVG_Daily_LatLong1_2010.nc',\n 3652)\n print(df2.head())\n",
"step-4": "import pandas as pd\nimport matplotlib.pyplot as plt\nfrom netCDF4 import Dataset\nfrom cftime import num2date\nimport os\nimport numpy as np\nfrom datetime import datetime, timedelta, date\n\n\ndef plot_temperatures_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for temperature values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n start) & (values['dt'] <= end)]\n x1 = filtered['dt']\n y1 = filtered['AverageTemperature']\n plt.plot(x1, y1, label='line 1')\n filtered = values.loc[(values['Country'] == country) & (values['dt'] >=\n '1973-01-01') & (values['dt'] <= '1974-01-01')]\n x2 = filtered['dt']\n y2 = filtered['AverageTemperature']\n plt.plot(x2, y2, label='line 2')\n plt.xlabel('x - axis - date')\n plt.ylabel('y - axis - temperature')\n plt.title('Temperatures from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\ndef temperatures_by_city_till2013():\n \"\"\"\n Info for dataset, temperatures by city part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef temperatures_by_country_till2013():\n \"\"\"\n Info for dataset, temperatures by country part 1 - from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCountry.csv')\n print(len(temperatures))\n countries = temperatures['Country'].unique()\n print(len(countries))\n print(sorted(countries))\n\n\ndef plot_co2_by_country(values, country, start, end):\n \"\"\"\n Returns a plot for co2 values for a country\n from a start point to an end point\n \"\"\"\n filtered = values.loc[(values['Country'] == country) & (values['Year'] >=\n start) & (values['Year'] <= end)]\n x1 = filtered['Year']\n y1 = filtered['CO2']\n plt.plot(x1, y1, label='line 1')\n plt.xlabel('x - axis - year')\n plt.ylabel('y - axis - co2')\n plt.title('CO2 from ' + start + ' to ' + end + ' for ' + country)\n plt.show()\n\n\ndef co2_by_country_till2019():\n \"\"\"\n Info for dataset, co2 by country part 1 - from 1751 to 2017\n \"\"\"\n co2_messy = pd.read_csv('CO2/emission data.csv')\n co2 = pd.melt(co2_messy, id_vars=['Country'], var_name='Year',\n value_name='CO2')\n df = pd.DataFrame()\n df['Country'] = co2['Country']\n df['Year'] = co2['Year']\n df['CO2'] = co2['CO2']\n df.to_csv('C:\\\\Users\\\\stoja\\\\Desktop\\\\EmissionCO2.csv', index=False)\n\n\ndef get_lat_lon():\n \"\"\"\n Returns arrays for latitudes, longitudes, cities and countries\n from dataset, temperatures by country part 1, from 1743 to 2013\n \"\"\"\n temperatures = pd.read_csv(\n 'GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv')\n Latitude = temperatures['Latitude']\n Longitude = temperatures['Longitude']\n City = temperatures['City']\n Country = temperatures['Country']\n lat_array = []\n long_array = []\n cities_array = []\n countries_array = []\n tuples = []\n for i, j, city, country in zip(Latitude, Longitude, City, Country):\n if (i, j) not in tuples:\n tuples.append((i, j))\n lat_array.append(float(i[:-1]))\n long_array.append(float(j[:-1]))\n cities_array.append(city)\n countries_array.append(country)\n return lat_array, long_array, cities_array, countries_array\n\n\ndef make_dataset_temperatures(filename, points):\n \"\"\"\n From netCDF4 file to CSV file\n \"\"\"\n ds = Dataset(filename)\n lats, lons, 
cities, countries = get_lat_lon()\n print('The number of rows is ' + str(len(lats) * points))\n lon = ds.variables['longitude']\n lat = ds.variables['latitude']\n time = ds.variables['date_number']\n lon_array = lon[:]\n lat_array = lat[:]\n time_array = time[:]\n temperature = ds.variables['temperature']\n dates = []\n for time in time_array[:]:\n year = int(time)\n rem = time - year\n base = datetime(year, 1, 1)\n dates.append((base + timedelta(seconds=(base.replace(year=base.year +\n 1) - base).total_seconds() * rem)).date())\n dateResult = []\n temperatureResult = []\n latitudeResult = []\n longitudeResult = []\n cityResult = []\n countryResult = []\n for latitude, longitude, city, country in zip(lats, lons, cities, countries\n ):\n i = np.abs(lon_array - longitude).argmin()\n j = np.abs(lat_array - latitude).argmin()\n for d in dates:\n dateResult.append(d)\n resultTemperature = temperature[:, j, i]\n for t in resultTemperature:\n temperatureResult.append(t)\n resultLatitues = np.full(shape=points, fill_value=latitude, dtype=\n np.float)\n for l in resultLatitues:\n latitudeResult.append(l)\n resultLongitudes = np.full(shape=points, fill_value=longitude,\n dtype=np.float)\n for l in resultLongitudes:\n longitudeResult.append(l)\n resultCities = np.full(shape=points, fill_value=city)\n for c in resultCities:\n cityResult.append(c)\n resultCountries = np.full(shape=points, fill_value=country)\n for c in resultCountries:\n countryResult.append(c)\n print('iteration no:' + str(i))\n df = pd.DataFrame()\n df['date'] = dateResult\n df['temperature'] = temperatureResult\n df['latitude'] = latitudeResult\n df['longitude'] = longitudeResult\n df['city'] = cityResult\n df['country'] = countryResult\n df.to_csv('C:\\\\Users\\\\stoja\\\\Desktop\\\\Temperatures.csv', index=False)\n return df\n\n\ndef model():\n ds = Dataset('air.mon.mean.v501.nc')\n print(ds)\n time = ds.variables['time']\n print(time.units)\n time_array = time[:]\n for t in time_array[:]:\n print(num2date(t, units=time.units))\n\n\nif __name__ == '__main__':\n print('Start')\n co2_by_country_till2019()\n df1 = make_dataset_temperatures('air.mon.mean.v501.nc', 1416)\n print(df1.head())\n df2 = make_dataset_temperatures('Complete_TAVG_Daily_LatLong1_2010.nc',\n 3652)\n print(df2.head())\n",
"step-5": "import pandas as pd\r\nimport matplotlib.pyplot as plt\r\nfrom netCDF4 import Dataset\r\nfrom cftime import num2date\r\nimport os\r\nimport numpy as np\r\nfrom datetime import datetime, timedelta, date\r\n\r\n\r\ndef plot_temperatures_by_country(values, country, start, end):\r\n \"\"\"\r\n Returns a plot for temperature values for a country\r\n from a start point to an end point\r\n \"\"\"\r\n\r\n filtered = values.loc[(values['Country'] == country) &\r\n (values['dt'] >= start) &\r\n (values['dt'] <= end)]\r\n\r\n # x axis values\r\n x1 = filtered['dt']\r\n # corresponding y axis values\r\n y1 = filtered['AverageTemperature']\r\n\r\n # plotting the points\r\n plt.plot(x1, y1, label = \"line 1\")\r\n\r\n filtered = values.loc[(values['Country'] == country) &\r\n (values['dt'] >= '1973-01-01') &\r\n (values['dt'] <= '1974-01-01')]\r\n\r\n # x axis values\r\n x2 = filtered['dt']\r\n # corresponding y axis values\r\n y2 = filtered['AverageTemperature']\r\n\r\n # plotting the points\r\n plt.plot(x2, y2, label=\"line 2\")\r\n\r\n # naming the x axis\r\n plt.xlabel('x - axis - date')\r\n # naming the y axis\r\n plt.ylabel('y - axis - temperature')\r\n\r\n plt.title('Temperatures from ' + start + ' to ' + end + ' for ' + country)\r\n\r\n # function to show the plot\r\n plt.show()\r\n\r\n\r\ndef temperatures_by_city_till2013():\r\n \"\"\"\r\n Info for dataset, temperatures by city part 1 - from 1743 to 2013\r\n \"\"\"\r\n\r\n # Columns: dt,AverageTemperature,AverageTemperatureUncertainty,City,Country,Latitude,Longitude\r\n temperatures = pd.read_csv(\"GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv\")\r\n\r\n # 8 599 212 rows\r\n print(len(temperatures))\r\n\r\n countries = temperatures['Country'].unique()\r\n print(len(countries))\r\n print(sorted(countries))\r\n\r\n\r\ndef temperatures_by_country_till2013():\r\n \"\"\"\r\n Info for dataset, temperatures by country part 1 - from 1743 to 2013\r\n \"\"\"\r\n\r\n # Columns: dt, AverageTemperature, AverageTemperatureUncertainty, Country\r\n temperatures = pd.read_csv(\"GlobalLandTemperatures/GlobalLandTemperaturesByCountry.csv\")\r\n\r\n # 577 462 rows\r\n print(len(temperatures))\r\n\r\n countries = temperatures['Country'].unique()\r\n print(len(countries))\r\n print(sorted(countries))\r\n\r\n\r\ndef plot_co2_by_country(values, country, start, end):\r\n \"\"\"\r\n Returns a plot for co2 values for a country\r\n from a start point to an end point\r\n \"\"\"\r\n\r\n filtered = values.loc[(values['Country'] == country) &\r\n (values['Year'] >= start) &\r\n (values['Year'] <= end)]\r\n\r\n # x axis values\r\n x1 = filtered['Year']\r\n # corresponding y axis values\r\n y1 = filtered['CO2']\r\n\r\n # plotting the points\r\n plt.plot(x1, y1, label = \"line 1\")\r\n\r\n # naming the x axis\r\n plt.xlabel('x - axis - year')\r\n # naming the y axis\r\n plt.ylabel('y - axis - co2')\r\n\r\n # giving a title to my graph\r\n plt.title('CO2 from ' + start + ' to ' + end + ' for ' + country)\r\n\r\n # function to show the plot\r\n plt.show()\r\n\r\n\r\ndef co2_by_country_till2019():\r\n \"\"\"\r\n Info for dataset, co2 by country part 1 - from 1751 to 2017\r\n \"\"\"\r\n co2_messy = pd.read_csv(\"CO2/emission data.csv\")\r\n\r\n co2 = pd.melt(co2_messy, id_vars=[\"Country\"], var_name=\"Year\", value_name=\"CO2\")\r\n\r\n df = pd.DataFrame()\r\n df['Country'] = co2['Country']\r\n df['Year'] = co2['Year']\r\n df['CO2'] = co2['CO2']\r\n\r\n df.to_csv(r'C:\\Users\\stoja\\Desktop\\EmissionCO2.csv', index=False)\r\n\r\n\r\ndef get_lat_lon():\r\n 
\"\"\"\r\n Returns arrays for latitudes, longitudes, cities and countries\r\n from dataset, temperatures by country part 1, from 1743 to 2013\r\n \"\"\"\r\n\r\n # Columns: dt,AverageTemperature,AverageTemperatureUncertainty,City,Country,Latitude,Longitude\r\n temperatures = pd.read_csv(\"GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv\")\r\n\r\n Latitude = temperatures['Latitude']\r\n Longitude = temperatures['Longitude']\r\n City = temperatures['City']\r\n Country = temperatures['Country']\r\n\r\n lat_array = []\r\n long_array = []\r\n cities_array = []\r\n countries_array = []\r\n tuples = []\r\n for i, j, city, country in zip(Latitude, Longitude, City, Country):\r\n if (i, j) not in tuples:\r\n tuples.append((i, j))\r\n lat_array.append(float(i[:-1]))\r\n long_array.append(float(j[:-1]))\r\n cities_array.append(city)\r\n countries_array.append(country)\r\n\r\n return lat_array, long_array, cities_array, countries_array\r\n\r\n\r\ndef make_dataset_temperatures(filename, points):\r\n \"\"\"\r\n From netCDF4 file to CSV file\r\n \"\"\"\r\n\r\n ds = Dataset(filename)\r\n\r\n lats, lons, cities, countries = get_lat_lon()\r\n\r\n # total lat,lon pairs: 1366\r\n print('The number of rows is ' + str(len(lats)*points))\r\n lon = ds.variables['longitude']\r\n lat = ds.variables['latitude']\r\n time = ds.variables['date_number']\r\n\r\n lon_array = lon[:]\r\n lat_array = lat[:]\r\n time_array = time[:]\r\n\r\n temperature = ds.variables['temperature']\r\n\r\n dates = []\r\n for time in time_array[:]:\r\n year = int(time)\r\n rem = time - year\r\n base = datetime(year, 1, 1)\r\n dates.append((base + timedelta(seconds=(base.replace(year=base.year + 1) - base).total_seconds() * rem)).date())\r\n\r\n # second approach\r\n # for t in time_array[:]:\r\n # dates.append(num2date(t, units=time.units))\r\n\r\n dateResult = []\r\n temperatureResult = []\r\n latitudeResult = []\r\n longitudeResult = []\r\n cityResult = []\r\n countryResult = []\r\n\r\n for latitude, longitude, city, country in zip(lats, lons, cities, countries):\r\n\r\n # We want to find data for latitude, longitude\r\n # We first need to find the indexes\r\n i = np.abs(lon_array - longitude).argmin()\r\n j = np.abs(lat_array - latitude).argmin()\r\n\r\n for d in dates:\r\n dateResult.append(d)\r\n\r\n resultTemperature = temperature[:, j, i]\r\n for t in resultTemperature:\r\n temperatureResult.append(t)\r\n\r\n resultLatitues = np.full(\r\n shape=points,\r\n fill_value=latitude,\r\n dtype=np.float\r\n )\r\n for l in resultLatitues:\r\n latitudeResult.append(l)\r\n\r\n resultLongitudes = np.full(\r\n shape=points,\r\n fill_value=longitude,\r\n dtype=np.float\r\n )\r\n for l in resultLongitudes:\r\n longitudeResult.append(l)\r\n\r\n resultCities = np.full(\r\n shape=points,\r\n fill_value=city\r\n )\r\n for c in resultCities:\r\n cityResult.append(c)\r\n\r\n resultCountries = np.full(\r\n shape=points,\r\n fill_value=country\r\n )\r\n for c in resultCountries:\r\n countryResult.append(c)\r\n\r\n print('iteration no:' + str(i))\r\n\r\n df = pd.DataFrame()\r\n df['date'] = dateResult\r\n df['temperature'] = temperatureResult\r\n df['latitude'] = latitudeResult\r\n df['longitude'] = longitudeResult\r\n df['city'] = cityResult\r\n df['country'] = countryResult\r\n\r\n df.to_csv(r'C:\\Users\\stoja\\Desktop\\Temperatures.csv', index=False)\r\n return df\r\n\r\n\r\ndef model():\r\n\r\n # Info for netCDF4 file\r\n # 1416\r\n ds = Dataset('air.mon.mean.v501.nc')\r\n print(ds)\r\n time = ds.variables['time']\r\n print(time.units)\r\n time_array 
= time[:]\r\n for t in time_array[:]:\r\n print(num2date(t, units=time.units))\r\n\r\n\r\nif __name__ == '__main__':\r\n print('Start')\r\n\r\n # Making the CO2 dataset\r\n co2_by_country_till2019()\r\n\r\n # Making the temperatures dataset\r\n df1 = make_dataset_temperatures('air.mon.mean.v501.nc', 1416)\r\n print(df1.head())\r\n\r\n # Making the temperatures anomalies dataset\r\n df2 = make_dataset_temperatures('Complete_TAVG_Daily_LatLong1_2010.nc', 3652)\r\n print(df2.head())\r\n",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
from datapackage_pipelines.wrapper import ingest, spew
params, datapackage, res_iter = ingest()
columns = params['columns']
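# Drop the configured columns from each resource's schema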
for resource in datapackage['resources']:
fields = resource.get('schema', {}).get('fields')
if fields is not None:
fields = [field for field in fields if field['name'] not in columns]
resource['schema']['fields'] = fields
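# Strip the same columns from every row as the data streams through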
def process_resources(_res_iter):
for rows in _res_iter:
def process_rows(_rows):
for row in _rows:
for column in columns:
if column in row:
del row[column]
yield row
yield process_rows(rows)
spew(datapackage, process_resources(res_iter))
|
normal
|
{
"blob_id": "17b3fb44d9e7a09fe3b807b47bdc0248b6960634",
"index": 4022,
"step-1": "<mask token>\n\n\ndef process_resources(_res_iter):\n for rows in _res_iter:\n\n def process_rows(_rows):\n for row in _rows:\n for column in columns:\n if column in row:\n del row[column]\n yield row\n yield process_rows(rows)\n\n\n<mask token>\n",
"step-2": "<mask token>\nfor resource in datapackage['resources']:\n fields = resource.get('schema', {}).get('fields')\n if fields is not None:\n fields = [field for field in fields if field['name'] not in columns]\n resource['schema']['fields'] = fields\n\n\ndef process_resources(_res_iter):\n for rows in _res_iter:\n\n def process_rows(_rows):\n for row in _rows:\n for column in columns:\n if column in row:\n del row[column]\n yield row\n yield process_rows(rows)\n\n\nspew(datapackage, process_resources(res_iter))\n",
"step-3": "<mask token>\nparams, datapackage, res_iter = ingest()\ncolumns = params['columns']\nfor resource in datapackage['resources']:\n fields = resource.get('schema', {}).get('fields')\n if fields is not None:\n fields = [field for field in fields if field['name'] not in columns]\n resource['schema']['fields'] = fields\n\n\ndef process_resources(_res_iter):\n for rows in _res_iter:\n\n def process_rows(_rows):\n for row in _rows:\n for column in columns:\n if column in row:\n del row[column]\n yield row\n yield process_rows(rows)\n\n\nspew(datapackage, process_resources(res_iter))\n",
"step-4": "from datapackage_pipelines.wrapper import ingest, spew\nparams, datapackage, res_iter = ingest()\ncolumns = params['columns']\nfor resource in datapackage['resources']:\n fields = resource.get('schema', {}).get('fields')\n if fields is not None:\n fields = [field for field in fields if field['name'] not in columns]\n resource['schema']['fields'] = fields\n\n\ndef process_resources(_res_iter):\n for rows in _res_iter:\n\n def process_rows(_rows):\n for row in _rows:\n for column in columns:\n if column in row:\n del row[column]\n yield row\n yield process_rows(rows)\n\n\nspew(datapackage, process_resources(res_iter))\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
from import_.Import import Import
from classifier.Classifier import Classifier
from export.Export import Export
from preprocessing.PreProcess import PreProcess
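# Entry point: predicts cat lifespans ("age at death") from a CSV using the saved model in ./model/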
def main():
date_column = "date of last vet visit"
target = "age at death"
export_file_dir = "./output/"
export_model_dir = "./model/xgb_model.dat"
# IMPORT
import_ = Import()
print("""
To predict how long cats will live (in years) please enter the file path
for the cats csv file for example: ./input/cats_pred.csv
""")
cats = import_.import_df("predict")
cats_copy = cats.copy()
# PRE-PROCESSING
pre_process = PreProcess()
print("Pre-processing Imported Data..")
# process date to keep year only
print("Processing date column to keep year only")
pre_process.strip_year(cats, date_column)
# Storing numerical columns in the background
pre_process.get_numerical_cols(cats)
# Convert all columns to float data type
print("Convert all columns to float data type")
pre_process.convert_to_float(cats)
# Replace NaN values with Median
print("Replacing all NaN values with median")
cats = pre_process.replace_nan(cats)
# Normalise dataset
print("Normalising dataset")
cats = pre_process.normalise(cats)
print("""
Cats dataset
{0}
""".format(cats.head()))
# PREDICTION
print("Prediction Starting")
cats_pred = Classifier.predict(export_model_dir, cats)
# EXPORTING
print("Prediction Finished")
Export.export_pred_file(cats_copy, cats_pred, target, export_file_dir)
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "696b9db78cc7f6002eb39b640e0e5b2b53e52e91",
"index": 8448,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n date_column = 'date of last vet visit'\n target = 'age at death'\n export_file_dir = './output/'\n export_model_dir = './model/xgb_model.dat'\n import_ = Import()\n print(\n \"\"\"\nTo predict how long cats will live (in years) please enter the file path\nfor the cats csv file for example: ./input/cats_pred.csv\n \"\"\"\n )\n cats = import_.import_df('predict')\n cats_copy = cats.copy()\n pre_process = PreProcess()\n print('Pre-processing Imported Data..')\n print('Processing date column to keep year only')\n pre_process.strip_year(cats, date_column)\n pre_process.get_numerical_cols(cats)\n print('Convert all columns to float data type')\n pre_process.convert_to_float(cats)\n print('Replacing all NaN values with median')\n cats = pre_process.replace_nan(cats)\n print('Normalising dataset')\n cats = pre_process.normalise(cats)\n print(\"\"\"\n Cats dataset \n {0} \n \"\"\".format(cats.head()))\n print('Prediction Starting')\n cats_pred = Classifier.predict(export_model_dir, cats)\n print('Prediction Finished')\n Export.export_pred_file(cats_copy, cats_pred, target, export_file_dir)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n date_column = 'date of last vet visit'\n target = 'age at death'\n export_file_dir = './output/'\n export_model_dir = './model/xgb_model.dat'\n import_ = Import()\n print(\n \"\"\"\nTo predict how long cats will live (in years) please enter the file path\nfor the cats csv file for example: ./input/cats_pred.csv\n \"\"\"\n )\n cats = import_.import_df('predict')\n cats_copy = cats.copy()\n pre_process = PreProcess()\n print('Pre-processing Imported Data..')\n print('Processing date column to keep year only')\n pre_process.strip_year(cats, date_column)\n pre_process.get_numerical_cols(cats)\n print('Convert all columns to float data type')\n pre_process.convert_to_float(cats)\n print('Replacing all NaN values with median')\n cats = pre_process.replace_nan(cats)\n print('Normalising dataset')\n cats = pre_process.normalise(cats)\n print(\"\"\"\n Cats dataset \n {0} \n \"\"\".format(cats.head()))\n print('Prediction Starting')\n cats_pred = Classifier.predict(export_model_dir, cats)\n print('Prediction Finished')\n Export.export_pred_file(cats_copy, cats_pred, target, export_file_dir)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from import_.Import import Import\nfrom classifier.Classifier import Classifier\nfrom export.Export import Export\nfrom preprocessing.PreProcess import PreProcess\n\n\ndef main():\n date_column = 'date of last vet visit'\n target = 'age at death'\n export_file_dir = './output/'\n export_model_dir = './model/xgb_model.dat'\n import_ = Import()\n print(\n \"\"\"\nTo predict how long cats will live (in years) please enter the file path\nfor the cats csv file for example: ./input/cats_pred.csv\n \"\"\"\n )\n cats = import_.import_df('predict')\n cats_copy = cats.copy()\n pre_process = PreProcess()\n print('Pre-processing Imported Data..')\n print('Processing date column to keep year only')\n pre_process.strip_year(cats, date_column)\n pre_process.get_numerical_cols(cats)\n print('Convert all columns to float data type')\n pre_process.convert_to_float(cats)\n print('Replacing all NaN values with median')\n cats = pre_process.replace_nan(cats)\n print('Normalising dataset')\n cats = pre_process.normalise(cats)\n print(\"\"\"\n Cats dataset \n {0} \n \"\"\".format(cats.head()))\n print('Prediction Starting')\n cats_pred = Classifier.predict(export_model_dir, cats)\n print('Prediction Finished')\n Export.export_pred_file(cats_copy, cats_pred, target, export_file_dir)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from import_.Import import Import\nfrom classifier.Classifier import Classifier\nfrom export.Export import Export\nfrom preprocessing.PreProcess import PreProcess\n\n\ndef main():\n\n date_column = \"date of last vet visit\"\n target = \"age at death\"\n export_file_dir = \"./output/\"\n export_model_dir = \"./model/xgb_model.dat\"\n\n # IMPORT\n import_ = Import()\n print(\"\"\"\nTo predict how long cats will live (in years) please enter the file path\nfor the cats csv file for example: ./input/cats_pred.csv\n \"\"\")\n cats = import_.import_df(\"predict\")\n cats_copy = cats.copy()\n\n # PRE-PROCESSING\n pre_process = PreProcess()\n print(\"Pre-processing Imported Data..\")\n\n # process date to keep year only\n print(\"Processing date column to keep year only\")\n pre_process.strip_year(cats, date_column)\n\n # Storing numerical columns in the background\n pre_process.get_numerical_cols(cats)\n\n # Convert all columns to float data type\n print(\"Convert all columns to float data type\")\n pre_process.convert_to_float(cats)\n\n # Replace NaN values with Median\n print(\"Replacing all NaN values with median\")\n cats = pre_process.replace_nan(cats)\n\n # Normalise dataset\n print(\"Normalising dataset\")\n cats = pre_process.normalise(cats)\n print(\"\"\"\n Cats dataset \n {0} \n \"\"\".format(cats.head()))\n\n # PREDICTION\n print(\"Prediction Starting\")\n cats_pred = Classifier.predict(export_model_dir, cats)\n\n # EXPORTING\n print(\"Prediction Finished\")\n Export.export_pred_file(cats_copy, cats_pred, target, export_file_dir)\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
/home/runner/.cache/pip/pool/9b/88/a0/f20a7b2f367cd365add3353eba0cf34569d5f62a33587f96cebe6d4360
|
normal
|
{
"blob_id": "12f05f42c9ed56d6a2c95fb56a8619fae47a2f1a",
"index": 6035,
"step-1": "/home/runner/.cache/pip/pool/9b/88/a0/f20a7b2f367cd365add3353eba0cf34569d5f62a33587f96cebe6d4360",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python3
'''Turning on or off, toggling and checking the status of a specific relay'''
from time import sleep
from gpiozero import LED
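# BCM pin numbers, assumed to be wired to an 8-channel relay board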
RELAYS = [
LED(23),
LED(24),
LED(25),
LED(8),
LED(7),
LED(1),
LED(12),
LED(16)
]
def on_action(relay_option, number):
'''To turn on the chosen relay'''
relay_option.on()
print(f"relay {number} is turning on")
def off_action(relay_option, number):
'''To turn off the chosen relay'''
relay_option.off()
print(f"relay {number} is turning off")
def toggle_action(relay_option, number):
'''To toggle the chosen relay'''
print(f"relay {number} is toggling")
relay_option.on()
sleep(0.5)
relay_option.off()
sleep(0.5)
def print_help():
'''Print/show help for informations of the required parameter'''
print('''
Description
Arguments:
number number of relay 1 to 8
action on, off, or toggle
optional arguments:
h show this help message and exit
''')
def options():
'''Input the relay number or show help and check the input'''
input_str = input("Which relay? ")
while True:
if input_str == 'h':
print_help()
return
index = int(input_str) - 1
if 0 <= index <= 7:
relay_status(RELAYS[index], input_str)
relay_action(RELAYS[index], input_str)
relay_status(RELAYS[index], input_str)
return
else:
print("index out of range")
return
def relay_action(relay_number, num):
'''Do the given order(turn on, turn off, toggle) or raise error'''
action = input("Which action? ")
while True:
try:
return {
'on': on_action,
'off': off_action,
'toggle': toggle_action
}[action](relay_number, num)
except KeyError:
print("Try again")
return relay_action(relay_number, num)
def relay_status(relay_number, number):
'''Check initial relay's status'''
if relay_number.value == 1:
print(f"relay {number} is on")
else:
print(f"relay {number} is off")
while True:
options()
sleep(1)
|
normal
|
{
"blob_id": "d82412055affc96d634957c953a35ea69b7e702f",
"index": 403,
"step-1": "<mask token>\n\n\ndef on_action(relay_option, number):\n \"\"\"To turn on the chosen relay\"\"\"\n relay_option.on()\n print(f'relay {number} is turning on')\n\n\n<mask token>\n\n\ndef toggle_action(relay_option, number):\n \"\"\"To toggle the chosen relay\"\"\"\n print(f'relay {number} is toggling')\n relay_option.on()\n sleep(0.5)\n relay_option.off()\n sleep(0.5)\n\n\ndef print_help():\n \"\"\"Print/show help for informations of the required parameter\"\"\"\n print(\n \"\"\"\nDescription\n\nArguments:\n number number of relay 1 to 8\n action on, off, or toggle\n\noptional arguments:\n h show this help message and exit\n \"\"\"\n )\n\n\ndef options():\n \"\"\"Input the relay number or show help and check the input\"\"\"\n input_str = input('Which relay? ')\n while True:\n if input_str == 'h':\n print_help()\n return\n index = int(input_str) - 1\n if 0 <= index <= 7:\n relay_status(RELAYS[index], input_str)\n relay_action(RELAYS[index], input_str)\n relay_status(RELAYS[index], input_str)\n return\n else:\n print('index out of range')\n return\n\n\ndef relay_action(relay_number, num):\n \"\"\"Do the given order(turn on, turn off, toggle) or raise error\"\"\"\n action = input('Which action? ')\n while True:\n try:\n return {'on': on_action, 'off': off_action, 'toggle': toggle_action\n }[action](relay_number, num)\n except KeyError:\n print('Try again')\n return relay_action(relay_number, num)\n\n\ndef relay_status(relay_number, number):\n \"\"\"Check initial relay's status\"\"\"\n if relay_number.value == 1:\n print(f'relay {number} is on')\n else:\n print(f'relay {number} is off')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef on_action(relay_option, number):\n \"\"\"To turn on the chosen relay\"\"\"\n relay_option.on()\n print(f'relay {number} is turning on')\n\n\ndef off_action(relay_option, number):\n \"\"\"To turn off the chosen relay\"\"\"\n relay_option.off()\n print(f'relay {number} is turning off')\n\n\ndef toggle_action(relay_option, number):\n \"\"\"To toggle the chosen relay\"\"\"\n print(f'relay {number} is toggling')\n relay_option.on()\n sleep(0.5)\n relay_option.off()\n sleep(0.5)\n\n\ndef print_help():\n \"\"\"Print/show help for informations of the required parameter\"\"\"\n print(\n \"\"\"\nDescription\n\nArguments:\n number number of relay 1 to 8\n action on, off, or toggle\n\noptional arguments:\n h show this help message and exit\n \"\"\"\n )\n\n\ndef options():\n \"\"\"Input the relay number or show help and check the input\"\"\"\n input_str = input('Which relay? ')\n while True:\n if input_str == 'h':\n print_help()\n return\n index = int(input_str) - 1\n if 0 <= index <= 7:\n relay_status(RELAYS[index], input_str)\n relay_action(RELAYS[index], input_str)\n relay_status(RELAYS[index], input_str)\n return\n else:\n print('index out of range')\n return\n\n\ndef relay_action(relay_number, num):\n \"\"\"Do the given order(turn on, turn off, toggle) or raise error\"\"\"\n action = input('Which action? ')\n while True:\n try:\n return {'on': on_action, 'off': off_action, 'toggle': toggle_action\n }[action](relay_number, num)\n except KeyError:\n print('Try again')\n return relay_action(relay_number, num)\n\n\ndef relay_status(relay_number, number):\n \"\"\"Check initial relay's status\"\"\"\n if relay_number.value == 1:\n print(f'relay {number} is on')\n else:\n print(f'relay {number} is off')\n\n\n<mask token>\n",
"step-3": "<mask token>\nRELAYS = [LED(23), LED(24), LED(25), LED(8), LED(7), LED(1), LED(12), LED(16)]\n\n\ndef on_action(relay_option, number):\n \"\"\"To turn on the chosen relay\"\"\"\n relay_option.on()\n print(f'relay {number} is turning on')\n\n\ndef off_action(relay_option, number):\n \"\"\"To turn off the chosen relay\"\"\"\n relay_option.off()\n print(f'relay {number} is turning off')\n\n\ndef toggle_action(relay_option, number):\n \"\"\"To toggle the chosen relay\"\"\"\n print(f'relay {number} is toggling')\n relay_option.on()\n sleep(0.5)\n relay_option.off()\n sleep(0.5)\n\n\ndef print_help():\n \"\"\"Print/show help for informations of the required parameter\"\"\"\n print(\n \"\"\"\nDescription\n\nArguments:\n number number of relay 1 to 8\n action on, off, or toggle\n\noptional arguments:\n h show this help message and exit\n \"\"\"\n )\n\n\ndef options():\n \"\"\"Input the relay number or show help and check the input\"\"\"\n input_str = input('Which relay? ')\n while True:\n if input_str == 'h':\n print_help()\n return\n index = int(input_str) - 1\n if 0 <= index <= 7:\n relay_status(RELAYS[index], input_str)\n relay_action(RELAYS[index], input_str)\n relay_status(RELAYS[index], input_str)\n return\n else:\n print('index out of range')\n return\n\n\ndef relay_action(relay_number, num):\n \"\"\"Do the given order(turn on, turn off, toggle) or raise error\"\"\"\n action = input('Which action? ')\n while True:\n try:\n return {'on': on_action, 'off': off_action, 'toggle': toggle_action\n }[action](relay_number, num)\n except KeyError:\n print('Try again')\n return relay_action(relay_number, num)\n\n\ndef relay_status(relay_number, number):\n \"\"\"Check initial relay's status\"\"\"\n if relay_number.value == 1:\n print(f'relay {number} is on')\n else:\n print(f'relay {number} is off')\n\n\nwhile True:\n options()\n sleep(1)\n",
"step-4": "<mask token>\nfrom time import sleep\nfrom gpiozero import LED\nRELAYS = [LED(23), LED(24), LED(25), LED(8), LED(7), LED(1), LED(12), LED(16)]\n\n\ndef on_action(relay_option, number):\n \"\"\"To turn on the chosen relay\"\"\"\n relay_option.on()\n print(f'relay {number} is turning on')\n\n\ndef off_action(relay_option, number):\n \"\"\"To turn off the chosen relay\"\"\"\n relay_option.off()\n print(f'relay {number} is turning off')\n\n\ndef toggle_action(relay_option, number):\n \"\"\"To toggle the chosen relay\"\"\"\n print(f'relay {number} is toggling')\n relay_option.on()\n sleep(0.5)\n relay_option.off()\n sleep(0.5)\n\n\ndef print_help():\n \"\"\"Print/show help for informations of the required parameter\"\"\"\n print(\n \"\"\"\nDescription\n\nArguments:\n number number of relay 1 to 8\n action on, off, or toggle\n\noptional arguments:\n h show this help message and exit\n \"\"\"\n )\n\n\ndef options():\n \"\"\"Input the relay number or show help and check the input\"\"\"\n input_str = input('Which relay? ')\n while True:\n if input_str == 'h':\n print_help()\n return\n index = int(input_str) - 1\n if 0 <= index <= 7:\n relay_status(RELAYS[index], input_str)\n relay_action(RELAYS[index], input_str)\n relay_status(RELAYS[index], input_str)\n return\n else:\n print('index out of range')\n return\n\n\ndef relay_action(relay_number, num):\n \"\"\"Do the given order(turn on, turn off, toggle) or raise error\"\"\"\n action = input('Which action? ')\n while True:\n try:\n return {'on': on_action, 'off': off_action, 'toggle': toggle_action\n }[action](relay_number, num)\n except KeyError:\n print('Try again')\n return relay_action(relay_number, num)\n\n\ndef relay_status(relay_number, number):\n \"\"\"Check initial relay's status\"\"\"\n if relay_number.value == 1:\n print(f'relay {number} is on')\n else:\n print(f'relay {number} is off')\n\n\nwhile True:\n options()\n sleep(1)\n",
"step-5": "'''Turning on or off, toggling and checking the status' of a specific relay'''\n\n#!/bin/env python3\n\nfrom time import sleep\nfrom gpiozero import LED\n\nRELAYS = [\n LED(23),\n LED(24),\n LED(25),\n LED(8),\n LED(7),\n LED(1),\n LED(12),\n LED(16)\n]\n\n\ndef on_action(relay_option, number):\n '''To turn on the chosen relay'''\n relay_option.on()\n print(f\"relay {number} is turning on\")\n\n\ndef off_action(relay_option, number):\n '''To turn off the chosen relay'''\n relay_option.off()\n print(f\"relay {number} is turning off\")\n\n\ndef toggle_action(relay_option, number):\n '''To toggle the chosen relay'''\n print(f\"relay {number} is toggling\")\n relay_option.on()\n sleep(0.5)\n relay_option.off()\n sleep(0.5)\n\n\ndef print_help():\n '''Print/show help for informations of the required parameter'''\n print('''\nDescription\n\nArguments:\n number number of relay 1 to 8\n action on, off, or toggle\n\noptional arguments:\n h show this help message and exit\n ''')\n\n\ndef options():\n '''Input the relay number or show help and check the input'''\n input_str = input(\"Which relay? \")\n while True:\n if input_str == 'h':\n print_help()\n return\n\n index = int(input_str) - 1\n if 0 <= index <= 7:\n relay_status(RELAYS[index], input_str)\n relay_action(RELAYS[index], input_str)\n relay_status(RELAYS[index], input_str)\n return\n else:\n print(\"index out of range\")\n return\n\n\ndef relay_action(relay_number, num):\n '''Do the given order(turn on, turn off, toggle) or raise error'''\n action = input(\"Which action? \")\n while True:\n\n try:\n return {\n 'on': on_action,\n 'off': off_action,\n 'toggle': toggle_action\n }[action](relay_number, num)\n except KeyError:\n print(\"Try again\")\n return relay_action(relay_number, num)\n\n\ndef relay_status(relay_number, number):\n '''Check initial relay's status'''\n if relay_number.value == 1:\n print(f\"relay {number} is on\")\n else:\n print(f\"relay {number} is off\")\n\n\nwhile True:\n options()\n sleep(1)\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
'''
Check whether a word appears in a file: return the first line containing it, or None.
'''
# easier solution :
def findKeyInFile(word, filepath):
with open(filepath) as f:
        for line in f:
            if word in line:
return line
return None
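
# Minimal usage sketch (hypothetical file path):
# print(findKeyInFile('ERROR', '/var/log/app.log'))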
|
normal
|
{
"blob_id": "97fb2388777bcb459b9818495121fdf8318095ca",
"index": 8881,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef findKeyInFile(word, filepath):\n with open(filepath) as f:\n for line in f.readlines():\n if line.count(word) > 0:\n return line\n return None\n",
"step-3": "'''\ncheck if word appear in file\n'''\n# easier solution :\ndef findKeyInFile(word, filepath):\n with open(filepath) as f:\n for line in f.readlines():\n if line.count(word) > 0:\n return line\n return None\n\n\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from django.contrib import admin
from django.urls import path
from django.conf.urls import url
from . import views
urlpatterns = [
path('admin/', admin.site.urls),
path(r'', views.index, name='index'),
]
|
normal
|
{
"blob_id": "b0fad3847519bb18365a8cd4226d06e9d96a8308",
"index": 1258,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('admin/', admin.site.urls), path('', views.index, name=\n 'index')]\n",
"step-3": "from django.contrib import admin\nfrom django.urls import path\nfrom django.conf.urls import url\nfrom . import views\nurlpatterns = [path('admin/', admin.site.urls), path('', views.index, name=\n 'index')]\n",
"step-4": "from django.contrib import admin\nfrom django.urls import path\nfrom django.conf.urls import url\nfrom . import views\nurlpatterns = [\n path('admin/', admin.site.urls),\n path(r'', views.index, name='index'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Generated by Django 3.0.8 on 2021-03-25 13:47
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Asha', '0005_baby'),
]
operations = [
migrations.AlterField(
model_name='baby',
name='Auth_Id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Asha.BasicDetails'),
),
]
|
normal
|
{
"blob_id": "e14b8d0f85042ceda955022bee08b3b3b4c2361d",
"index": 7367,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Asha', '0005_baby')]\n operations = [migrations.AlterField(model_name='baby', name='Auth_Id',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n to='Asha.BasicDetails'))]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Asha', '0005_baby')]\n operations = [migrations.AlterField(model_name='baby', name='Auth_Id',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,\n to='Asha.BasicDetails'))]\n",
"step-5": "# Generated by Django 3.0.8 on 2021-03-25 13:47\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('Asha', '0005_baby'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='baby',\r\n name='Auth_Id',\r\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Asha.BasicDetails'),\r\n ),\r\n ]\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
__author__ = 'acmASCIS'
'''
by ahani at {9/24/2016}
'''
import time
class Freq(object):
def __init__(self, array):
self.__array = array
self.__frequency_dict = {}
self.__array_length = len(array)
self.__running_time = round(time.time() * 1000)
def get_original_array(self):
return self.__array
def get_array_length(self):
return self.__array_length
def get_frequency_array(self):
if self.__frequency_dict is None:
raise Exception("The frequency array is empty, check your function implementation!")
return self.__frequency_dict
def get_running_time(self):
return self.__running_time
def get_frequency(self):
"""
Implement your elements frequency algorithm
:return: (dictionary) that contains key: element in array, value: frequency. Note that your dictionary should be sorted by key!
"""
#TODO
self.__running_time = round(time.time() * 1000) - self.__running_time
return self.__frequency_dict
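
# Minimal usage sketch (assumes the implementation above):
# freq = Freq([3, 1, 2, 3, 1, 3])
# print(freq.get_frequency())    # -> {1: 2, 2: 1, 3: 3}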
|
normal
|
{
"blob_id": "b569f0a0dda048d6337e1028a240caabf188a174",
"index": 9420,
"step-1": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\n 'The frequency array is empty, check your function implementation!'\n )\n return self.__frequency_dict\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\n 'The frequency array is empty, check your function implementation!'\n )\n return self.__frequency_dict\n\n def get_running_time(self):\n return self.__running_time\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass Freq(object):\n\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\n 'The frequency array is empty, check your function implementation!'\n )\n return self.__frequency_dict\n\n def get_running_time(self):\n return self.__running_time\n\n def get_frequency(self):\n \"\"\"\n Implement your elements frequency algorithm\n :return: (dictionary) that contains key: element in array, value: frequency. Note that your dictionary should be sorted by key!\n \"\"\"\n self.__running_time = round(time.time() * 1000) - self.__running_time\n return self.__frequency_dict\n",
"step-5": "___author__ = 'acmASCIS'\n\n'''\n by ahani at {9/24/2016}\n'''\n\nimport time\n\n\nclass Freq(object):\n def __init__(self, array):\n self.__array = array\n self.__frequency_dict = {}\n self.__array_length = len(array)\n self.__running_time = round(time.time() * 1000)\n\n def get_original_array(self):\n return self.__array\n\n def get_array_length(self):\n return self.__array_length\n\n def get_frequency_array(self):\n if self.__frequency_dict is None:\n raise Exception(\"The frequency array is empty, check your function implementation!\")\n\n return self.__frequency_dict\n\n def get_running_time(self):\n return self.__running_time\n\n def get_frequency(self):\n \"\"\"\n Implement your elements frequency algorithm\n :return: (dictionary) that contains key: element in array, value: frequency. Note that your dictionary should be sorted by key!\n \"\"\"\n\n #TODO\n\n\n self.__running_time = round(time.time() * 1000) - self.__running_time\n\n return self.__frequency_dict\n",
"step-ids": [
3,
5,
6,
7,
10
]
}
|
[
3,
5,
6,
7,
10
] |
#import fungsi_saya as fs
# from fungsi_saya import kalkulator as k
# hasil = k(10,5,'+')
# print(hasil)
from kelas import Siswa
siswa_1 = Siswa('Afif', "A.I.", 17, 'XII IPA')
siswa_2 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')
siswa_3 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')
siswa_4 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')
siswa_5 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')
siswa_6 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')
siswa_7 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')
#print(Siswa.jum_siswa)
|
normal
|
{
"blob_id": "bd2c327915c1e133a6e7b7a46290369440d50347",
"index": 3876,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsiswa_1 = Siswa('Afif', 'A.I.', 17, 'XII IPA')\nsiswa_2 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_3 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_4 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_5 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_6 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_7 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\n",
"step-3": "from kelas import Siswa\nsiswa_1 = Siswa('Afif', 'A.I.', 17, 'XII IPA')\nsiswa_2 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_3 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_4 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_5 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_6 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_7 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\n",
"step-4": "#import fungsi_saya as fs\n# from fungsi_saya import kalkulator as k\n\n# hasil = k(10,5,'+')\n# print(hasil)\n\nfrom kelas import Siswa\n\nsiswa_1 = Siswa('Afif', \"A.I.\", 17, 'XII IPA')\nsiswa_2 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_3 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_4 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_5 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_6 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\nsiswa_7 = Siswa('Bayu', 'Sudrajat', 20, 'XII IPS')\n#print(Siswa.jum_siswa)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import pytest
from homeworks.homework6.oop_2 import (
DeadLineError,
Homework,
HomeworkResult,
Student,
Teacher,
)
def test_creating_objects():
teacher = Teacher("Daniil", "Shadrin")
student = Student("Roman", "Petrov")
homework = teacher.create_homework("Learn OOP", 1)
homework_result = student.do_homework(homework, "I have done this hw")
assert isinstance(teacher, Teacher)
assert isinstance(student, Student)
assert isinstance(homework, Homework)
assert isinstance(homework_result, HomeworkResult)
def test_do_homework_exception():
teacher = Teacher("Daniil", "Shadrin")
student = Student("Lev", "Sokolov")
homework = teacher.create_homework("Learn OOP", 0)
with pytest.raises(DeadLineError, match=r"You are late"):
student.do_homework(homework, "I have done this hw")
def test_creating_and_resetting_homework_results_by_teacher():
teacher = Teacher("Daniil", "Shadrin")
student = Student("Roman", "Petrov")
homework_1 = teacher.create_homework("Learn OOP", 1)
homework_1_result = student.do_homework(homework_1, "I have done this hw")
assert teacher.check_homework(homework_1_result) is True
assert homework_1_result in teacher.homework_done[homework_1]
homework_2 = teacher.create_homework("homework 2", 1)
homework_2_result = student.do_homework(homework_2, "zero")
assert teacher.check_homework(homework_2_result) is False
assert teacher.homework_done.get(homework_2) is None
homework_3 = teacher.create_homework("homework 3", 1)
homework_3_result = student.do_homework(homework_3, "I have done this hw")
assert teacher.check_homework(homework_3_result) is True
assert homework_3_result in teacher.homework_done.get(homework_3)
assert len(teacher.homework_done) == 2
Teacher.reset_results(homework_3)
assert len(teacher.homework_done) == 1
Teacher.reset_results()
assert len(teacher.homework_done) == 0
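
# Run with: pytest (collects and executes the tests in this module)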
|
normal
|
{
"blob_id": "8f971ee3b98691a887ee0632afd613bbf4f19aa0",
"index": 3505,
"step-1": "<mask token>\n\n\ndef test_creating_objects():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Roman', 'Petrov')\n homework = teacher.create_homework('Learn OOP', 1)\n homework_result = student.do_homework(homework, 'I have done this hw')\n assert isinstance(teacher, Teacher)\n assert isinstance(student, Student)\n assert isinstance(homework, Homework)\n assert isinstance(homework_result, HomeworkResult)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_creating_objects():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Roman', 'Petrov')\n homework = teacher.create_homework('Learn OOP', 1)\n homework_result = student.do_homework(homework, 'I have done this hw')\n assert isinstance(teacher, Teacher)\n assert isinstance(student, Student)\n assert isinstance(homework, Homework)\n assert isinstance(homework_result, HomeworkResult)\n\n\n<mask token>\n\n\ndef test_creating_and_resetting_homework_results_by_teacher():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Roman', 'Petrov')\n homework_1 = teacher.create_homework('Learn OOP', 1)\n homework_1_result = student.do_homework(homework_1, 'I have done this hw')\n assert teacher.check_homework(homework_1_result) is True\n assert homework_1_result in teacher.homework_done[homework_1]\n homework_2 = teacher.create_homework('homework 2', 1)\n homework_2_result = student.do_homework(homework_2, 'zero')\n assert teacher.check_homework(homework_2_result) is False\n assert teacher.homework_done.get(homework_2) is None\n homework_3 = teacher.create_homework('homework 3', 1)\n homework_3_result = student.do_homework(homework_3, 'I have done this hw')\n assert teacher.check_homework(homework_3_result) is True\n assert homework_3_result in teacher.homework_done.get(homework_3)\n assert len(teacher.homework_done) == 2\n Teacher.reset_results(homework_3)\n assert len(teacher.homework_done) == 1\n Teacher.reset_results()\n assert len(teacher.homework_done) == 0\n",
"step-3": "<mask token>\n\n\ndef test_creating_objects():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Roman', 'Petrov')\n homework = teacher.create_homework('Learn OOP', 1)\n homework_result = student.do_homework(homework, 'I have done this hw')\n assert isinstance(teacher, Teacher)\n assert isinstance(student, Student)\n assert isinstance(homework, Homework)\n assert isinstance(homework_result, HomeworkResult)\n\n\ndef test_do_homework_exception():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Lev', 'Sokolov')\n homework = teacher.create_homework('Learn OOP', 0)\n with pytest.raises(DeadLineError, match='You are late'):\n student.do_homework(homework, 'I have done this hw')\n\n\ndef test_creating_and_resetting_homework_results_by_teacher():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Roman', 'Petrov')\n homework_1 = teacher.create_homework('Learn OOP', 1)\n homework_1_result = student.do_homework(homework_1, 'I have done this hw')\n assert teacher.check_homework(homework_1_result) is True\n assert homework_1_result in teacher.homework_done[homework_1]\n homework_2 = teacher.create_homework('homework 2', 1)\n homework_2_result = student.do_homework(homework_2, 'zero')\n assert teacher.check_homework(homework_2_result) is False\n assert teacher.homework_done.get(homework_2) is None\n homework_3 = teacher.create_homework('homework 3', 1)\n homework_3_result = student.do_homework(homework_3, 'I have done this hw')\n assert teacher.check_homework(homework_3_result) is True\n assert homework_3_result in teacher.homework_done.get(homework_3)\n assert len(teacher.homework_done) == 2\n Teacher.reset_results(homework_3)\n assert len(teacher.homework_done) == 1\n Teacher.reset_results()\n assert len(teacher.homework_done) == 0\n",
"step-4": "import pytest\nfrom homeworks.homework6.oop_2 import DeadLineError, Homework, HomeworkResult, Student, Teacher\n\n\ndef test_creating_objects():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Roman', 'Petrov')\n homework = teacher.create_homework('Learn OOP', 1)\n homework_result = student.do_homework(homework, 'I have done this hw')\n assert isinstance(teacher, Teacher)\n assert isinstance(student, Student)\n assert isinstance(homework, Homework)\n assert isinstance(homework_result, HomeworkResult)\n\n\ndef test_do_homework_exception():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Lev', 'Sokolov')\n homework = teacher.create_homework('Learn OOP', 0)\n with pytest.raises(DeadLineError, match='You are late'):\n student.do_homework(homework, 'I have done this hw')\n\n\ndef test_creating_and_resetting_homework_results_by_teacher():\n teacher = Teacher('Daniil', 'Shadrin')\n student = Student('Roman', 'Petrov')\n homework_1 = teacher.create_homework('Learn OOP', 1)\n homework_1_result = student.do_homework(homework_1, 'I have done this hw')\n assert teacher.check_homework(homework_1_result) is True\n assert homework_1_result in teacher.homework_done[homework_1]\n homework_2 = teacher.create_homework('homework 2', 1)\n homework_2_result = student.do_homework(homework_2, 'zero')\n assert teacher.check_homework(homework_2_result) is False\n assert teacher.homework_done.get(homework_2) is None\n homework_3 = teacher.create_homework('homework 3', 1)\n homework_3_result = student.do_homework(homework_3, 'I have done this hw')\n assert teacher.check_homework(homework_3_result) is True\n assert homework_3_result in teacher.homework_done.get(homework_3)\n assert len(teacher.homework_done) == 2\n Teacher.reset_results(homework_3)\n assert len(teacher.homework_done) == 1\n Teacher.reset_results()\n assert len(teacher.homework_done) == 0\n",
"step-5": "import pytest\n\nfrom homeworks.homework6.oop_2 import (\n DeadLineError,\n Homework,\n HomeworkResult,\n Student,\n Teacher,\n)\n\n\ndef test_creating_objects():\n teacher = Teacher(\"Daniil\", \"Shadrin\")\n student = Student(\"Roman\", \"Petrov\")\n homework = teacher.create_homework(\"Learn OOP\", 1)\n homework_result = student.do_homework(homework, \"I have done this hw\")\n assert isinstance(teacher, Teacher)\n assert isinstance(student, Student)\n assert isinstance(homework, Homework)\n assert isinstance(homework_result, HomeworkResult)\n\n\ndef test_do_homework_exception():\n teacher = Teacher(\"Daniil\", \"Shadrin\")\n student = Student(\"Lev\", \"Sokolov\")\n homework = teacher.create_homework(\"Learn OOP\", 0)\n with pytest.raises(DeadLineError, match=r\"You are late\"):\n student.do_homework(homework, \"I have done this hw\")\n\n\ndef test_creating_and_resetting_homework_results_by_teacher():\n teacher = Teacher(\"Daniil\", \"Shadrin\")\n student = Student(\"Roman\", \"Petrov\")\n homework_1 = teacher.create_homework(\"Learn OOP\", 1)\n homework_1_result = student.do_homework(homework_1, \"I have done this hw\")\n assert teacher.check_homework(homework_1_result) is True\n assert homework_1_result in teacher.homework_done[homework_1]\n\n homework_2 = teacher.create_homework(\"homework 2\", 1)\n homework_2_result = student.do_homework(homework_2, \"zero\")\n assert teacher.check_homework(homework_2_result) is False\n assert teacher.homework_done.get(homework_2) is None\n\n homework_3 = teacher.create_homework(\"homework 3\", 1)\n homework_3_result = student.do_homework(homework_3, \"I have done this hw\")\n assert teacher.check_homework(homework_3_result) is True\n assert homework_3_result in teacher.homework_done.get(homework_3)\n\n assert len(teacher.homework_done) == 2\n Teacher.reset_results(homework_3)\n assert len(teacher.homework_done) == 1\n Teacher.reset_results()\n assert len(teacher.homework_done) == 0\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import pandas as pd
import numpy as np
#import data
df = pd.read_csv('../.gitignore/PPP_data_to_150k.csv')
counties = pd.read_csv('../data/zip_code_database.csv')
demographics = pd.read_csv('../data/counties.csv')
#filter out all unanswered ethnicities
df2 = df[~df.RaceEthnicity.str.contains("Unanswered")]
#drop nonprofit column
df2.drop('NonProfit', axis=1,inplace=True)
#drop row with Nebraska Zip code
df2.drop([71479],axis=0, inplace=True)
#filter zip code database for Colorado, drop unnecessary columns
co_counties = counties[counties['state']=='CO']
co_counties_1 = co_counties.drop(['decommissioned', 'acceptable_cities', 'unacceptable_cities','timezone','area_codes','world_region','country','irs_estimated_population_2015','primary_city','state'],axis=1)
#merge counties onto dataframe
df_with_counties = pd.merge(df2,co_counties_1, left_on='Zip', right_on='zip')
#only include 2018 demographic data
demographics_18 = demographics[demographics['YEAR']==2018]
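#keep only the first 11 columns (assumed to be the identifier and population fields)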
demographics_18 = demographics_18.iloc[:,:11]
#split the merged dataframe by ethnicity (assumed definition: one dataframe per RaceEthnicity group)
ethnicity_dfs = [group for _, group in df_with_counties.groupby('RaceEthnicity')]

#drop NAN Jobs Retained values for scatter comparison of Jobs Retained to Loan Amount by ethnicity
ethnicity_dfs_job_comparison = [x.dropna(subset=['JobsRetained']) for x in ethnicity_dfs]

if __name__ == '__main__':
    pass
normal
|
{
"blob_id": "732478fd826e09cf304760dfcc30cd077f74d83e",
"index": 2250,
"step-1": "import pandas as pd\nimport numpy as np\n\n#import data\ndf = pd.read_csv('../.gitignore/PPP_data_to_150k.csv')\ncounties = pd.read_csv('../data/zip_code_database.csv')\ndemographics = pd.read_csv('../data/counties.csv')\n\n#filter out all unanswered ethnicities\ndf2 = df[~df.RaceEthnicity.str.contains(\"Unanswered\")]\n\n#drop nonprofit column\ndf2.drop('NonProfit', axis=1,inplace=True)\n\n#drop row with Nebraska Zip code\ndf2.drop([71479],axis=0, inplace=True)\n\n#filter zip code database for Colorado, drop unnecessary columns\nco_counties = counties[counties['state']=='CO']\nco_counties_1 = co_counties.drop(['decommissioned', 'acceptable_cities', 'unacceptable_cities','timezone','area_codes','world_region','country','irs_estimated_population_2015','primary_city','state'],axis=1)\n\n#merge counties onto dataframe \ndf_with_counties = pd.merge(df2,co_counties_1, left_on='Zip', right_on='zip')\n\n#only include 2018 demographic data\ndemographics_18 = demographics[demographics['YEAR']==2018]\ndemographics_18 = demographics_18.iloc[:,:11]\n\n#drop NAN Jobs Retained values for scatter comparison of Jobs Retained to Loan Amount by ethnicity\nethnicity_dfs_job_comparison = [x.dropna(subset=['JobsRetained']) for x in ethnicity_dfs]\n\nif __name__ == '__main__':\n\n ",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from typing import List
from re import match
from utility import ButtonGroup
import rumps
class RepeatWorkBreak(rumps.App):
def __init__(self):
rumps.debug_mode(True)
self.config = {
"app_title": "Repeat Work and Break",
"start": "Start",
"pause": "Pause Timer",
"continue": "Continue Timer",
"stop": "Stop Timer",
"timeout_message": "Time is up! Take a break :)",
"shift_time_in_seconds": 60 * 60 * 1, # 60 seconds * 60 = 1 hour
"break_time_in_seconds": 60 * 5,
'shift_setting_buttons': [
{
'title': '1 hour',
},
{
'title': '4 hour',
},
{
'title': '8 hour',
}
],
'break_setting_buttons': [
{
'title': '5 minutes',
},
{
'title': '10 minutes',
},
{
'title': '15 minutes',
}
],
}
self.app = rumps.App(self.config['app_title'])
self.timer = rumps.Timer(self.on_tick, 1)
self.shift_setting_button_group = ButtonGroup(
self.config['shift_setting_buttons'], callback=self.handle_shift_setting_button)
self.break_setting_button_group = ButtonGroup(
            self.config['break_setting_buttons'], callback=self.handle_break_setting_button)
self.shift_time_in_seconds = self.config["shift_time_in_seconds"]
self.break_time_in_seconds = self.config["break_time_in_seconds"]
self.elapsed_shift_time_in_hours = 0
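        # one empty box per hour of the shift; update_progress_box checks them off as hours pass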
self.progress_box = '◻︎' * (self.shift_time_in_seconds // 3600)
self.start_pause_button = rumps.MenuItem(
title=self.config["start"], callback=self.start_timer)
self.stop_button = rumps.MenuItem(
title=self.config["stop"], callback=None)
self.app.menu = [
{
'Preferences':
{
"Setting Shift": self.shift_setting_button_group.buttons,
"Setting Break / hr": self.break_setting_button_group.buttons,
}
},
None,
self.start_pause_button,
self.stop_button,
]
def set_up_menu(self):
self.timer.stop()
self.timer.count = 0
self.app.title = self.config['app_title']
def convert_seconds_to_time_string(self, seconds) -> str:
seconds = seconds % (24 * 3600)
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
return "%d:%02d:%02d" % (hours, minutes, seconds)
def on_tick(self, sender):
time_left_in_seconds = sender.end - sender.count
time_left_in_string = self.convert_seconds_to_time_string(
time_left_in_seconds)
if sender.count != 0 and sender.count % 3600 == 0:
self.elapsed_shift_time_in_hours += 1
self.update_progress_box()
if time_left_in_seconds == 0:
rumps.notification(
title=self.config["app_title"], subtitle=self.config["timeout_message"], message='')
self.stop_timer()
self.stop_button.set_callback(None)
else:
self.stop_button.set_callback(self.stop_timer)
self.app.title = self.progress_box + ' | ' + time_left_in_string
sender.count += 1
def update_progress_box(self):
self.progress_box = self.elapsed_shift_time_in_hours * '☑︎' + (self.shift_time_in_seconds // 3600 -
self.elapsed_shift_time_in_hours) * '◻︎'
def start_timer(self, sender):
if sender.title.lower().startswith(("start", "continue")):
if sender.title == self.config["start"]:
self.timer.count = 0
self.timer.end = self.shift_time_in_seconds
sender.title = self.config["pause"]
self.timer.start()
else:
sender.title = self.config["continue"]
self.timer.stop()
def stop_timer(self, sender=None):
self.set_up_menu()
self.stop_button.set_callback(None)
self.start_pause_button.title = self.config["start"]
def handle_shift_setting_button(self, sender):
self.shift_setting_button_group.toggle(sender)
selected_hours = int(match(r'^\d+\s{1}', sender.title)[0])
self.progress_box = "◻︎" * selected_hours # update empty progress box
self.shift_time_in_seconds = selected_hours * 3600 # hours in seconds
def handle_break_setting_button(self, sender):
self.break_setting_button_group.toggle(sender)
selected_minutes = int(match(r'^\d+\s{1}', sender.title)[0])
self.break_time_in_seconds = selected_minutes * 60
def run(self):
self.app.run()
if __name__ == "__main__":
app = RepeatWorkBreak()
app.run()
|
normal
|
{
"blob_id": "2ca91c410b8c8d6306d5ed918783a4d77a091ba8",
"index": 360,
"step-1": "<mask token>\n\n\nclass RepeatWorkBreak(rumps.App):\n <mask token>\n\n def set_up_menu(self):\n self.timer.stop()\n self.timer.count = 0\n self.app.title = self.config['app_title']\n\n def convert_seconds_to_time_string(self, seconds) ->str:\n seconds = seconds % (24 * 3600)\n hours, seconds = divmod(seconds, 3600)\n minutes, seconds = divmod(seconds, 60)\n return '%d:%02d:%02d' % (hours, minutes, seconds)\n\n def on_tick(self, sender):\n time_left_in_seconds = sender.end - sender.count\n time_left_in_string = self.convert_seconds_to_time_string(\n time_left_in_seconds)\n if sender.count != 0 and sender.count % 3600 == 0:\n self.elapsed_shift_time_in_hours += 1\n self.update_progress_box()\n if time_left_in_seconds == 0:\n rumps.notification(title=self.config['app_title'], subtitle=\n self.config['timeout_message'], message='')\n self.stop_timer()\n self.stop_button.set_callback(None)\n else:\n self.stop_button.set_callback(self.stop_timer)\n self.app.title = self.progress_box + ' | ' + time_left_in_string\n sender.count += 1\n\n def update_progress_box(self):\n self.progress_box = self.elapsed_shift_time_in_hours * '☑︎' + (self\n .shift_time_in_seconds // 3600 - self.elapsed_shift_time_in_hours\n ) * '◻︎'\n <mask token>\n <mask token>\n <mask token>\n\n def handle_break_setting_button(self, sender):\n self.break_setting_button_group.toggle(sender)\n selected_minutes = int(match('^\\\\d+\\\\s{1}', sender.title)[0])\n self.break_time_in_seconds = selected_minutes * 60\n\n def run(self):\n self.app.run()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass RepeatWorkBreak(rumps.App):\n\n def __init__(self):\n rumps.debug_mode(True)\n self.config = {'app_title': 'Repeat Work and Break', 'start':\n 'Start', 'pause': 'Pause Timer', 'continue': 'Continue Timer',\n 'stop': 'Stop Timer', 'timeout_message':\n 'Time is up! Take a break :)', 'shift_time_in_seconds': 60 * 60 *\n 1, 'break_time_in_seconds': 60 * 5, 'shift_setting_buttons': [{\n 'title': '1 hour'}, {'title': '4 hour'}, {'title': '8 hour'}],\n 'break_setting_buttons': [{'title': '5 minutes'}, {'title':\n '10 minutes'}, {'title': '15 minutes'}]}\n self.app = rumps.App(self.config['app_title'])\n self.timer = rumps.Timer(self.on_tick, 1)\n self.shift_setting_button_group = ButtonGroup(self.config[\n 'shift_setting_buttons'], callback=self.handle_shift_setting_button\n )\n self.break_setting_button_group = ButtonGroup(self.config[\n 'break_setting_buttons'], callback=self.handle_shift_setting_button\n )\n self.shift_time_in_seconds = self.config['shift_time_in_seconds']\n self.break_time_in_seconds = self.config['break_time_in_seconds']\n self.elapsed_shift_time_in_hours = 0\n self.progress_box = '◻︎' * (self.shift_time_in_seconds // 3600)\n self.start_pause_button = rumps.MenuItem(title=self.config['start'],\n callback=self.start_timer)\n self.stop_button = rumps.MenuItem(title=self.config['stop'],\n callback=None)\n self.app.menu = [{'Preferences': {'Setting Shift': self.\n shift_setting_button_group.buttons, 'Setting Break / hr': self.\n break_setting_button_group.buttons}}, None, self.\n start_pause_button, self.stop_button]\n\n def set_up_menu(self):\n self.timer.stop()\n self.timer.count = 0\n self.app.title = self.config['app_title']\n\n def convert_seconds_to_time_string(self, seconds) ->str:\n seconds = seconds % (24 * 3600)\n hours, seconds = divmod(seconds, 3600)\n minutes, seconds = divmod(seconds, 60)\n return '%d:%02d:%02d' % (hours, minutes, seconds)\n\n def on_tick(self, sender):\n time_left_in_seconds = sender.end - sender.count\n time_left_in_string = self.convert_seconds_to_time_string(\n time_left_in_seconds)\n if sender.count != 0 and sender.count % 3600 == 0:\n self.elapsed_shift_time_in_hours += 1\n self.update_progress_box()\n if time_left_in_seconds == 0:\n rumps.notification(title=self.config['app_title'], subtitle=\n self.config['timeout_message'], message='')\n self.stop_timer()\n self.stop_button.set_callback(None)\n else:\n self.stop_button.set_callback(self.stop_timer)\n self.app.title = self.progress_box + ' | ' + time_left_in_string\n sender.count += 1\n\n def update_progress_box(self):\n self.progress_box = self.elapsed_shift_time_in_hours * '☑︎' + (self\n .shift_time_in_seconds // 3600 - self.elapsed_shift_time_in_hours\n ) * '◻︎'\n\n def start_timer(self, sender):\n if sender.title.lower().startswith(('start', 'continue')):\n if sender.title == self.config['start']:\n self.timer.count = 0\n self.timer.end = self.shift_time_in_seconds\n sender.title = self.config['pause']\n self.timer.start()\n else:\n sender.title = self.config['continue']\n self.timer.stop()\n <mask token>\n\n def handle_shift_setting_button(self, sender):\n self.shift_setting_button_group.toggle(sender)\n selected_hours = int(match('^\\\\d+\\\\s{1}', sender.title)[0])\n self.progress_box = '◻︎' * selected_hours\n self.shift_time_in_seconds = selected_hours * 3600\n\n def handle_break_setting_button(self, sender):\n self.break_setting_button_group.toggle(sender)\n selected_minutes = int(match('^\\\\d+\\\\s{1}', sender.title)[0])\n 
self.break_time_in_seconds = selected_minutes * 60\n\n def run(self):\n self.app.run()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass RepeatWorkBreak(rumps.App):\n\n def __init__(self):\n rumps.debug_mode(True)\n self.config = {'app_title': 'Repeat Work and Break', 'start':\n 'Start', 'pause': 'Pause Timer', 'continue': 'Continue Timer',\n 'stop': 'Stop Timer', 'timeout_message':\n 'Time is up! Take a break :)', 'shift_time_in_seconds': 60 * 60 *\n 1, 'break_time_in_seconds': 60 * 5, 'shift_setting_buttons': [{\n 'title': '1 hour'}, {'title': '4 hour'}, {'title': '8 hour'}],\n 'break_setting_buttons': [{'title': '5 minutes'}, {'title':\n '10 minutes'}, {'title': '15 minutes'}]}\n self.app = rumps.App(self.config['app_title'])\n self.timer = rumps.Timer(self.on_tick, 1)\n self.shift_setting_button_group = ButtonGroup(self.config[\n 'shift_setting_buttons'], callback=self.handle_shift_setting_button\n )\n self.break_setting_button_group = ButtonGroup(self.config[\n 'break_setting_buttons'], callback=self.handle_shift_setting_button\n )\n self.shift_time_in_seconds = self.config['shift_time_in_seconds']\n self.break_time_in_seconds = self.config['break_time_in_seconds']\n self.elapsed_shift_time_in_hours = 0\n self.progress_box = '◻︎' * (self.shift_time_in_seconds // 3600)\n self.start_pause_button = rumps.MenuItem(title=self.config['start'],\n callback=self.start_timer)\n self.stop_button = rumps.MenuItem(title=self.config['stop'],\n callback=None)\n self.app.menu = [{'Preferences': {'Setting Shift': self.\n shift_setting_button_group.buttons, 'Setting Break / hr': self.\n break_setting_button_group.buttons}}, None, self.\n start_pause_button, self.stop_button]\n\n def set_up_menu(self):\n self.timer.stop()\n self.timer.count = 0\n self.app.title = self.config['app_title']\n\n def convert_seconds_to_time_string(self, seconds) ->str:\n seconds = seconds % (24 * 3600)\n hours, seconds = divmod(seconds, 3600)\n minutes, seconds = divmod(seconds, 60)\n return '%d:%02d:%02d' % (hours, minutes, seconds)\n\n def on_tick(self, sender):\n time_left_in_seconds = sender.end - sender.count\n time_left_in_string = self.convert_seconds_to_time_string(\n time_left_in_seconds)\n if sender.count != 0 and sender.count % 3600 == 0:\n self.elapsed_shift_time_in_hours += 1\n self.update_progress_box()\n if time_left_in_seconds == 0:\n rumps.notification(title=self.config['app_title'], subtitle=\n self.config['timeout_message'], message='')\n self.stop_timer()\n self.stop_button.set_callback(None)\n else:\n self.stop_button.set_callback(self.stop_timer)\n self.app.title = self.progress_box + ' | ' + time_left_in_string\n sender.count += 1\n\n def update_progress_box(self):\n self.progress_box = self.elapsed_shift_time_in_hours * '☑︎' + (self\n .shift_time_in_seconds // 3600 - self.elapsed_shift_time_in_hours\n ) * '◻︎'\n\n def start_timer(self, sender):\n if sender.title.lower().startswith(('start', 'continue')):\n if sender.title == self.config['start']:\n self.timer.count = 0\n self.timer.end = self.shift_time_in_seconds\n sender.title = self.config['pause']\n self.timer.start()\n else:\n sender.title = self.config['continue']\n self.timer.stop()\n\n def stop_timer(self, sender=None):\n self.set_up_menu()\n self.stop_button.set_callback(None)\n self.start_pause_button.title = self.config['start']\n\n def handle_shift_setting_button(self, sender):\n self.shift_setting_button_group.toggle(sender)\n selected_hours = int(match('^\\\\d+\\\\s{1}', sender.title)[0])\n self.progress_box = '◻︎' * selected_hours\n self.shift_time_in_seconds = selected_hours * 3600\n\n def handle_break_setting_button(self, sender):\n 
self.break_setting_button_group.toggle(sender)\n selected_minutes = int(match('^\\\\d+\\\\s{1}', sender.title)[0])\n self.break_time_in_seconds = selected_minutes * 60\n\n def run(self):\n self.app.run()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass RepeatWorkBreak(rumps.App):\n\n def __init__(self):\n rumps.debug_mode(True)\n self.config = {'app_title': 'Repeat Work and Break', 'start':\n 'Start', 'pause': 'Pause Timer', 'continue': 'Continue Timer',\n 'stop': 'Stop Timer', 'timeout_message':\n 'Time is up! Take a break :)', 'shift_time_in_seconds': 60 * 60 *\n 1, 'break_time_in_seconds': 60 * 5, 'shift_setting_buttons': [{\n 'title': '1 hour'}, {'title': '4 hour'}, {'title': '8 hour'}],\n 'break_setting_buttons': [{'title': '5 minutes'}, {'title':\n '10 minutes'}, {'title': '15 minutes'}]}\n self.app = rumps.App(self.config['app_title'])\n self.timer = rumps.Timer(self.on_tick, 1)\n self.shift_setting_button_group = ButtonGroup(self.config[\n 'shift_setting_buttons'], callback=self.handle_shift_setting_button\n )\n self.break_setting_button_group = ButtonGroup(self.config[\n 'break_setting_buttons'], callback=self.handle_shift_setting_button\n )\n self.shift_time_in_seconds = self.config['shift_time_in_seconds']\n self.break_time_in_seconds = self.config['break_time_in_seconds']\n self.elapsed_shift_time_in_hours = 0\n self.progress_box = '◻︎' * (self.shift_time_in_seconds // 3600)\n self.start_pause_button = rumps.MenuItem(title=self.config['start'],\n callback=self.start_timer)\n self.stop_button = rumps.MenuItem(title=self.config['stop'],\n callback=None)\n self.app.menu = [{'Preferences': {'Setting Shift': self.\n shift_setting_button_group.buttons, 'Setting Break / hr': self.\n break_setting_button_group.buttons}}, None, self.\n start_pause_button, self.stop_button]\n\n def set_up_menu(self):\n self.timer.stop()\n self.timer.count = 0\n self.app.title = self.config['app_title']\n\n def convert_seconds_to_time_string(self, seconds) ->str:\n seconds = seconds % (24 * 3600)\n hours, seconds = divmod(seconds, 3600)\n minutes, seconds = divmod(seconds, 60)\n return '%d:%02d:%02d' % (hours, minutes, seconds)\n\n def on_tick(self, sender):\n time_left_in_seconds = sender.end - sender.count\n time_left_in_string = self.convert_seconds_to_time_string(\n time_left_in_seconds)\n if sender.count != 0 and sender.count % 3600 == 0:\n self.elapsed_shift_time_in_hours += 1\n self.update_progress_box()\n if time_left_in_seconds == 0:\n rumps.notification(title=self.config['app_title'], subtitle=\n self.config['timeout_message'], message='')\n self.stop_timer()\n self.stop_button.set_callback(None)\n else:\n self.stop_button.set_callback(self.stop_timer)\n self.app.title = self.progress_box + ' | ' + time_left_in_string\n sender.count += 1\n\n def update_progress_box(self):\n self.progress_box = self.elapsed_shift_time_in_hours * '☑︎' + (self\n .shift_time_in_seconds // 3600 - self.elapsed_shift_time_in_hours\n ) * '◻︎'\n\n def start_timer(self, sender):\n if sender.title.lower().startswith(('start', 'continue')):\n if sender.title == self.config['start']:\n self.timer.count = 0\n self.timer.end = self.shift_time_in_seconds\n sender.title = self.config['pause']\n self.timer.start()\n else:\n sender.title = self.config['continue']\n self.timer.stop()\n\n def stop_timer(self, sender=None):\n self.set_up_menu()\n self.stop_button.set_callback(None)\n self.start_pause_button.title = self.config['start']\n\n def handle_shift_setting_button(self, sender):\n self.shift_setting_button_group.toggle(sender)\n selected_hours = int(match('^\\\\d+\\\\s{1}', sender.title)[0])\n self.progress_box = '◻︎' * selected_hours\n self.shift_time_in_seconds = selected_hours * 3600\n\n def handle_break_setting_button(self, sender):\n 
self.break_setting_button_group.toggle(sender)\n selected_minutes = int(match('^\\\\d+\\\\s{1}', sender.title)[0])\n self.break_time_in_seconds = selected_minutes * 60\n\n def run(self):\n self.app.run()\n\n\nif __name__ == '__main__':\n app = RepeatWorkBreak()\n app.run()\n",
"step-5": "from typing import List\nfrom re import match\nfrom utility import ButtonGroup\nimport rumps\n\n\nclass RepeatWorkBreak(rumps.App):\n def __init__(self):\n rumps.debug_mode(True)\n\n self.config = {\n \"app_title\": \"Repeat Work and Break\",\n \"start\": \"Start\",\n \"pause\": \"Pause Timer\",\n \"continue\": \"Continue Timer\",\n \"stop\": \"Stop Timer\",\n \"timeout_message\": \"Time is up! Take a break :)\",\n \"shift_time_in_seconds\": 60 * 60 * 1, # 60 seconds * 60 = 1 hour\n \"break_time_in_seconds\": 60 * 5,\n 'shift_setting_buttons': [\n {\n 'title': '1 hour',\n },\n {\n 'title': '4 hour',\n },\n {\n 'title': '8 hour',\n }\n ],\n 'break_setting_buttons': [\n {\n 'title': '5 minutes',\n },\n {\n 'title': '10 minutes',\n },\n {\n 'title': '15 minutes',\n }\n ],\n }\n self.app = rumps.App(self.config['app_title'])\n self.timer = rumps.Timer(self.on_tick, 1)\n self.shift_setting_button_group = ButtonGroup(\n self.config['shift_setting_buttons'], callback=self.handle_shift_setting_button)\n self.break_setting_button_group = ButtonGroup(\n self.config['break_setting_buttons'], callback=self.handle_shift_setting_button)\n self.shift_time_in_seconds = self.config[\"shift_time_in_seconds\"]\n self.break_time_in_seconds = self.config[\"break_time_in_seconds\"]\n self.elapsed_shift_time_in_hours = 0\n self.progress_box = '◻︎' * (self.shift_time_in_seconds // 3600)\n self.start_pause_button = rumps.MenuItem(\n title=self.config[\"start\"], callback=self.start_timer)\n self.stop_button = rumps.MenuItem(\n title=self.config[\"stop\"], callback=None)\n self.app.menu = [\n {\n 'Preferences':\n {\n \"Setting Shift\": self.shift_setting_button_group.buttons,\n \"Setting Break / hr\": self.break_setting_button_group.buttons,\n }\n },\n None,\n self.start_pause_button,\n self.stop_button,\n ]\n\n def set_up_menu(self):\n self.timer.stop()\n self.timer.count = 0\n self.app.title = self.config['app_title']\n\n def convert_seconds_to_time_string(self, seconds) -> str:\n seconds = seconds % (24 * 3600)\n hours, seconds = divmod(seconds, 3600)\n minutes, seconds = divmod(seconds, 60)\n\n return \"%d:%02d:%02d\" % (hours, minutes, seconds)\n\n def on_tick(self, sender):\n time_left_in_seconds = sender.end - sender.count\n\n time_left_in_string = self.convert_seconds_to_time_string(\n time_left_in_seconds)\n if sender.count != 0 and sender.count % 3600 == 0:\n self.elapsed_shift_time_in_hours += 1\n self.update_progress_box()\n if time_left_in_seconds == 0:\n rumps.notification(\n title=self.config[\"app_title\"], subtitle=self.config[\"timeout_message\"], message='')\n self.stop_timer()\n self.stop_button.set_callback(None)\n else:\n self.stop_button.set_callback(self.stop_timer)\n\n self.app.title = self.progress_box + ' | ' + time_left_in_string\n sender.count += 1\n\n def update_progress_box(self):\n self.progress_box = self.elapsed_shift_time_in_hours * '☑︎' + (self.shift_time_in_seconds // 3600 -\n self.elapsed_shift_time_in_hours) * '◻︎'\n\n def start_timer(self, sender):\n if sender.title.lower().startswith((\"start\", \"continue\")):\n if sender.title == self.config[\"start\"]:\n self.timer.count = 0\n self.timer.end = self.shift_time_in_seconds\n sender.title = self.config[\"pause\"]\n self.timer.start()\n else:\n sender.title = self.config[\"continue\"]\n self.timer.stop()\n\n def stop_timer(self, sender=None):\n self.set_up_menu()\n self.stop_button.set_callback(None)\n self.start_pause_button.title = self.config[\"start\"]\n\n def handle_shift_setting_button(self, sender):\n 
self.shift_setting_button_group.toggle(sender)\n selected_hours = int(match(r'^\\d+\\s{1}', sender.title)[0])\n self.progress_box = \"◻︎\" * selected_hours # update empty progress box\n self.shift_time_in_seconds = selected_hours * 3600 # hours in seconds\n\n def handle_break_setting_button(self, sender):\n self.break_setting_button_group.toggle(sender)\n selected_minutes = int(match(r'^\\d+\\s{1}', sender.title)[0])\n self.break_time_in_seconds = selected_minutes * 60\n\n def run(self):\n self.app.run()\n\n\nif __name__ == \"__main__\":\n app = RepeatWorkBreak()\n app.run()\n",
"step-ids": [
7,
10,
11,
12,
14
]
}
|
[
7,
10,
11,
12,
14
] |
from yapsy.IPlugin import IPlugin
import wolframalpha
import yaml
with open("friday/plugins/KEYS") as keys_file:
    keys = yaml.safe_load(keys_file)  # safe_load: a plain key file needs no arbitrary YAML tags


class Wolfram(IPlugin):
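    """Answer free-form questions by querying the Wolfram Alpha API."""
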
def can_perform(self, friday, request):
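        # Trigger only for parsed requests that fell through to the generic
        # 'wisdom.unknown' action, i.e. questions no other handler claimed.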
        return ('result' in request and 'resolvedQuery' in request['result']
                and 'action' in request['result']
                and request['result']['action'] == 'wisdom.unknown')
# result = request['result'] # Assumes we're using gTTS
# # Get the text that is supposed to be spoken aloud
# reply = result['fulfillment']['speech']
# # Get what the service thought you said
# question = result['resolvedQuery']
    def perform(self, friday, request):
        question = request['result']['resolvedQuery']
        client = wolframalpha.Client(keys['WOLFRAM'])
        res = client.query(question)
        try:
            # Prefer the pod Wolfram marks as the primary result.
            answer = next(res.results).text
        except StopIteration:
            # Otherwise join the text of every pod that has any.
            texts = [pod.text for pod in res.pods if pod.text]
            answer = ' '.join(texts) if texts \
                else "Sorry, Wolfram doesn't know the answer."
        # Replace some of its notation so it's more easily read.
        answer = answer.replace('\n', '. ').replace('~~', ' or about ')
        # Read out only the result of a computation, not the restated question.
        if '=' in answer:
            answer = answer[answer.index('=') + 1:].strip()
        return answer
#
# def wolfram_query(question):
# # Every service should have a general set of requirements under which
# # it is activated, this would be one of the ones that Wolfram Alpha
# # uses, it does have others as well. Consider having a single method
# # in the plugin system that returns a boolean determining whether
# # a plugin should be activated.
# if question:
#
#
# def wolfram_query_old(question):
# import wolframalpha
# # Every service should have a general set of requirements under which
# # it is activated, this would be one of the ones that Wolfram Alpha
# # uses, it does have others as well. Consider having a single method
# # in the plugin system that returns a boolean determining whether
# # a plugin should be activated.
# if question.lower().startswith('wolfram'):
# question = question[8:]
# client = wolframalpha.Client(user_info.WOLFRAM_KEY)
# res = client.query(question)
# try:
# return next(res.results).text # This really needs to be changed.
# # I shouldn't have to rely upon error catching for my flow control.
# except StopIteration:
# pass
# try:
# answer = ' '.join([each_answer.text for each_answer in res.pods if each_answer])
# except TypeError:
# answer = None
# if not answer:
# answer = "Sorry, Wolfram doesn't know the answer."
#
# # Replace some of its notation so it's more easily read.
# answer = answer.replace('\n', '; ').replace('~~', ' or about ')
# # Get the result to a computation and don't bother reading the original question.
# if '=' in answer:
# answer = answer[answer.index('=')+1:]
# return [answer, None] # Follows answer format of [text, action]
#
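
# A minimal usage sketch, assuming a request dict in the shape can_perform()
# expects. The plugin never touches its `friday` argument, so None stands in
# for it here; a real run needs the KEYS file and network access.
if __name__ == '__main__':
    sample_request = {'result': {'resolvedQuery': 'What is the mass of the moon?',
                                 'action': 'wisdom.unknown'}}
    plugin = Wolfram()
    if plugin.can_perform(None, sample_request):
        print(plugin.perform(None, sample_request))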
|
normal
|
{
"blob_id": "57564c2e94a65187bf5e033ee06926fb593e11a7",
"index": 7733,
"step-1": "<mask token>\n\n\nclass Wolfram(IPlugin):\n\n def can_perform(self, friday, request):\n return 'result' in request and 'resolvedQuery' in request['result'\n ] and 'action' in request['result'] and request['result']['action'\n ] == 'wisdom.unknown'\n\n def perform(self, friday, request):\n question = request['result']['resolvedQuery']\n client = wolframalpha.Client(keys['WOLFRAM'])\n res = client.query(question)\n answer = str(list(res))\n \"\"\"if len(res):\n results = list(res.results)\n if len(results):\n answer = results[0].text[0]\n else:\n answer = ' '.join([each_answer.subpods[0].text for each_answer in res.pods\n if each_answer.subpods[0].text])\n else:\n # answer = \"Sorry, Wolfram doesn't know the answer.\"\n answer = \"\"\n \"\"\"\n \"\"\"# Replace some of its notation so it's more easily read.\n answer = answer.replace('\n', '. ').replace('~~', ' or about ')\n # Get the result to a computation and don't bother reading the original question.\n if '=' in answer:\n answer = answer[answer.index('=') + 1:].strip()\n \"\"\"\n return answer\n",
"step-2": "<mask token>\nkeys_file.close()\n\n\nclass Wolfram(IPlugin):\n\n def can_perform(self, friday, request):\n return 'result' in request and 'resolvedQuery' in request['result'\n ] and 'action' in request['result'] and request['result']['action'\n ] == 'wisdom.unknown'\n\n def perform(self, friday, request):\n question = request['result']['resolvedQuery']\n client = wolframalpha.Client(keys['WOLFRAM'])\n res = client.query(question)\n answer = str(list(res))\n \"\"\"if len(res):\n results = list(res.results)\n if len(results):\n answer = results[0].text[0]\n else:\n answer = ' '.join([each_answer.subpods[0].text for each_answer in res.pods\n if each_answer.subpods[0].text])\n else:\n # answer = \"Sorry, Wolfram doesn't know the answer.\"\n answer = \"\"\n \"\"\"\n \"\"\"# Replace some of its notation so it's more easily read.\n answer = answer.replace('\n', '. ').replace('~~', ' or about ')\n # Get the result to a computation and don't bother reading the original question.\n if '=' in answer:\n answer = answer[answer.index('=') + 1:].strip()\n \"\"\"\n return answer\n",
"step-3": "<mask token>\nkeys_file = open('friday/plugins/KEYS')\nkeys = yaml.load(keys_file)\nkeys_file.close()\n\n\nclass Wolfram(IPlugin):\n\n def can_perform(self, friday, request):\n return 'result' in request and 'resolvedQuery' in request['result'\n ] and 'action' in request['result'] and request['result']['action'\n ] == 'wisdom.unknown'\n\n def perform(self, friday, request):\n question = request['result']['resolvedQuery']\n client = wolframalpha.Client(keys['WOLFRAM'])\n res = client.query(question)\n answer = str(list(res))\n \"\"\"if len(res):\n results = list(res.results)\n if len(results):\n answer = results[0].text[0]\n else:\n answer = ' '.join([each_answer.subpods[0].text for each_answer in res.pods\n if each_answer.subpods[0].text])\n else:\n # answer = \"Sorry, Wolfram doesn't know the answer.\"\n answer = \"\"\n \"\"\"\n \"\"\"# Replace some of its notation so it's more easily read.\n answer = answer.replace('\n', '. ').replace('~~', ' or about ')\n # Get the result to a computation and don't bother reading the original question.\n if '=' in answer:\n answer = answer[answer.index('=') + 1:].strip()\n \"\"\"\n return answer\n",
"step-4": "from yapsy.IPlugin import IPlugin\nimport wolframalpha\nimport yaml\nkeys_file = open('friday/plugins/KEYS')\nkeys = yaml.load(keys_file)\nkeys_file.close()\n\n\nclass Wolfram(IPlugin):\n\n def can_perform(self, friday, request):\n return 'result' in request and 'resolvedQuery' in request['result'\n ] and 'action' in request['result'] and request['result']['action'\n ] == 'wisdom.unknown'\n\n def perform(self, friday, request):\n question = request['result']['resolvedQuery']\n client = wolframalpha.Client(keys['WOLFRAM'])\n res = client.query(question)\n answer = str(list(res))\n \"\"\"if len(res):\n results = list(res.results)\n if len(results):\n answer = results[0].text[0]\n else:\n answer = ' '.join([each_answer.subpods[0].text for each_answer in res.pods\n if each_answer.subpods[0].text])\n else:\n # answer = \"Sorry, Wolfram doesn't know the answer.\"\n answer = \"\"\n \"\"\"\n \"\"\"# Replace some of its notation so it's more easily read.\n answer = answer.replace('\n', '. ').replace('~~', ' or about ')\n # Get the result to a computation and don't bother reading the original question.\n if '=' in answer:\n answer = answer[answer.index('=') + 1:].strip()\n \"\"\"\n return answer\n",
"step-5": "from yapsy.IPlugin import IPlugin\nimport wolframalpha\nimport yaml\n\nkeys_file = open(\"friday/plugins/KEYS\")\nkeys = yaml.load(keys_file)\nkeys_file.close()\n\n\nclass Wolfram(IPlugin):\n def can_perform(self, friday, request):\n return 'result' in request and 'resolvedQuery' in request['result']\\\n and 'action' in request['result'] and request['result']['action'] == 'wisdom.unknown'\n # result = request['result'] # Assumes we're using gTTS\n # # Get the text that is supposed to be spoken aloud\n # reply = result['fulfillment']['speech']\n # # Get what the service thought you said\n # question = result['resolvedQuery']\n\n\n def perform(self, friday, request):\n question = request['result']['resolvedQuery']\n client = wolframalpha.Client(keys['WOLFRAM'])\n res = client.query(question)\n answer = str(list(res))\n \"\"\"if len(res):\n results = list(res.results)\n if len(results):\n answer = results[0].text[0]\n else:\n answer = ' '.join([each_answer.subpods[0].text for each_answer in res.pods\n if each_answer.subpods[0].text])\n else:\n # answer = \"Sorry, Wolfram doesn't know the answer.\"\n answer = \"\"\n \"\"\"\n \"\"\"# Replace some of its notation so it's more easily read.\n answer = answer.replace('\\n', '. ').replace('~~', ' or about ')\n # Get the result to a computation and don't bother reading the original question.\n if '=' in answer:\n answer = answer[answer.index('=') + 1:].strip()\n \"\"\"\n return answer\n\n#\n# def wolfram_query(question):\n# # Every service should have a general set of requirements under which\n# # it is activated, this would be one of the ones that Wolfram Alpha\n# # uses, it does have others as well. Consider having a single method\n# # in the plugin system that returns a boolean determining whether\n# # a plugin should be activated.\n# if question:\n#\n#\n# def wolfram_query_old(question):\n# import wolframalpha\n# # Every service should have a general set of requirements under which\n# # it is activated, this would be one of the ones that Wolfram Alpha\n# # uses, it does have others as well. Consider having a single method\n# # in the plugin system that returns a boolean determining whether\n# # a plugin should be activated.\n# if question.lower().startswith('wolfram'):\n# question = question[8:]\n# client = wolframalpha.Client(user_info.WOLFRAM_KEY)\n# res = client.query(question)\n# try:\n# return next(res.results).text # This really needs to be changed.\n# # I shouldn't have to rely upon error catching for my flow control.\n# except StopIteration:\n# pass\n# try:\n# answer = ' '.join([each_answer.text for each_answer in res.pods if each_answer])\n# except TypeError:\n# answer = None\n# if not answer:\n# answer = \"Sorry, Wolfram doesn't know the answer.\"\n#\n# # Replace some of its notation so it's more easily read.\n# answer = answer.replace('\\n', '; ').replace('~~', ' or about ')\n# # Get the result to a computation and don't bother reading the original question.\n# if '=' in answer:\n# answer = answer[answer.index('=')+1:]\n# return [answer, None] # Follows answer format of [text, action]\n#\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import database
import nltk
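# Population helpers for the story database; both functions write through the
# shared cursor that the database module exposes.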
def pop(i):  # populate the words table
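    """Tokenise words.txt, bucket each token by part of speech, and insert
    per-word counts into the words table under locations[i]."""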
    loc = i
    sentencesTrial = []  # parallel trace of the tags seen; built but not persisted
    with open('words.txt') as words_file:
        lines = words_file.read()
    sentences = nltk.sent_tokenize(lines)
    locations = ["Castle", "Beach", "Beach", "Ghost Town", "Ghost Town",
                 "Haunted House", "Jungle", "Carnival", "Ghost Town", "Highway",
                 "Castle", "Pyramid", "Beach", "Beach", "Carnival", "Highway",
                 "Castle", "Jungle"]
    for sentence in sentences:
        for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):
            if pos == 'NN':
                database.nouns.append(word.lower())
                sentencesTrial.append("NN")
            elif pos == 'NNS':
                database.nounsplural.append(word.lower())
                sentencesTrial.append("NNS")
            elif pos == 'NNP':
                database.propernounS.append(word.lower())
                sentencesTrial.append("NNP")
            elif pos == 'NNPS':
                database.propernounP.append(word.lower())
                sentencesTrial.append("NNPS")
            elif pos == 'JJ':
                database.adjective.append(word.lower())
                sentencesTrial.append("JJ")
            elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':
                database.verbs.append(word.lower())
                sentencesTrial.append("VB")
            elif pos == 'VBD':
                database.verbpast.append(word.lower())
                sentencesTrial.append("VBD")
            elif pos == 'VBZ' or pos == 'VBP':
                database.verb3person.append(word.lower())
                sentencesTrial.append("VBZ")
            elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':
                database.adverb.append(word.lower())
                sentencesTrial.append("RB")
            elif word == ",":
                database.useless.append(word)
                sentencesTrial.append(",")
            elif word == ".":
                database.useless.append(word)
                sentencesTrial.append(".")
            else:
                # Record unrecognised tokens and keep scanning the rest of
                # the sentence rather than abandoning it.
                database.unUsedWords.append(word.lower())
    def insert_counts(words, tag):
        # One row per distinct word with its count for this location; the
        # dict preserves first-seen order, so rows come out in encounter order.
        counts = {}
        for word in words:
            counts[word] = counts.get(word, 0) + 1
        for word, count in counts.items():
            database.cursor.execute(
                "INSERT INTO words VALUES (?, ?, ?, ?)",
                (word, tag, locations[loc], count))

    insert_counts(database.nouns, 'NN')
    insert_counts(database.nounsplural, 'NNS')
    insert_counts(database.propernounS, 'NNP')
    insert_counts(database.propernounP, 'NNPS')
    insert_counts(database.adjective, 'JJ')
    insert_counts(database.verbs, 'VB')
    insert_counts(database.verbpast, 'VBD')
    insert_counts(database.verb3person, 'VBZ')
    insert_counts(database.adverb, 'RB')
    insert_counts(database.useless, 'PU')
    insert_counts(database.unUsedWords, 'US')
def pop2():  # populate the monsters, characters, and location tables
    ####populating the monsters
database.cursor.execute("INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')")
database.cursor.execute("INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')")
database.cursor.execute("INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')")
database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')")
database.cursor.execute("INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')")
database.cursor.execute("INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')")
database.cursor.execute("INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')")
database.cursor.execute("INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')")
database.cursor.execute("INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')")
database.cursor.execute("INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')")
database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')")
database.cursor.execute("INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')")
database.cursor.execute("INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')")
database.cursor.execute("INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')")
database.cursor.execute("INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')")
database.cursor.execute("INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')")
database.cursor.execute("INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')")
database.cursor.execute("INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')")
database.cursor.execute("INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')")
database.cursor.execute("INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')")
database.cursor.execute("INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')")
database.cursor.execute("INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')")
database.cursor.execute("INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')")
database.cursor.execute("INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')")
####populating the characters
database.cursor.execute("INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')")
database.cursor.execute("INSERT INTO characters VALUES ('Shaggy','Zoinks!')")
database.cursor.execute("INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')")
database.cursor.execute("INSERT INTO characters VALUES ('Velma','My glasses. I cant find my glasses')")
database.cursor.execute("INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')")
database.cursor.execute("INSERT INTO location VALUES ('Castle','Stormy')")
database.cursor.execute("INSERT INTO location VALUES ('Castle','Raining')")
database.cursor.execute("INSERT INTO location VALUES ('Castle','Misty')")
database.cursor.execute("INSERT INTO location VALUES ('Castle','Dark')")
database.cursor.execute("INSERT INTO location VALUES ('Beach','Sunny')")
database.cursor.execute("INSERT INTO location VALUES ('Beach','Misty')")
database.cursor.execute("INSERT INTO location VALUES ('Ghost Town','Cloudy')")
database.cursor.execute("INSERT INTO location VALUES ('Ghost TOwn','Foggy')")
database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Stormy')")
database.cursor.execute("INSERT INTO location VALUES ('Haunted House','Misty')")
database.cursor.execute("INSERT INTO location VALUES ('Jungle','Sunny')")
database.cursor.execute("INSERT INTO location VALUES ('Jungle','Raining')")
database.cursor.execute("INSERT INTO location VALUES ('Carnival','Dark')")
database.cursor.execute("INSERT INTO location VALUES ('Carnival','Cloudy')")
database.cursor.execute("INSERT INTO location VALUES ('Carnival','Overcast')")
database.cursor.execute("INSERT INTO location VALUES ('Highway','Overcast')")
database.cursor.execute("INSERT INTO location VALUES ('Highway','Sunny')")
database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Overcast')")
database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Sunny')")
database.cursor.execute("INSERT INTO location VALUES ('Pyramid','Raining')")
|
normal
|
{
"blob_id": "e7ac5c1010330aec81ce505fd7f52ccdeddb76de",
"index": 8923,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef pop(i):\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',\n 'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',\n 'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',\n 'Castle', 'Jungle']\n for sentence in sentences:\n for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if pos == 'NN':\n database.nouns.append(word.lower())\n sentencesTrial.append('NN')\n elif pos == 'NNS':\n database.nounsplural.append(word.lower())\n sentencesTrial.append('NNS')\n elif pos == 'NNP':\n database.propernounS.append(word.lower())\n sentencesTrial.append('NNP')\n elif pos == 'NNPS':\n database.propernounP.append(word.lower())\n sentencesTrial.append('NNPS')\n elif pos == 'JJ':\n database.adjective.append(word.lower())\n sentencesTrial.append('JJ')\n elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':\n database.verbs.append(word.lower())\n sentencesTrial.append('VB')\n elif pos == 'VBD':\n database.verbpast.append(word.lower())\n sentencesTrial.append('VBD')\n elif pos == 'VBZ' or pos == 'VBP':\n database.verb3person.append(word.lower())\n sentencesTrial.append('VBZ')\n elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':\n database.adverb.append(word)\n sentencesTrial.append('RB'.lower())\n elif word == ',':\n database.useless.append(word)\n sentencesTrial.append(',')\n break\n elif word == '.':\n database.useless.append(word)\n sentencesTrial.append('.')\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n nounCount = []\n trueNouns = []\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] += 1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NN', locations[loc], nounCount[i]))\n nounpCount = []\n trueNounsp = []\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNS', locations[loc], nounpCount[i]))\n pnounCount = []\n truepNouns = []\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNP', locations[loc], pnounCount[i]))\n pnounpCount = []\n truepNounsp = []\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNPS', locations[loc], pnounpCount[i]))\n adjectCount = []\n trueadject = []\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n for x in trueadject:\n i = trueadject.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'JJ', locations[loc], adjectCount[i]))\n verbCount = []\n trueVerb = 
[]\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VB', locations[loc], verbCount[i]))\n verbpCount = []\n trueVerbp = []\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBD', locations[loc], verbpCount[i]))\n verb3pCount = []\n trueVerb3p = []\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] += 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBZ', locations[loc], verb3pCount[i]))\n adverbCount = []\n trueAdverb = []\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'RB', locations[loc], adverbCount[i]))\n uselessCount = []\n trueUseless = []\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'PU', locations[loc], uselessCount[i]))\n uuWCount = []\n trueuuW = []\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'US', locations[loc], uuWCount[i]))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef pop(i):\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',\n 'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',\n 'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',\n 'Castle', 'Jungle']\n for sentence in sentences:\n for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if pos == 'NN':\n database.nouns.append(word.lower())\n sentencesTrial.append('NN')\n elif pos == 'NNS':\n database.nounsplural.append(word.lower())\n sentencesTrial.append('NNS')\n elif pos == 'NNP':\n database.propernounS.append(word.lower())\n sentencesTrial.append('NNP')\n elif pos == 'NNPS':\n database.propernounP.append(word.lower())\n sentencesTrial.append('NNPS')\n elif pos == 'JJ':\n database.adjective.append(word.lower())\n sentencesTrial.append('JJ')\n elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':\n database.verbs.append(word.lower())\n sentencesTrial.append('VB')\n elif pos == 'VBD':\n database.verbpast.append(word.lower())\n sentencesTrial.append('VBD')\n elif pos == 'VBZ' or pos == 'VBP':\n database.verb3person.append(word.lower())\n sentencesTrial.append('VBZ')\n elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':\n database.adverb.append(word)\n sentencesTrial.append('RB'.lower())\n elif word == ',':\n database.useless.append(word)\n sentencesTrial.append(',')\n break\n elif word == '.':\n database.useless.append(word)\n sentencesTrial.append('.')\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n nounCount = []\n trueNouns = []\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] += 1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NN', locations[loc], nounCount[i]))\n nounpCount = []\n trueNounsp = []\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNS', locations[loc], nounpCount[i]))\n pnounCount = []\n truepNouns = []\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNP', locations[loc], pnounCount[i]))\n pnounpCount = []\n truepNounsp = []\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNPS', locations[loc], pnounpCount[i]))\n adjectCount = []\n trueadject = []\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n for x in trueadject:\n i = trueadject.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'JJ', locations[loc], adjectCount[i]))\n verbCount = []\n trueVerb = 
[]\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VB', locations[loc], verbCount[i]))\n verbpCount = []\n trueVerbp = []\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBD', locations[loc], verbpCount[i]))\n verb3pCount = []\n trueVerb3p = []\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] += 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBZ', locations[loc], verb3pCount[i]))\n adverbCount = []\n trueAdverb = []\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'RB', locations[loc], adverbCount[i]))\n uselessCount = []\n trueUseless = []\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'PU', locations[loc], uselessCount[i]))\n uuWCount = []\n trueuuW = []\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'US', locations[loc], uuWCount[i]))\n\n\ndef pop2():\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES 
('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Shaggy','Zoinks!')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Velma','My glasses. 
I cant find my glasses')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')\"\n )\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Misty')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost Town','Cloudy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost TOwn','Foggy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Stormy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Cloudy')\"\n )\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Carnival','Overcast')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Highway','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Sunny')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Pyramid','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Raining')\"\n )\n",
"step-4": "import database\nimport nltk\n\n\ndef pop(i):\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = ['Castle', 'Beach', 'Beach', 'Ghost Town', 'Ghost Town',\n 'Haunted House', 'Jungle', 'Carnival', 'Ghost Town', 'Highway',\n 'Castle', 'Pyramid', 'Beach', 'Beach', 'Carnival', 'Highway',\n 'Castle', 'Jungle']\n for sentence in sentences:\n for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if pos == 'NN':\n database.nouns.append(word.lower())\n sentencesTrial.append('NN')\n elif pos == 'NNS':\n database.nounsplural.append(word.lower())\n sentencesTrial.append('NNS')\n elif pos == 'NNP':\n database.propernounS.append(word.lower())\n sentencesTrial.append('NNP')\n elif pos == 'NNPS':\n database.propernounP.append(word.lower())\n sentencesTrial.append('NNPS')\n elif pos == 'JJ':\n database.adjective.append(word.lower())\n sentencesTrial.append('JJ')\n elif pos == 'VB' or pos == 'VBG' or pos == 'VBN':\n database.verbs.append(word.lower())\n sentencesTrial.append('VB')\n elif pos == 'VBD':\n database.verbpast.append(word.lower())\n sentencesTrial.append('VBD')\n elif pos == 'VBZ' or pos == 'VBP':\n database.verb3person.append(word.lower())\n sentencesTrial.append('VBZ')\n elif pos == 'RB' or pos == 'RBR' or pos == 'RBS':\n database.adverb.append(word)\n sentencesTrial.append('RB'.lower())\n elif word == ',':\n database.useless.append(word)\n sentencesTrial.append(',')\n break\n elif word == '.':\n database.useless.append(word)\n sentencesTrial.append('.')\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n nounCount = []\n trueNouns = []\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] += 1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NN', locations[loc], nounCount[i]))\n nounpCount = []\n trueNounsp = []\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNS', locations[loc], nounpCount[i]))\n pnounCount = []\n truepNouns = []\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNP', locations[loc], pnounCount[i]))\n pnounpCount = []\n truepNounsp = []\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'NNPS', locations[loc], pnounpCount[i]))\n adjectCount = []\n trueadject = []\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n for x in trueadject:\n i = trueadject.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'JJ', locations[loc], adjectCount[i]))\n verbCount = 
[]\n trueVerb = []\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VB', locations[loc], verbCount[i]))\n verbpCount = []\n trueVerbp = []\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBD', locations[loc], verbpCount[i]))\n verb3pCount = []\n trueVerb3p = []\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] += 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'VBZ', locations[loc], verb3pCount[i]))\n adverbCount = []\n trueAdverb = []\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'RB', locations[loc], adverbCount[i]))\n uselessCount = []\n trueUseless = []\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'PU', locations[loc], uselessCount[i]))\n uuWCount = []\n trueuuW = []\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute('INSERT INTO words VALUES (?, ?, ?, ?)', (x,\n 'US', locations[loc], uuWCount[i]))\n\n\ndef pop2():\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist Paddy','Drawing of rig to appear headless')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')\"\n )\n database.cursor.execute(\n \"INSERT INTO 
monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')\"\n )\n database.cursor.execute(\n \"INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Shaggy','Zoinks!')\")\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Velma','My glasses. 
I cant find my glasses')\"\n )\n database.cursor.execute(\n \"INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')\"\n )\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Misty')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost Town','Cloudy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Ghost TOwn','Foggy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Stormy')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Haunted House','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Cloudy')\"\n )\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Carnival','Overcast')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Highway','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Sunny')\")\n database.cursor.execute(\n \"INSERT INTO location VALUES ('Pyramid','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Raining')\"\n )\n",
"step-5": "import database\nimport nltk\ndef pop(i): # pupulate the words table\n loc = i\n sentencesTrial = []\n File = open('words.txt')\n lines = File.read()\n sentences = nltk.sent_tokenize(lines)\n locations = [\"Castle\",\"Beach\",\"Beach\",\"Ghost Town\",\"Ghost Town\",\"Haunted House\",\"Jungle\",\"Carnival\", \"Ghost Town\", \"Highway\", \"Castle\", \"Pyramid\",\"Beach\",\"Beach\",\"Carnival\", \"Highway\", \"Castle\" ,\"Jungle\" ]\n\n for sentence in sentences:\n for word, pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))):\n if(pos == 'NN'):\n database.nouns.append(word.lower())\n sentencesTrial.append(\"NN\")\n elif (pos == 'NNS'):\n database.nounsplural.append(word.lower())\n sentencesTrial.append(\"NNS\")\n elif (pos == 'NNP'):\n database.propernounS.append(word.lower())\n sentencesTrial.append(\"NNP\")\n elif (pos == 'NNPS'):\n database.propernounP.append(word.lower())\n sentencesTrial.append(\"NNPS\")\n elif (pos == 'JJ'):\n database.adjective.append(word.lower())\n sentencesTrial.append(\"JJ\")\n elif (pos == 'VB' or pos == 'VBG' or pos == 'VBN'):\n database.verbs.append(word.lower())\n sentencesTrial.append(\"VB\")\n elif (pos == 'VBD'):\n database.verbpast.append(word.lower())\n sentencesTrial.append(\"VBD\")\n elif (pos == 'VBZ' or pos == 'VBP'):\n database.verb3person.append(word.lower())\n sentencesTrial.append(\"VBZ\")\n elif (pos == 'RB' or pos == 'RBR' or pos == 'RBS'):\n database.adverb.append(word)\n sentencesTrial.append(\"RB\".lower())\n else:\n if(word == \",\"):\n database.useless.append(word)\n sentencesTrial.append(\",\")\n break\n elif(word == \".\"):\n database.useless.append(word)\n sentencesTrial.append(\".\")\n break\n else:\n database.unUsedWords.append(word.lower())\n break\n\n nounCount = []\n trueNouns = []\n\n for x in database.nouns:\n if x in trueNouns:\n a = trueNouns.index(x)\n nounCount[a] +=1\n else:\n trueNouns.append(x)\n a = trueNouns.index(x)\n nounCount.append(1)\n\n for x in trueNouns:\n i = trueNouns.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x,'NN',locations[loc],nounCount[i]))\n\n nounpCount = []\n trueNounsp = []\n\n for x in database.nounsplural:\n if x in trueNounsp:\n a = trueNounsp.index(x)\n nounpCount[a] += 1\n else:\n trueNounsp.append(x)\n a = trueNounsp.index(x)\n nounpCount.append(1)\n\n for x in trueNounsp:\n i = trueNounsp.index(x)\n database.cursor.execute(\n \"INSERT INTO words VALUES (?, ?, ?, ?)\",\n (x, 'NNS', locations[loc], nounpCount[i]))\n\n pnounCount = []\n truepNouns = []\n\n for x in database.propernounS:\n if x in truepNouns:\n a = truepNouns.index(x)\n pnounCount[a] += 1\n else:\n truepNouns.append(x)\n a = truepNouns.index(x)\n pnounCount.append(1)\n\n for x in truepNouns:\n i = truepNouns.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'NNP', locations[loc], pnounCount[i]))\n\n pnounpCount = []\n truepNounsp = []\n\n for x in database.propernounP:\n if x in truepNounsp:\n a = truepNounsp.index(x)\n pnounpCount[a] += 1\n else:\n truepNounsp.append(x)\n a = truepNounsp.index(x)\n pnounpCount.append(1)\n\n for x in truepNounsp:\n i = truepNounsp.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'NNPS', locations[loc], pnounpCount[i]))\n\n adjectCount = []\n trueadject = []\n\n for x in database.adjective:\n if x in trueadject:\n a = trueadject.index(x)\n adjectCount[a] += 1\n else:\n trueadject.append(x)\n a = trueadject.index(x)\n adjectCount.append(1)\n\n for x in trueadject:\n i = 
trueadject.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'JJ', locations[loc], adjectCount[i]))\n\n verbCount = []\n trueVerb = []\n\n for x in database.verbs:\n if x in trueVerb:\n a = trueVerb.index(x)\n verbCount[a] += 1\n else:\n trueVerb.append(x)\n a = trueVerb.index(x)\n verbCount.append(1)\n\n for x in trueVerb:\n i = trueVerb.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'VB', locations[loc], verbCount[i]))\n\n verbpCount = []\n trueVerbp = []\n\n for x in database.verbpast:\n if x in trueVerbp:\n a = trueVerbp.index(x)\n verbpCount[a] += 1\n else:\n trueVerbp.append(x)\n a = trueVerbp.index(x)\n verbpCount.append(1)\n\n for x in trueVerbp:\n i = trueVerbp.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'VBD', locations[loc], verbpCount[i]))\n\n verb3pCount = []\n trueVerb3p = []\n\n for x in database.verb3person:\n if x in trueVerb3p:\n a = trueVerb3p.index(x)\n verb3pCount[a] += 1\n else:\n trueVerb3p.append(x)\n a = trueVerb3p.index(x)\n verb3pCount.append(1)\n\n for x in trueVerb3p:\n i = trueVerb3p.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'VBZ', locations[loc], verb3pCount[i]))\n\n adverbCount = []\n trueAdverb = []\n\n for x in database.adverb:\n if x in trueAdverb:\n a = trueAdverb.index(x)\n adverbCount[a] += 1\n else:\n trueAdverb.append(x)\n a = trueAdverb.index(x)\n adverbCount.append(1)\n\n for x in trueAdverb:\n i = trueAdverb.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'RB', locations[loc], adverbCount[i]))\n\n uselessCount = []\n trueUseless = []\n\n for x in database.useless:\n if x in trueUseless:\n a = trueUseless.index(x)\n uselessCount[a] += 1\n else:\n trueUseless.append(x)\n a = trueUseless.index(x)\n uselessCount.append(1)\n\n for x in trueUseless:\n i = trueUseless.index(x)\n database.cursor.execute(\n \"INSERT INTO words VALUES (?, ?, ?, ?)\",\n (x, 'PU', locations[loc], uselessCount[i]))\n\n uuWCount = []\n trueuuW = []\n\n for x in database.unUsedWords:\n if x in trueuuW:\n a = trueuuW.index(x)\n uuWCount[a] += 1\n else:\n trueuuW.append(x)\n a = trueuuW.index(x)\n uuWCount.append(1)\n\n for x in trueuuW:\n i = trueuuW.index(x)\n database.cursor.execute(\"INSERT INTO words VALUES (?, ?, ?, ?)\", (x, 'US', locations[loc], uuWCount[i]))\n\n\ndef pop2(): #populate the monster and characters table\n\n####populating the monsters\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Knight','Castle','Old Man Jenkins','Picture')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Vampire' , 'Castle' , 'Andrew the Tour', 'Vampire Make Up and fake blood')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Shadow' , 'Castle' , 'Frank the Janitor' , 'Black paint')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ghost Pirate','Beach','Bill the Lifeguard','Pirate Costume')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Seaweed Monster','Beach','Old Fisherman Joe','Seaweed')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Shark','Beach','The Mayor','Shark fins')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Cowboy Ghost','Ghost Town','Jerry the Businessman ','Cowboy hat')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Miner Ghost','Ghost Town','Gold Hunter Phil','Dusty shoes')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Headless Horse Man','Ghost Town','Envirnmentalist 
Paddy','Drawing of rig to appear headless')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Francinstein','Haunted House','Sir Godfree','Green paint')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Zombie','Haunted House','The Waiter','Zombie Make Up and fake boy parts')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ghost','Haunted House','Jimmy','Glow in the dark paint on cloths')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ape Man','Jungle','Explorer Fred','Ape Costume')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Animal Ghosts','Jungle','Environmentalist Jennie','Scratch Marks')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Pterodactyl','Jungle','Tour Guide Bill','Book on flight')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Clown Ghost','Carnival','Ring Master','Old Clown Costumes')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Zombie','Carnival','Blind Knife Thrower','Eye tests saying he is not blind')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Animals','Carnival','Worlds Strongest Man','Scratch marks')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Ghost Car','Highway','Old Town Mayor','Car ownership documents')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('White Lady Ghost','Highway','Miss Anderson','White Dress')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Aliens','Highway','Conspiracy Tom','Fake Space ship blueprint')\")\n\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Mummy','Pyramid','Museum Curator Petterson ','Bandages')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Sand Man','Pyramid','Ramesh the Tour Guide','Sand')\")\n database.cursor.execute(\"INSERT INTO monsters VALUES ('Sphynx','Pyramid','Tour Guide Bob','scratch marks')\")\n\n####populating the characters\n\n\n database.cursor.execute(\"INSERT INTO characters VALUES ('Scooby Doo','Scooby Dooby Doo')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Shaggy','Zoinks!')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Fred','Lets Split up and look for clues')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Velma','My glasses. 
I cant find my glasses')\")\n database.cursor.execute(\"INSERT INTO characters VALUES ('Daphne','Do you want a Scooby Snack')\")\n\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Castle','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Beach','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Ghost Town','Cloudy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Ghost TOwn','Foggy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Haunted House','Stormy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Haunted House','Misty')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Jungle','Raining')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Dark')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Cloudy')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Carnival','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Highway','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Overcast')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Sunny')\")\n database.cursor.execute(\"INSERT INTO location VALUES ('Pyramid','Raining')\")",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
print "test"
print "moreing"
print " a nnnnn"
|
normal
|
{
"blob_id": "551e9c696eaad6c78f2eae66e50cca34c153d9dd",
"index": 4636,
"step-1": "print \"test\"\n\nprint \"moreing\"\n\nprint \" a nnnnn\"",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import math
r = float(input())
p = int(input())
obim = 2 * r * math.pi
ukupanPut = p * obim
# convert centimeters to meters
ukupanPut = ukupanPut * 0.01
print("%.2f" % ukupanPut)
|
normal
|
{
"blob_id": "1f27b697985c7417e6d8d978703175a415c6c57d",
"index": 327,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('%.2f' % ukupanPut)\n",
"step-3": "<mask token>\nr = float(input())\np = int(input())\nobim = 2 * r * math.pi\nukupanPut = p * obim\nukupanPut = ukupanPut * 0.01\nprint('%.2f' % ukupanPut)\n",
"step-4": "import math\nr = float(input())\np = int(input())\nobim = 2 * r * math.pi\nukupanPut = p * obim\nukupanPut = ukupanPut * 0.01\nprint('%.2f' % ukupanPut)\n",
"step-5": "import math\n\nr = float(input())\np = int(input())\nobim = 2 * r * math.pi\nukupanPut = p * obim\n# centimetre pretvaramo u metre\nukupanPut = ukupanPut * 0.01\nprint(\"%.2f\" % ukupanPut)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import sys
def saludar(saludo):
print saludo
def iniciales(nombre,ape1,ape2):
    iniciales=nombre[0]+'.'+ape1[0]+'.'+ape2[0]+'.'
    return "Your initials are:"+iniciales.upper()
def iniciales1(nombre,ape1,*apellidos):
iniciales=nombre[0]+'.'+ape1[0]
for ape in apellidos:
iniciales=iniciales+'.'+ape[0]
return iniciales.upper()
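
# Illustrative usage (hypothetical values; Python 2 style, matching the
# print statement above):
# print iniciales("juan", "perez", "gomez")         # Your initials are:J.P.G.
# print iniciales1("ana", "lopez", "ruiz", "diaz")  # A.L.R.D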
|
normal
|
{
"blob_id": "01b615f8282d4d42c5e83181fffc2d7cb612c096",
"index": 704,
"step-1": "import sys \n\n\ndef saludar(saludo):\n\tprint saludo\n\ndef iniciales(nombre,ape1,ape2):\n\tiniciales=nombre[0]+'.'+ape1[0]+'.'+ape2[0]+'.'\n\treturn \"Tus iniciales son:\"+iniciales.upper()\n\n\ndef iniciales1(nombre,ape1,*apellidos):\n\tiniciales=nombre[0]+'.'+ape1[0]\n\tfor ape in apellidos:\n\t\tiniciales=iniciales+'.'+ape[0]\n\treturn iniciales.upper()\n\n\n\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# 5. Improve the "Bank deposit" program. The third argument passed to the function
# should be a fixed monthly top-up amount. In the main function, implement a nested
# function that calculates the interest earned on the top-up amounts.
# Assume the client deposits the money on the last day of every month except the
# first and the last. For example, for a 6-month deposit the top-ups happen during
# 4 months. The nested function returns the total of the additional deposits (with
# interest), and the main function returns the total deposit amount at the end of
# the term.
from task_1_4 import get_percent
def chargeable_deposit(amount, months, charge=0):
percent = get_percent(amount, months)
    if not percent:
        print('No suitable rate found')
        return
total = amount
for month in range(months):
profit = total * percent / 100 / 12
total += profit
if month != 0 and month != months - 1:
total += charge + charge * percent / 100 / 12
print(round(total, 2))
chargeable_deposit(10000, 24, 100)
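# Sketch of one month's update (assuming get_percent returns an annual rate in %,
# e.g. 5): profit = total * 5 / 100 / 12, and on every month except the first and
# the last the client also adds charge * (1 + 5 / 100 / 12) to the balance.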
|
normal
|
{
"blob_id": "bf9e83591f737caec3060b72d86d56faec9bb23b",
"index": 8079,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef chargeable_deposit(amount, months, charge=0):\n percent = get_percent(amount, months)\n if not percent:\n print('Нет подходящего тарифа')\n total = amount\n for month in range(months):\n profit = total * percent / 100 / 12\n total += profit\n if month != 0 and month != months - 1:\n total += charge + charge * percent / 100 / 12\n print(round(total, 2))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef chargeable_deposit(amount, months, charge=0):\n percent = get_percent(amount, months)\n if not percent:\n print('Нет подходящего тарифа')\n total = amount\n for month in range(months):\n profit = total * percent / 100 / 12\n total += profit\n if month != 0 and month != months - 1:\n total += charge + charge * percent / 100 / 12\n print(round(total, 2))\n\n\nchargeable_deposit(10000, 24, 100)\n",
"step-4": "from task_1_4 import get_percent\n\n\ndef chargeable_deposit(amount, months, charge=0):\n percent = get_percent(amount, months)\n if not percent:\n print('Нет подходящего тарифа')\n total = amount\n for month in range(months):\n profit = total * percent / 100 / 12\n total += profit\n if month != 0 and month != months - 1:\n total += charge + charge * percent / 100 / 12\n print(round(total, 2))\n\n\nchargeable_deposit(10000, 24, 100)\n",
"step-5": "# 5. Усовершенствовать программу «Банковский депозит». Третьим аргументом в функцию должна\r\n# передаваться фиксированная ежемесячная сумма пополнения вклада. Необходимо в главной\r\n# функции реализовать вложенную функцию подсчета процентов для пополняемой суммы.\r\n# Примем, что клиент вносит средства в последний день каждого месяца, кроме первого и\r\n# последнего. Например, при сроке вклада в 6 месяцев пополнение происходит в течение 4\r\n# месяцев. Вложенная функция возвращает сумму дополнительно внесенных средств (с\r\n# процентами), а главная функция — общую сумму по вкладу на конец периода.\r\n\r\nfrom task_1_4 import get_percent\r\n\r\n\r\ndef chargeable_deposit(amount, months, charge=0):\r\n percent = get_percent(amount, months)\r\n if not percent:\r\n print('Нет подходящего тарифа')\r\n\r\n total = amount\r\n for month in range(months):\r\n profit = total * percent / 100 / 12\r\n total += profit\r\n if month != 0 and month != months - 1:\r\n total += charge + charge * percent / 100 / 12\r\n\r\n print(round(total, 2))\r\n\r\n\r\nchargeable_deposit(10000, 24, 100)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import logging
from bson import ObjectId
from typing import Union
from app.helper import parseControllerResponse
from models.members import Member
from schema.members import (
CreateMemberSchema,
MemberInDBSchema,
UpdateMemberSchema,
memberHelper,
)
def getAllMembersFromDB(**kwargs):
"""Finds and returns all the registered members"""
isResponseParsed = kwargs.get("isParsed", False)
logging.info("Trying to find all the users")
try:
rawMembersData = Member.objects()
parsedMembers = [
MemberInDBSchema(**memberHelper(rawMember)) for rawMember in rawMembersData
]
logging.info("Found all the users")
if not isResponseParsed:
return parsedMembers
resp = [
parsedMember.dict(exclude={"mongoDocument"})
for parsedMember in parsedMembers
]
return parseControllerResponse(
data=resp, statuscode=200, message="Successfully found the users"
)
except Exception as e:
        helpfulErrorMessage = f"Couldn't find all the users due to {e}"
logging.error(helpfulErrorMessage)
if isResponseParsed:
return parseControllerResponse(
statuscode=500,
message="Something went wrong, try again later",
error=helpfulErrorMessage,
)
        raise Exception(helpfulErrorMessage)
def getMemberFromDiscordHandle(discordHandle: str):
"""Finds and returns the user with the given discord handle, if
such a user doesn't exist, return None"""
try:
member_ = Member.objects(discordHandle=discordHandle).first()
assert member_
member = MemberInDBSchema(**memberHelper(member_))
return member
except AssertionError as _:
# if the member is not found, raise a ValueError
return None
except Exception as e:
        raise Exception(
            f"Couldn't find a user with the discord handle {discordHandle}, due to {e}"
        )
def getMemberFromRollNumber(rollNumber: int, **kwargs):
"""Finds and returns the user with the given roll number, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get("isParsed", False)
rawData = kwargs.get("rawData", False)
try:
user = Member.objects(rollno=rollNumber).first()
assert user
logging.debug(
"Found a user {}, with the rollno={}".format(memberHelper(user), rollNumber)
)
logging.info("Found the user with rollNumber =" + rollNumber)
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(
data=(MemberInDBSchema(**memberHelper(user))).dict(
exclude={"mongoDocument"}
),
statuscode=200,
message="Successfully found the user",
)
except AssertionError as _:
# user was not found, return none or parsed response
        # ! it's the caller's responsibility to turn this into an error
        logging.info("A user with roll number={} does not exist".format(rollNumber))
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=404,
message="User not found",
error="A user with rollnumber={} does not exist".format(rollNumber),
)
return None
except Exception as e:
helpfulErrorMsg = f"Couldn't find a user with the {rollNumber = }, due to {e}"
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=500,
message="Something went wrong, try again later.",
error=helpfulErrorMsg,
)
        raise Exception(helpfulErrorMsg)
def getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):
"""Finds and returns the user with the given id, if
such a user doesn't exist, return None"""
isResponseParsed = kwargs.get("isParsed", False)
rawData = kwargs.get("rawData", False)
logging.info("Trying to find the user with the id=" + id)
try:
user = Member.objects(id=id).first()
assert user
logging.debug("Found a user {}, with the id={}".format(memberHelper(user), id))
logging.info("Found the user with id=" + id)
if not isResponseParsed:
return user if rawData else MemberInDBSchema(**memberHelper(user))
return parseControllerResponse(
data=(MemberInDBSchema(**memberHelper(user))).dict(
exclude={"mongoDocument"}
),
statuscode=200,
message="Successfully found the user",
)
except AssertionError as _:
# user was not found, return none or parsed response
logging.info("A user with id={} does not exist".format(id))
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=404,
message="User not found",
error="A user with id={} does not exist".format(id),
)
return None
except Exception as e:
helpfulErrorMsg = "Couldn't find a user with the userId {}, due to {}".format(
id, e
)
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=500,
message="Something went wrong, try again later.",
error=helpfulErrorMsg,
)
        raise Exception(helpfulErrorMsg)
def updateMemberWithGivenDetails(
data: UpdateMemberSchema, userId: Union[ObjectId, str], **kwargs
):
"""Finds the user with the given data, and updates their details,
raises an error if the roll number is different"""
isResponseParsed = kwargs.get("isParsed", False)
try:
user: Member = getMemberWithGivenId(id=userId, rawData=True)
assert user, "Not Found"
# A user cannot change roll number after creating a doc
assert user.rollno == data.rollno, "Roll Number Mismatch"
user.name = data.name if data.name else user.name
user.discordHandle = (
data.discordHandle if data.discordHandle else user.discordHandle
)
user.batch = data.batch if data.batch else user.batch
if data.password:
user.password = CreateMemberSchema.hashGivenText(data.password)
user.save()
logging.info("successfully updated user data")
if isResponseParsed:
return parseControllerResponse(
data=(MemberInDBSchema(**memberHelper(user))).dict(
exclude={"mongoDocument"}
),
statuscode=200,
message="Successfully updated user details",
)
return True
except AssertionError as err:
if err == "Not Found":
helpfulErrorMsg = f"A user with {userId = } doesn't exist"
logging.warn(helpfulErrorMsg)
if not isResponseParsed:
return None
return parseControllerResponse(
data=None,
statuscode=400,
message=helpfulErrorMsg,
error=helpfulErrorMsg,
)
if err == "Roll Number Mismatch":
helpfulErrorMsg = (
f"You cannot change a user's roll number after creating it."
)
if not isResponseParsed:
return None
return parseControllerResponse(
data=None,
statuscode=400,
message=helpfulErrorMsg,
error=helpfulErrorMsg,
)
except Exception as e:
helpfulErrorMsg = f"Couldn't update user={data.dict()} data, because {e=}"
logging.error(helpfulErrorMsg)
if isResponseParsed:
return parseControllerResponse(
data=None,
statuscode=500,
message="Something went wrong, try again later.",
error=helpfulErrorMsg,
)
        raise Exception(helpfulErrorMsg)
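
# Minimal usage sketch (assumes a configured mongoengine connection and that
# parseControllerResponse returns a dict-like payload; both live outside this file,
# and the handle below is hypothetical):
#   member = getMemberFromDiscordHandle("user#1234")   # MemberInDBSchema or None
#   resp = getMemberFromRollNumber(42, isParsed=True)  # parsed controller response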
|
normal
|
{
"blob_id": "95f9e9a8f681679f56c3755199fba7d654af85e8",
"index": 1937,
"step-1": "<mask token>\n\n\ndef getAllMembersFromDB(**kwargs):\n \"\"\"Finds and returns all the registered members\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n logging.info('Trying to find all the users')\n try:\n rawMembersData = Member.objects()\n parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for\n rawMember in rawMembersData]\n logging.info('Found all the users')\n if not isResponseParsed:\n return parsedMembers\n resp = [parsedMember.dict(exclude={'mongoDocument'}) for\n parsedMember in parsedMembers]\n return parseControllerResponse(data=resp, statuscode=200, message=\n 'Successfully found the users')\n except Exception as e:\n helpfulErrorMessage = \"Couldn't find all the users due to \" + e\n logging.error(helpfulErrorMessage)\n if isResponseParsed:\n return parseControllerResponse(statuscode=500, message=\n 'Something went wrong, try again later', error=\n helpfulErrorMessage)\n raise helpfulErrorMessage\n\n\ndef getMemberFromDiscordHandle(discordHandle: str):\n \"\"\"Finds and returns the user with the given discord handle, if\n such a user doesn't exist, return None\"\"\"\n try:\n member_ = Member.objects(discordHandle=discordHandle).first()\n assert member_\n member = MemberInDBSchema(**memberHelper(member_))\n return member\n except AssertionError as _:\n return None\n except Exception as e:\n raise Exception(\n \"Couldn't find a user with the discord handle {}, due to {}\"\n .format(discordHandle, e))\n\n\n<mask token>\n\n\ndef getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):\n \"\"\"Finds and returns the user with the given id, if\n such a user doesn't exist, return None\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n rawData = kwargs.get('rawData', False)\n logging.info('Trying to find the user with the id=' + id)\n try:\n user = Member.objects(id=id).first()\n assert user\n logging.debug('Found a user {}, with the id={}'.format(memberHelper\n (user), id))\n logging.info('Found the user with id=' + id)\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n return parseControllerResponse(data=MemberInDBSchema(**memberHelper\n (user)).dict(exclude={'mongoDocument'}), statuscode=200,\n message='Successfully found the user')\n except AssertionError as _:\n logging.info('A user with id={} does not exist'.format(id))\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=404,\n message='User not found', error=\n 'A user with id={} does not exist'.format(id))\n return None\n except Exception as e:\n helpfulErrorMsg = (\"Couldn't find a user with the userId {}, due to {}\"\n .format(id, e))\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef getAllMembersFromDB(**kwargs):\n \"\"\"Finds and returns all the registered members\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n logging.info('Trying to find all the users')\n try:\n rawMembersData = Member.objects()\n parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for\n rawMember in rawMembersData]\n logging.info('Found all the users')\n if not isResponseParsed:\n return parsedMembers\n resp = [parsedMember.dict(exclude={'mongoDocument'}) for\n parsedMember in parsedMembers]\n return parseControllerResponse(data=resp, statuscode=200, message=\n 'Successfully found the users')\n except Exception as e:\n helpfulErrorMessage = \"Couldn't find all the users due to \" + e\n logging.error(helpfulErrorMessage)\n if isResponseParsed:\n return parseControllerResponse(statuscode=500, message=\n 'Something went wrong, try again later', error=\n helpfulErrorMessage)\n raise helpfulErrorMessage\n\n\ndef getMemberFromDiscordHandle(discordHandle: str):\n \"\"\"Finds and returns the user with the given discord handle, if\n such a user doesn't exist, return None\"\"\"\n try:\n member_ = Member.objects(discordHandle=discordHandle).first()\n assert member_\n member = MemberInDBSchema(**memberHelper(member_))\n return member\n except AssertionError as _:\n return None\n except Exception as e:\n raise Exception(\n \"Couldn't find a user with the discord handle {}, due to {}\"\n .format(discordHandle, e))\n\n\ndef getMemberFromRollNumber(rollNumber: int, **kwargs):\n \"\"\"Finds and returns the user with the given roll number, if\n such a user doesn't exist, return None\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n rawData = kwargs.get('rawData', False)\n try:\n user = Member.objects(rollno=rollNumber).first()\n assert user\n user = Member.objects(id=id).first()\n assert user\n logging.debug('Found a user {}, with the rollno={}'.format(\n memberHelper(user), rollNumber))\n logging.info('Found the user with rollNumber =' + rollNumber)\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n return parseControllerResponse(data=MemberInDBSchema(**memberHelper\n (user)).dict(exclude={'mongoDocument'}), statuscode=200,\n message='Successfully found the user')\n except AssertionError as _:\n logging.info('A user with roll numer={} does not exist'.format(\n rollNumber))\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=404,\n message='User not found', error=\n 'A user with rollnumber={} does not exist'.format(rollNumber))\n return None\n except Exception as e:\n helpfulErrorMsg = (\n f\"Couldn't find a user with the rollNumber = {rollNumber!r}, due to {e}\"\n )\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n\n\ndef getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):\n \"\"\"Finds and returns the user with the given id, if\n such a user doesn't exist, return None\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n rawData = kwargs.get('rawData', False)\n logging.info('Trying to find the user with the id=' + id)\n try:\n user = Member.objects(id=id).first()\n assert user\n logging.debug('Found a user {}, with the id={}'.format(memberHelper\n (user), id))\n logging.info('Found the user with id=' + id)\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n 
return parseControllerResponse(data=MemberInDBSchema(**memberHelper\n (user)).dict(exclude={'mongoDocument'}), statuscode=200,\n message='Successfully found the user')\n except AssertionError as _:\n logging.info('A user with id={} does not exist'.format(id))\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=404,\n message='User not found', error=\n 'A user with id={} does not exist'.format(id))\n return None\n except Exception as e:\n helpfulErrorMsg = (\"Couldn't find a user with the userId {}, due to {}\"\n .format(id, e))\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef getAllMembersFromDB(**kwargs):\n \"\"\"Finds and returns all the registered members\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n logging.info('Trying to find all the users')\n try:\n rawMembersData = Member.objects()\n parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for\n rawMember in rawMembersData]\n logging.info('Found all the users')\n if not isResponseParsed:\n return parsedMembers\n resp = [parsedMember.dict(exclude={'mongoDocument'}) for\n parsedMember in parsedMembers]\n return parseControllerResponse(data=resp, statuscode=200, message=\n 'Successfully found the users')\n except Exception as e:\n helpfulErrorMessage = \"Couldn't find all the users due to \" + e\n logging.error(helpfulErrorMessage)\n if isResponseParsed:\n return parseControllerResponse(statuscode=500, message=\n 'Something went wrong, try again later', error=\n helpfulErrorMessage)\n raise helpfulErrorMessage\n\n\ndef getMemberFromDiscordHandle(discordHandle: str):\n \"\"\"Finds and returns the user with the given discord handle, if\n such a user doesn't exist, return None\"\"\"\n try:\n member_ = Member.objects(discordHandle=discordHandle).first()\n assert member_\n member = MemberInDBSchema(**memberHelper(member_))\n return member\n except AssertionError as _:\n return None\n except Exception as e:\n raise Exception(\n \"Couldn't find a user with the discord handle {}, due to {}\"\n .format(discordHandle, e))\n\n\ndef getMemberFromRollNumber(rollNumber: int, **kwargs):\n \"\"\"Finds and returns the user with the given roll number, if\n such a user doesn't exist, return None\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n rawData = kwargs.get('rawData', False)\n try:\n user = Member.objects(rollno=rollNumber).first()\n assert user\n user = Member.objects(id=id).first()\n assert user\n logging.debug('Found a user {}, with the rollno={}'.format(\n memberHelper(user), rollNumber))\n logging.info('Found the user with rollNumber =' + rollNumber)\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n return parseControllerResponse(data=MemberInDBSchema(**memberHelper\n (user)).dict(exclude={'mongoDocument'}), statuscode=200,\n message='Successfully found the user')\n except AssertionError as _:\n logging.info('A user with roll numer={} does not exist'.format(\n rollNumber))\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=404,\n message='User not found', error=\n 'A user with rollnumber={} does not exist'.format(rollNumber))\n return None\n except Exception as e:\n helpfulErrorMsg = (\n f\"Couldn't find a user with the rollNumber = {rollNumber!r}, due to {e}\"\n )\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n\n\ndef getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):\n \"\"\"Finds and returns the user with the given id, if\n such a user doesn't exist, return None\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n rawData = kwargs.get('rawData', False)\n logging.info('Trying to find the user with the id=' + id)\n try:\n user = Member.objects(id=id).first()\n assert user\n logging.debug('Found a user {}, with the id={}'.format(memberHelper\n (user), id))\n logging.info('Found the user with id=' + id)\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n 
return parseControllerResponse(data=MemberInDBSchema(**memberHelper\n (user)).dict(exclude={'mongoDocument'}), statuscode=200,\n message='Successfully found the user')\n except AssertionError as _:\n logging.info('A user with id={} does not exist'.format(id))\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=404,\n message='User not found', error=\n 'A user with id={} does not exist'.format(id))\n return None\n except Exception as e:\n helpfulErrorMsg = (\"Couldn't find a user with the userId {}, due to {}\"\n .format(id, e))\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n\n\ndef updateMemberWithGivenDetails(data: UpdateMemberSchema, userId: Union[\n ObjectId, str], **kwargs):\n \"\"\"Finds the user with the given data, and updates their details,\n raises an error if the roll number is different\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n try:\n user: Member = getMemberWithGivenId(id=userId, rawData=True)\n assert user, 'Not Found'\n assert user.rollno == data.rollno, 'Roll Number Mismatch'\n user.name = data.name if data.name else user.name\n user.discordHandle = (data.discordHandle if data.discordHandle else\n user.discordHandle)\n user.batch = data.batch if data.batch else user.batch\n if data.password:\n user.password = CreateMemberSchema.hashGivenText(data.password)\n user.save()\n logging.info('successfully updated user data')\n if isResponseParsed:\n return parseControllerResponse(data=MemberInDBSchema(**\n memberHelper(user)).dict(exclude={'mongoDocument'}),\n statuscode=200, message='Successfully updated user details')\n return True\n except AssertionError as err:\n if err == 'Not Found':\n helpfulErrorMsg = f\"A user with userId = {userId!r} doesn't exist\"\n logging.warn(helpfulErrorMsg)\n if not isResponseParsed:\n return None\n return parseControllerResponse(data=None, statuscode=400,\n message=helpfulErrorMsg, error=helpfulErrorMsg)\n if err == 'Roll Number Mismatch':\n helpfulErrorMsg = (\n f\"You cannot change a user's roll number after creating it.\")\n if not isResponseParsed:\n return None\n return parseControllerResponse(data=None, statuscode=400,\n message=helpfulErrorMsg, error=helpfulErrorMsg)\n except Exception as e:\n helpfulErrorMsg = (\n f\"Couldn't update user={data.dict()} data, because e={e!r}\")\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n",
"step-4": "import logging\nfrom bson import ObjectId\nfrom typing import Union\nfrom app.helper import parseControllerResponse\nfrom models.members import Member\nfrom schema.members import CreateMemberSchema, MemberInDBSchema, UpdateMemberSchema, memberHelper\n\n\ndef getAllMembersFromDB(**kwargs):\n \"\"\"Finds and returns all the registered members\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n logging.info('Trying to find all the users')\n try:\n rawMembersData = Member.objects()\n parsedMembers = [MemberInDBSchema(**memberHelper(rawMember)) for\n rawMember in rawMembersData]\n logging.info('Found all the users')\n if not isResponseParsed:\n return parsedMembers\n resp = [parsedMember.dict(exclude={'mongoDocument'}) for\n parsedMember in parsedMembers]\n return parseControllerResponse(data=resp, statuscode=200, message=\n 'Successfully found the users')\n except Exception as e:\n helpfulErrorMessage = \"Couldn't find all the users due to \" + e\n logging.error(helpfulErrorMessage)\n if isResponseParsed:\n return parseControllerResponse(statuscode=500, message=\n 'Something went wrong, try again later', error=\n helpfulErrorMessage)\n raise helpfulErrorMessage\n\n\ndef getMemberFromDiscordHandle(discordHandle: str):\n \"\"\"Finds and returns the user with the given discord handle, if\n such a user doesn't exist, return None\"\"\"\n try:\n member_ = Member.objects(discordHandle=discordHandle).first()\n assert member_\n member = MemberInDBSchema(**memberHelper(member_))\n return member\n except AssertionError as _:\n return None\n except Exception as e:\n raise Exception(\n \"Couldn't find a user with the discord handle {}, due to {}\"\n .format(discordHandle, e))\n\n\ndef getMemberFromRollNumber(rollNumber: int, **kwargs):\n \"\"\"Finds and returns the user with the given roll number, if\n such a user doesn't exist, return None\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n rawData = kwargs.get('rawData', False)\n try:\n user = Member.objects(rollno=rollNumber).first()\n assert user\n user = Member.objects(id=id).first()\n assert user\n logging.debug('Found a user {}, with the rollno={}'.format(\n memberHelper(user), rollNumber))\n logging.info('Found the user with rollNumber =' + rollNumber)\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n return parseControllerResponse(data=MemberInDBSchema(**memberHelper\n (user)).dict(exclude={'mongoDocument'}), statuscode=200,\n message='Successfully found the user')\n except AssertionError as _:\n logging.info('A user with roll numer={} does not exist'.format(\n rollNumber))\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=404,\n message='User not found', error=\n 'A user with rollnumber={} does not exist'.format(rollNumber))\n return None\n except Exception as e:\n helpfulErrorMsg = (\n f\"Couldn't find a user with the rollNumber = {rollNumber!r}, due to {e}\"\n )\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n\n\ndef getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):\n \"\"\"Finds and returns the user with the given id, if\n such a user doesn't exist, return None\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n rawData = kwargs.get('rawData', False)\n logging.info('Trying to find the user with the id=' + id)\n try:\n user = Member.objects(id=id).first()\n 
assert user\n logging.debug('Found a user {}, with the id={}'.format(memberHelper\n (user), id))\n logging.info('Found the user with id=' + id)\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n return parseControllerResponse(data=MemberInDBSchema(**memberHelper\n (user)).dict(exclude={'mongoDocument'}), statuscode=200,\n message='Successfully found the user')\n except AssertionError as _:\n logging.info('A user with id={} does not exist'.format(id))\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=404,\n message='User not found', error=\n 'A user with id={} does not exist'.format(id))\n return None\n except Exception as e:\n helpfulErrorMsg = (\"Couldn't find a user with the userId {}, due to {}\"\n .format(id, e))\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n\n\ndef updateMemberWithGivenDetails(data: UpdateMemberSchema, userId: Union[\n ObjectId, str], **kwargs):\n \"\"\"Finds the user with the given data, and updates their details,\n raises an error if the roll number is different\"\"\"\n isResponseParsed = kwargs.get('isParsed', False)\n try:\n user: Member = getMemberWithGivenId(id=userId, rawData=True)\n assert user, 'Not Found'\n assert user.rollno == data.rollno, 'Roll Number Mismatch'\n user.name = data.name if data.name else user.name\n user.discordHandle = (data.discordHandle if data.discordHandle else\n user.discordHandle)\n user.batch = data.batch if data.batch else user.batch\n if data.password:\n user.password = CreateMemberSchema.hashGivenText(data.password)\n user.save()\n logging.info('successfully updated user data')\n if isResponseParsed:\n return parseControllerResponse(data=MemberInDBSchema(**\n memberHelper(user)).dict(exclude={'mongoDocument'}),\n statuscode=200, message='Successfully updated user details')\n return True\n except AssertionError as err:\n if err == 'Not Found':\n helpfulErrorMsg = f\"A user with userId = {userId!r} doesn't exist\"\n logging.warn(helpfulErrorMsg)\n if not isResponseParsed:\n return None\n return parseControllerResponse(data=None, statuscode=400,\n message=helpfulErrorMsg, error=helpfulErrorMsg)\n if err == 'Roll Number Mismatch':\n helpfulErrorMsg = (\n f\"You cannot change a user's roll number after creating it.\")\n if not isResponseParsed:\n return None\n return parseControllerResponse(data=None, statuscode=400,\n message=helpfulErrorMsg, error=helpfulErrorMsg)\n except Exception as e:\n helpfulErrorMsg = (\n f\"Couldn't update user={data.dict()} data, because e={e!r}\")\n logging.error(helpfulErrorMsg)\n if isResponseParsed:\n return parseControllerResponse(data=None, statuscode=500,\n message='Something went wrong, try again later.', error=\n helpfulErrorMsg)\n raise helpfulErrorMsg\n",
"step-5": "import logging\nfrom bson import ObjectId\nfrom typing import Union\n\nfrom app.helper import parseControllerResponse\n\nfrom models.members import Member\nfrom schema.members import (\n CreateMemberSchema,\n MemberInDBSchema,\n UpdateMemberSchema,\n memberHelper,\n)\n\n\ndef getAllMembersFromDB(**kwargs):\n \"\"\"Finds and returns all the registered members\"\"\"\n\n isResponseParsed = kwargs.get(\"isParsed\", False)\n logging.info(\"Trying to find all the users\")\n\n try:\n rawMembersData = Member.objects()\n\n parsedMembers = [\n MemberInDBSchema(**memberHelper(rawMember)) for rawMember in rawMembersData\n ]\n\n logging.info(\"Found all the users\")\n if not isResponseParsed:\n return parsedMembers\n\n resp = [\n parsedMember.dict(exclude={\"mongoDocument\"})\n for parsedMember in parsedMembers\n ]\n return parseControllerResponse(\n data=resp, statuscode=200, message=\"Successfully found the users\"\n )\n\n except Exception as e:\n helpfulErrorMessage = \"Couldn't find all the users due to \" + e\n\n logging.error(helpfulErrorMessage)\n if isResponseParsed:\n return parseControllerResponse(\n statuscode=500,\n message=\"Something went wrong, try again later\",\n error=helpfulErrorMessage,\n )\n raise helpfulErrorMessage\n\n\ndef getMemberFromDiscordHandle(discordHandle: str):\n \"\"\"Finds and returns the user with the given discord handle, if\n such a user doesn't exist, return None\"\"\"\n try:\n member_ = Member.objects(discordHandle=discordHandle).first()\n assert member_\n member = MemberInDBSchema(**memberHelper(member_))\n return member\n except AssertionError as _:\n # if the member is not found, raise a ValueError\n return None\n except Exception as e:\n raise Exception(\n \"Couldn't find a user with the discord handle \\\n {}, due to {}\".format(\n discordHandle, e\n )\n )\n\n\ndef getMemberFromRollNumber(rollNumber: int, **kwargs):\n \"\"\"Finds and returns the user with the given roll number, if\n such a user doesn't exist, return None\"\"\"\n\n isResponseParsed = kwargs.get(\"isParsed\", False)\n rawData = kwargs.get(\"rawData\", False)\n\n try:\n user = Member.objects(rollno=rollNumber).first()\n assert user\n\n user = Member.objects(id=id).first()\n\n assert user\n\n logging.debug(\n \"Found a user {}, with the rollno={}\".format(memberHelper(user), rollNumber)\n )\n logging.info(\"Found the user with rollNumber =\" + rollNumber)\n\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n\n return parseControllerResponse(\n data=(MemberInDBSchema(**memberHelper(user))).dict(\n exclude={\"mongoDocument\"}\n ),\n statuscode=200,\n message=\"Successfully found the user\",\n )\n\n except AssertionError as _:\n # user was not found, return none or parsed response\n # ! 
its the person who called this func's responsibility to create an error\n logging.info(\"A user with roll numer={} does not exist\".format(rollNumber))\n\n if isResponseParsed:\n return parseControllerResponse(\n data=None,\n statuscode=404,\n message=\"User not found\",\n error=\"A user with rollnumber={} does not exist\".format(rollNumber),\n )\n return None\n except Exception as e:\n helpfulErrorMsg = f\"Couldn't find a user with the {rollNumber = }, due to {e}\"\n\n logging.error(helpfulErrorMsg)\n\n if isResponseParsed:\n return parseControllerResponse(\n data=None,\n statuscode=500,\n message=\"Something went wrong, try again later.\",\n error=helpfulErrorMsg,\n )\n raise helpfulErrorMsg\n\n\ndef getMemberWithGivenId(id: Union[str, ObjectId], **kwargs):\n \"\"\"Finds and returns the user with the given id, if\n such a user doesn't exist, return None\"\"\"\n\n isResponseParsed = kwargs.get(\"isParsed\", False)\n rawData = kwargs.get(\"rawData\", False)\n\n logging.info(\"Trying to find the user with the id=\" + id)\n try:\n\n user = Member.objects(id=id).first()\n\n assert user\n\n logging.debug(\"Found a user {}, with the id={}\".format(memberHelper(user), id))\n logging.info(\"Found the user with id=\" + id)\n\n if not isResponseParsed:\n return user if rawData else MemberInDBSchema(**memberHelper(user))\n\n return parseControllerResponse(\n data=(MemberInDBSchema(**memberHelper(user))).dict(\n exclude={\"mongoDocument\"}\n ),\n statuscode=200,\n message=\"Successfully found the user\",\n )\n\n except AssertionError as _:\n # user was not found, return none or parsed response\n logging.info(\"A user with id={} does not exist\".format(id))\n\n if isResponseParsed:\n return parseControllerResponse(\n data=None,\n statuscode=404,\n message=\"User not found\",\n error=\"A user with id={} does not exist\".format(id),\n )\n return None\n\n except Exception as e:\n helpfulErrorMsg = \"Couldn't find a user with the userId {}, due to {}\".format(\n id, e\n )\n logging.error(helpfulErrorMsg)\n\n if isResponseParsed:\n return parseControllerResponse(\n data=None,\n statuscode=500,\n message=\"Something went wrong, try again later.\",\n error=helpfulErrorMsg,\n )\n raise helpfulErrorMsg\n\n\ndef updateMemberWithGivenDetails(\n data: UpdateMemberSchema, userId: Union[ObjectId, str], **kwargs\n):\n \"\"\"Finds the user with the given data, and updates their details,\n raises an error if the roll number is different\"\"\"\n\n isResponseParsed = kwargs.get(\"isParsed\", False)\n\n try:\n user: Member = getMemberWithGivenId(id=userId, rawData=True)\n\n assert user, \"Not Found\"\n\n # A user cannot change roll number after creating a doc\n assert user.rollno == data.rollno, \"Roll Number Mismatch\"\n\n user.name = data.name if data.name else user.name\n user.discordHandle = (\n data.discordHandle if data.discordHandle else user.discordHandle\n )\n user.batch = data.batch if data.batch else user.batch\n\n if data.password:\n user.password = CreateMemberSchema.hashGivenText(data.password)\n\n user.save()\n\n logging.info(\"successfully updated user data\")\n\n if isResponseParsed:\n return parseControllerResponse(\n data=(MemberInDBSchema(**memberHelper(user))).dict(\n exclude={\"mongoDocument\"}\n ),\n statuscode=200,\n message=\"Successfully updated user details\",\n )\n\n return True\n\n except AssertionError as err:\n if err == \"Not Found\":\n helpfulErrorMsg = f\"A user with {userId = } doesn't exist\"\n logging.warn(helpfulErrorMsg)\n if not isResponseParsed:\n return None\n return 
parseControllerResponse(\n data=None,\n statuscode=400,\n message=helpfulErrorMsg,\n error=helpfulErrorMsg,\n )\n if err == \"Roll Number Mismatch\":\n helpfulErrorMsg = (\n f\"You cannot change a user's roll number after creating it.\"\n )\n if not isResponseParsed:\n return None\n return parseControllerResponse(\n data=None,\n statuscode=400,\n message=helpfulErrorMsg,\n error=helpfulErrorMsg,\n )\n\n except Exception as e:\n helpfulErrorMsg = f\"Couldn't update user={data.dict()} data, because {e=}\"\n\n logging.error(helpfulErrorMsg)\n\n if isResponseParsed:\n return parseControllerResponse(\n data=None,\n statuscode=500,\n message=\"Something went wrong, try again later.\",\n error=helpfulErrorMsg,\n )\n raise helpfulErrorMsg\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import caffe
import numpy as np
class PyLayer(caffe.Layer):
def setup(self, bottom, top):
if len(bottom) != 2:
raise Exception("Need two inputs to compute distance")
def reshape(self, bottom, top):
if bottom[0].count != bottom[1].count:
raise Exception("Inputs must have the same dimension")
self.diff = np.zeros(bottom[0].data.shape, dtype=np.float32)
top[0].reshape(1)
    def forward(self, bottom, top):
        # Euclidean loss: 0.5/N * sum of squared element-wise differences
        self.diff[...] = bottom[0].data - bottom[1].data
        top[0].data[...] = np.sum(self.diff ** 2) * (0.5 / bottom[0].num)
    def backward(self, top, propagate_down, bottom):
        for i in range(2):
            if not propagate_down[i]:
                continue
            # Gradient of 0.5/N * ||a - b||^2: +diff/N w.r.t. the first input,
            # -diff/N w.r.t. the second; 1.0 guards against Python 2 integer division
            if i == 0:
                bottom[i].diff[...] = self.diff * (1.0 / bottom[i].num)
            else:
                bottom[i].diff[...] = self.diff * (-1.0 / bottom[i].num)
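
# Wiring sketch: Caffe loads such a layer from a prototxt entry like the one
# below (the module/blob names are assumptions -- "pyloss" must match this
# file's module name, "PyLayer" the class above):
# layer {
#   name: "loss"  type: "Python"
#   bottom: "pred"  bottom: "label"  top: "loss"
#   python_param { module: "pyloss" layer: "PyLayer" }
#   loss_weight: 1
# }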
|
normal
|
{
"blob_id": "8040b47dc3fd6b03432f64d7fb8a4267cc94ac9a",
"index": 2698,
"step-1": "<mask token>\n\n\nclass PyLayer(caffe.Layer):\n\n def setup(self, bottom, top):\n if len(bottom) != 2:\n raise Exception('Need two inputs to compute distance')\n\n def reshape(self, bottom, top):\n if bottom[0].count != bottom[1].count:\n raise Exception('Inputs must have the same dimension')\n self.diff = np.zeros(bottom[0].data.shape, dtype=np.float32)\n top[0].reshape(1)\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass PyLayer(caffe.Layer):\n\n def setup(self, bottom, top):\n if len(bottom) != 2:\n raise Exception('Need two inputs to compute distance')\n\n def reshape(self, bottom, top):\n if bottom[0].count != bottom[1].count:\n raise Exception('Inputs must have the same dimension')\n self.diff = np.zeros(bottom[0].data.shape, dtype=np.float32)\n top[0].reshape(1)\n <mask token>\n\n def backward(self, top, propagate_down, bottom):\n for i in range(2):\n if not propagate_down[i]:\n continue\n if i == 0:\n bottom[i].diff[...] = self.diff * (1 / bottom[i].num)\n else:\n bottom[i].diff[...] = self.diff * (-1 / bottom[i].num)\n",
"step-3": "<mask token>\n\n\nclass PyLayer(caffe.Layer):\n\n def setup(self, bottom, top):\n if len(bottom) != 2:\n raise Exception('Need two inputs to compute distance')\n\n def reshape(self, bottom, top):\n if bottom[0].count != bottom[1].count:\n raise Exception('Inputs must have the same dimension')\n self.diff = np.zeros(bottom[0].data.shape, dtype=np.float32)\n top[0].reshape(1)\n\n def forward(self, bottom, top):\n self.diff[...] = bottom[0].data - bottom[1].data\n top[0].data[...] = np.sum(self.diff ** 2) * (0.5 / bottom[0].num)\n\n def backward(self, top, propagate_down, bottom):\n for i in range(2):\n if not propagate_down[i]:\n continue\n if i == 0:\n bottom[i].diff[...] = self.diff * (1 / bottom[i].num)\n else:\n bottom[i].diff[...] = self.diff * (-1 / bottom[i].num)\n",
"step-4": "import caffe\nimport numpy as np\n\n\nclass PyLayer(caffe.Layer):\n\n def setup(self, bottom, top):\n if len(bottom) != 2:\n raise Exception('Need two inputs to compute distance')\n\n def reshape(self, bottom, top):\n if bottom[0].count != bottom[1].count:\n raise Exception('Inputs must have the same dimension')\n self.diff = np.zeros(bottom[0].data.shape, dtype=np.float32)\n top[0].reshape(1)\n\n def forward(self, bottom, top):\n self.diff[...] = bottom[0].data - bottom[1].data\n top[0].data[...] = np.sum(self.diff ** 2) * (0.5 / bottom[0].num)\n\n def backward(self, top, propagate_down, bottom):\n for i in range(2):\n if not propagate_down[i]:\n continue\n if i == 0:\n bottom[i].diff[...] = self.diff * (1 / bottom[i].num)\n else:\n bottom[i].diff[...] = self.diff * (-1 / bottom[i].num)\n",
"step-5": "import caffe\nimport numpy as np\n\nclass PyLayer(caffe.Layer):\n def setup(self, bottom, top):\n if len(bottom) != 2:\n raise Exception(\"Need two inputs to compute distance\")\n\n def reshape(self, bottom, top):\n if bottom[0].count != bottom[1].count:\n raise Exception(\"Inputs must have the same dimension\")\n self.diff = np.zeros(bottom[0].data.shape, dtype=np.float32)\n top[0].reshape(1)\n\n def forward(self, bottom, top):\n self.diff[...] = bottom[0].data - bottom[1].data\n top[0].data[...] = np.sum(self.diff ** 2) * (0.5 / bottom[0].num)\n\n def backward(self, top, propagate_down, bottom):\n for i in range(2):\n if not propagate_down[i]:\n continue\n if i == 0:\n bottom[i].diff[...] = self.diff * (1 / bottom[i].num)\n else:\n bottom[i].diff[...] = self.diff * (-1 / bottom[i].num)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from __future__ import annotations
import logging
import os
import sys
from argparse import Namespace
from pathlib import Path
from uuid import uuid4
import pytest
from virtualenv.discovery.builtin import Builtin, get_interpreter
from virtualenv.discovery.py_info import PythonInfo
from virtualenv.info import fs_supports_symlink
@pytest.mark.skipif(not fs_supports_symlink(), reason="symlink not supported")
@pytest.mark.parametrize("case", ["mixed", "lower", "upper"])
def test_discovery_via_path(monkeypatch, case, tmp_path, caplog, session_app_data):
caplog.set_level(logging.DEBUG)
current = PythonInfo.current_system(session_app_data)
core = f"somethingVeryCryptic{'.'.join(str(i) for i in current.version_info[0:3])}"
name = "somethingVeryCryptic"
if case == "lower":
name = name.lower()
elif case == "upper":
name = name.upper()
exe_name = f"{name}{current.version_info.major}{'.exe' if sys.platform == 'win32' else ''}"
target = tmp_path / current.install_path("scripts")
target.mkdir(parents=True)
executable = target / exe_name
os.symlink(sys.executable, str(executable))
pyvenv_cfg = Path(sys.executable).parents[1] / "pyvenv.cfg"
if pyvenv_cfg.exists():
(target / pyvenv_cfg.name).write_bytes(pyvenv_cfg.read_bytes())
new_path = os.pathsep.join([str(target), *os.environ.get("PATH", "").split(os.pathsep)])
monkeypatch.setenv("PATH", new_path)
interpreter = get_interpreter(core, [])
assert interpreter is not None
def test_discovery_via_path_not_found(tmp_path, monkeypatch):
monkeypatch.setenv("PATH", str(tmp_path))
interpreter = get_interpreter(uuid4().hex, [])
assert interpreter is None
def test_relative_path(session_app_data, monkeypatch):
sys_executable = Path(PythonInfo.current_system(app_data=session_app_data).system_executable)
cwd = sys_executable.parents[1]
monkeypatch.chdir(str(cwd))
relative = str(sys_executable.relative_to(cwd))
result = get_interpreter(relative, [], session_app_data)
assert result is not None
def test_discovery_fallback_fail(session_app_data, caplog):
caplog.set_level(logging.DEBUG)
builtin = Builtin(
Namespace(app_data=session_app_data, try_first_with=[], python=["magic-one", "magic-two"], env=os.environ),
)
result = builtin.run()
assert result is None
assert "accepted" not in caplog.text
def test_discovery_fallback_ok(session_app_data, caplog):
caplog.set_level(logging.DEBUG)
builtin = Builtin(
Namespace(app_data=session_app_data, try_first_with=[], python=["magic-one", sys.executable], env=os.environ),
)
result = builtin.run()
assert result is not None, caplog.text
assert result.executable == sys.executable, caplog.text
assert "accepted" in caplog.text
|
normal
|
{
"blob_id": "55d4f4bba2b72ec93cb883527d2a9c2ebe8ec337",
"index": 4910,
"step-1": "<mask token>\n\n\ndef test_relative_path(session_app_data, monkeypatch):\n sys_executable = Path(PythonInfo.current_system(app_data=\n session_app_data).system_executable)\n cwd = sys_executable.parents[1]\n monkeypatch.chdir(str(cwd))\n relative = str(sys_executable.relative_to(cwd))\n result = get_interpreter(relative, [], session_app_data)\n assert result is not None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected](not fs_supports_symlink(), reason='symlink not supported')\[email protected]('case', ['mixed', 'lower', 'upper'])\ndef test_discovery_via_path(monkeypatch, case, tmp_path, caplog,\n session_app_data):\n caplog.set_level(logging.DEBUG)\n current = PythonInfo.current_system(session_app_data)\n core = (\n f\"somethingVeryCryptic{'.'.join(str(i) for i in current.version_info[0:3])}\"\n )\n name = 'somethingVeryCryptic'\n if case == 'lower':\n name = name.lower()\n elif case == 'upper':\n name = name.upper()\n exe_name = (\n f\"{name}{current.version_info.major}{'.exe' if sys.platform == 'win32' else ''}\"\n )\n target = tmp_path / current.install_path('scripts')\n target.mkdir(parents=True)\n executable = target / exe_name\n os.symlink(sys.executable, str(executable))\n pyvenv_cfg = Path(sys.executable).parents[1] / 'pyvenv.cfg'\n if pyvenv_cfg.exists():\n (target / pyvenv_cfg.name).write_bytes(pyvenv_cfg.read_bytes())\n new_path = os.pathsep.join([str(target), *os.environ.get('PATH', '').\n split(os.pathsep)])\n monkeypatch.setenv('PATH', new_path)\n interpreter = get_interpreter(core, [])\n assert interpreter is not None\n\n\ndef test_discovery_via_path_not_found(tmp_path, monkeypatch):\n monkeypatch.setenv('PATH', str(tmp_path))\n interpreter = get_interpreter(uuid4().hex, [])\n assert interpreter is None\n\n\ndef test_relative_path(session_app_data, monkeypatch):\n sys_executable = Path(PythonInfo.current_system(app_data=\n session_app_data).system_executable)\n cwd = sys_executable.parents[1]\n monkeypatch.chdir(str(cwd))\n relative = str(sys_executable.relative_to(cwd))\n result = get_interpreter(relative, [], session_app_data)\n assert result is not None\n\n\n<mask token>\n\n\ndef test_discovery_fallback_ok(session_app_data, caplog):\n caplog.set_level(logging.DEBUG)\n builtin = Builtin(Namespace(app_data=session_app_data, try_first_with=[\n ], python=['magic-one', sys.executable], env=os.environ))\n result = builtin.run()\n assert result is not None, caplog.text\n assert result.executable == sys.executable, caplog.text\n assert 'accepted' in caplog.text\n",
"step-3": "<mask token>\n\n\[email protected](not fs_supports_symlink(), reason='symlink not supported')\[email protected]('case', ['mixed', 'lower', 'upper'])\ndef test_discovery_via_path(monkeypatch, case, tmp_path, caplog,\n session_app_data):\n caplog.set_level(logging.DEBUG)\n current = PythonInfo.current_system(session_app_data)\n core = (\n f\"somethingVeryCryptic{'.'.join(str(i) for i in current.version_info[0:3])}\"\n )\n name = 'somethingVeryCryptic'\n if case == 'lower':\n name = name.lower()\n elif case == 'upper':\n name = name.upper()\n exe_name = (\n f\"{name}{current.version_info.major}{'.exe' if sys.platform == 'win32' else ''}\"\n )\n target = tmp_path / current.install_path('scripts')\n target.mkdir(parents=True)\n executable = target / exe_name\n os.symlink(sys.executable, str(executable))\n pyvenv_cfg = Path(sys.executable).parents[1] / 'pyvenv.cfg'\n if pyvenv_cfg.exists():\n (target / pyvenv_cfg.name).write_bytes(pyvenv_cfg.read_bytes())\n new_path = os.pathsep.join([str(target), *os.environ.get('PATH', '').\n split(os.pathsep)])\n monkeypatch.setenv('PATH', new_path)\n interpreter = get_interpreter(core, [])\n assert interpreter is not None\n\n\ndef test_discovery_via_path_not_found(tmp_path, monkeypatch):\n monkeypatch.setenv('PATH', str(tmp_path))\n interpreter = get_interpreter(uuid4().hex, [])\n assert interpreter is None\n\n\ndef test_relative_path(session_app_data, monkeypatch):\n sys_executable = Path(PythonInfo.current_system(app_data=\n session_app_data).system_executable)\n cwd = sys_executable.parents[1]\n monkeypatch.chdir(str(cwd))\n relative = str(sys_executable.relative_to(cwd))\n result = get_interpreter(relative, [], session_app_data)\n assert result is not None\n\n\ndef test_discovery_fallback_fail(session_app_data, caplog):\n caplog.set_level(logging.DEBUG)\n builtin = Builtin(Namespace(app_data=session_app_data, try_first_with=[\n ], python=['magic-one', 'magic-two'], env=os.environ))\n result = builtin.run()\n assert result is None\n assert 'accepted' not in caplog.text\n\n\ndef test_discovery_fallback_ok(session_app_data, caplog):\n caplog.set_level(logging.DEBUG)\n builtin = Builtin(Namespace(app_data=session_app_data, try_first_with=[\n ], python=['magic-one', sys.executable], env=os.environ))\n result = builtin.run()\n assert result is not None, caplog.text\n assert result.executable == sys.executable, caplog.text\n assert 'accepted' in caplog.text\n",
"step-4": "from __future__ import annotations\nimport logging\nimport os\nimport sys\nfrom argparse import Namespace\nfrom pathlib import Path\nfrom uuid import uuid4\nimport pytest\nfrom virtualenv.discovery.builtin import Builtin, get_interpreter\nfrom virtualenv.discovery.py_info import PythonInfo\nfrom virtualenv.info import fs_supports_symlink\n\n\[email protected](not fs_supports_symlink(), reason='symlink not supported')\[email protected]('case', ['mixed', 'lower', 'upper'])\ndef test_discovery_via_path(monkeypatch, case, tmp_path, caplog,\n session_app_data):\n caplog.set_level(logging.DEBUG)\n current = PythonInfo.current_system(session_app_data)\n core = (\n f\"somethingVeryCryptic{'.'.join(str(i) for i in current.version_info[0:3])}\"\n )\n name = 'somethingVeryCryptic'\n if case == 'lower':\n name = name.lower()\n elif case == 'upper':\n name = name.upper()\n exe_name = (\n f\"{name}{current.version_info.major}{'.exe' if sys.platform == 'win32' else ''}\"\n )\n target = tmp_path / current.install_path('scripts')\n target.mkdir(parents=True)\n executable = target / exe_name\n os.symlink(sys.executable, str(executable))\n pyvenv_cfg = Path(sys.executable).parents[1] / 'pyvenv.cfg'\n if pyvenv_cfg.exists():\n (target / pyvenv_cfg.name).write_bytes(pyvenv_cfg.read_bytes())\n new_path = os.pathsep.join([str(target), *os.environ.get('PATH', '').\n split(os.pathsep)])\n monkeypatch.setenv('PATH', new_path)\n interpreter = get_interpreter(core, [])\n assert interpreter is not None\n\n\ndef test_discovery_via_path_not_found(tmp_path, monkeypatch):\n monkeypatch.setenv('PATH', str(tmp_path))\n interpreter = get_interpreter(uuid4().hex, [])\n assert interpreter is None\n\n\ndef test_relative_path(session_app_data, monkeypatch):\n sys_executable = Path(PythonInfo.current_system(app_data=\n session_app_data).system_executable)\n cwd = sys_executable.parents[1]\n monkeypatch.chdir(str(cwd))\n relative = str(sys_executable.relative_to(cwd))\n result = get_interpreter(relative, [], session_app_data)\n assert result is not None\n\n\ndef test_discovery_fallback_fail(session_app_data, caplog):\n caplog.set_level(logging.DEBUG)\n builtin = Builtin(Namespace(app_data=session_app_data, try_first_with=[\n ], python=['magic-one', 'magic-two'], env=os.environ))\n result = builtin.run()\n assert result is None\n assert 'accepted' not in caplog.text\n\n\ndef test_discovery_fallback_ok(session_app_data, caplog):\n caplog.set_level(logging.DEBUG)\n builtin = Builtin(Namespace(app_data=session_app_data, try_first_with=[\n ], python=['magic-one', sys.executable], env=os.environ))\n result = builtin.run()\n assert result is not None, caplog.text\n assert result.executable == sys.executable, caplog.text\n assert 'accepted' in caplog.text\n",
"step-5": "from __future__ import annotations\n\nimport logging\nimport os\nimport sys\nfrom argparse import Namespace\nfrom pathlib import Path\nfrom uuid import uuid4\n\nimport pytest\n\nfrom virtualenv.discovery.builtin import Builtin, get_interpreter\nfrom virtualenv.discovery.py_info import PythonInfo\nfrom virtualenv.info import fs_supports_symlink\n\n\[email protected](not fs_supports_symlink(), reason=\"symlink not supported\")\[email protected](\"case\", [\"mixed\", \"lower\", \"upper\"])\ndef test_discovery_via_path(monkeypatch, case, tmp_path, caplog, session_app_data):\n caplog.set_level(logging.DEBUG)\n current = PythonInfo.current_system(session_app_data)\n core = f\"somethingVeryCryptic{'.'.join(str(i) for i in current.version_info[0:3])}\"\n name = \"somethingVeryCryptic\"\n if case == \"lower\":\n name = name.lower()\n elif case == \"upper\":\n name = name.upper()\n exe_name = f\"{name}{current.version_info.major}{'.exe' if sys.platform == 'win32' else ''}\"\n target = tmp_path / current.install_path(\"scripts\")\n target.mkdir(parents=True)\n executable = target / exe_name\n os.symlink(sys.executable, str(executable))\n pyvenv_cfg = Path(sys.executable).parents[1] / \"pyvenv.cfg\"\n if pyvenv_cfg.exists():\n (target / pyvenv_cfg.name).write_bytes(pyvenv_cfg.read_bytes())\n new_path = os.pathsep.join([str(target), *os.environ.get(\"PATH\", \"\").split(os.pathsep)])\n monkeypatch.setenv(\"PATH\", new_path)\n interpreter = get_interpreter(core, [])\n\n assert interpreter is not None\n\n\ndef test_discovery_via_path_not_found(tmp_path, monkeypatch):\n monkeypatch.setenv(\"PATH\", str(tmp_path))\n interpreter = get_interpreter(uuid4().hex, [])\n assert interpreter is None\n\n\ndef test_relative_path(session_app_data, monkeypatch):\n sys_executable = Path(PythonInfo.current_system(app_data=session_app_data).system_executable)\n cwd = sys_executable.parents[1]\n monkeypatch.chdir(str(cwd))\n relative = str(sys_executable.relative_to(cwd))\n result = get_interpreter(relative, [], session_app_data)\n assert result is not None\n\n\ndef test_discovery_fallback_fail(session_app_data, caplog):\n caplog.set_level(logging.DEBUG)\n builtin = Builtin(\n Namespace(app_data=session_app_data, try_first_with=[], python=[\"magic-one\", \"magic-two\"], env=os.environ),\n )\n\n result = builtin.run()\n assert result is None\n\n assert \"accepted\" not in caplog.text\n\n\ndef test_discovery_fallback_ok(session_app_data, caplog):\n caplog.set_level(logging.DEBUG)\n builtin = Builtin(\n Namespace(app_data=session_app_data, try_first_with=[], python=[\"magic-one\", sys.executable], env=os.environ),\n )\n\n result = builtin.run()\n assert result is not None, caplog.text\n assert result.executable == sys.executable, caplog.text\n\n assert \"accepted\" in caplog.text\n",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
# template for "Stopwatch: The Game"
import math
import simplegui
# define global variables
successcount = 0;
totalstopcount = 0;
count = 0;
T = True;
F = True;
# define helper function format that converts time
# in tenths of seconds into formatted string A:BC.D
def format(t):
A = str(t // 600);
tem = (t // 10);
tem = (tem) % 60;
B = str(tem // 10);
C = str(tem % 10);
D = str(t % 10);
return A + ":" + B + C + "." + D;
# define event handlers for buttons; "Start", "Stop", "Reset"
def stop():
global successcount, totalstopcount, T;
timer.stop();
if (T == True):
if (F == False):
totalstopcount = totalstopcount + 1;
T = False;
if ((count % 10 == 0) and (count != 0)):
successcount = successcount + 1;
def start():
global T, F;
T = True;
F = False;
timer.start();
def reset():
global successcount, totalstopcount, count, F;
count = 0;
successcount = 0;
totalstopcount = 0;
F = True;
# define event handler for timer with 0.1 sec interval
def tick():
global count;
count = count + 1;
# define draw handler
def draw(canvas):
global count;
canvas.draw_text(format(count), [250, 250], 40, "red");
canvas.draw_text(str(successcount) + "/" + str(totalstopcount), [400, 100], 30, "orange");
# create frame
frame = simplegui.create_frame("Stopwatch", 500, 500);
frame.add_button("START", start);
frame.add_button("STOP", stop);
frame.add_button("RESET", reset);
# register event handlers
frame.set_draw_handler(draw);
timer = simplegui.create_timer(100, tick)
# start frame
frame.start();
# Please remember to review the grading rubric
|
normal
|
{
"blob_id": "bb198978ffc799bb43acf870467496e1dcc54d4b",
"index": 3710,
"step-1": "<mask token>\n\n\ndef format(t):\n A = str(t // 600)\n tem = t // 10\n tem = tem % 60\n B = str(tem // 10)\n C = str(tem % 10)\n D = str(t % 10)\n return A + ':' + B + C + '.' + D\n\n\n<mask token>\n\n\ndef reset():\n global successcount, totalstopcount, count, F\n count = 0\n successcount = 0\n totalstopcount = 0\n F = True\n\n\ndef tick():\n global count\n count = count + 1\n\n\ndef draw(canvas):\n global count\n canvas.draw_text(format(count), [250, 250], 40, 'red')\n canvas.draw_text(str(successcount) + '/' + str(totalstopcount), [400, \n 100], 30, 'orange')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef format(t):\n A = str(t // 600)\n tem = t // 10\n tem = tem % 60\n B = str(tem // 10)\n C = str(tem % 10)\n D = str(t % 10)\n return A + ':' + B + C + '.' + D\n\n\ndef stop():\n global successcount, totalstopcount, T\n timer.stop()\n if T == True:\n if F == False:\n totalstopcount = totalstopcount + 1\n T = False\n if count % 10 == 0 and count != 0:\n successcount = successcount + 1\n\n\n<mask token>\n\n\ndef reset():\n global successcount, totalstopcount, count, F\n count = 0\n successcount = 0\n totalstopcount = 0\n F = True\n\n\ndef tick():\n global count\n count = count + 1\n\n\ndef draw(canvas):\n global count\n canvas.draw_text(format(count), [250, 250], 40, 'red')\n canvas.draw_text(str(successcount) + '/' + str(totalstopcount), [400, \n 100], 30, 'orange')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef format(t):\n A = str(t // 600)\n tem = t // 10\n tem = tem % 60\n B = str(tem // 10)\n C = str(tem % 10)\n D = str(t % 10)\n return A + ':' + B + C + '.' + D\n\n\ndef stop():\n global successcount, totalstopcount, T\n timer.stop()\n if T == True:\n if F == False:\n totalstopcount = totalstopcount + 1\n T = False\n if count % 10 == 0 and count != 0:\n successcount = successcount + 1\n\n\ndef start():\n global T, F\n T = True\n F = False\n timer.start()\n\n\ndef reset():\n global successcount, totalstopcount, count, F\n count = 0\n successcount = 0\n totalstopcount = 0\n F = True\n\n\ndef tick():\n global count\n count = count + 1\n\n\ndef draw(canvas):\n global count\n canvas.draw_text(format(count), [250, 250], 40, 'red')\n canvas.draw_text(str(successcount) + '/' + str(totalstopcount), [400, \n 100], 30, 'orange')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef format(t):\n A = str(t // 600)\n tem = t // 10\n tem = tem % 60\n B = str(tem // 10)\n C = str(tem % 10)\n D = str(t % 10)\n return A + ':' + B + C + '.' + D\n\n\ndef stop():\n global successcount, totalstopcount, T\n timer.stop()\n if T == True:\n if F == False:\n totalstopcount = totalstopcount + 1\n T = False\n if count % 10 == 0 and count != 0:\n successcount = successcount + 1\n\n\ndef start():\n global T, F\n T = True\n F = False\n timer.start()\n\n\ndef reset():\n global successcount, totalstopcount, count, F\n count = 0\n successcount = 0\n totalstopcount = 0\n F = True\n\n\ndef tick():\n global count\n count = count + 1\n\n\ndef draw(canvas):\n global count\n canvas.draw_text(format(count), [250, 250], 40, 'red')\n canvas.draw_text(str(successcount) + '/' + str(totalstopcount), [400, \n 100], 30, 'orange')\n\n\n<mask token>\nframe.add_button('START', start)\nframe.add_button('STOP', stop)\nframe.add_button('RESET', reset)\nframe.set_draw_handler(draw)\n<mask token>\nframe.start()\n",
"step-5": "# template for \"Stopwatch: The Game\"\nimport math\nimport simplegui\n\n\n# define global variables\nsuccesscount = 0;\ntotalstopcount = 0;\ncount = 0;\nT = True;\nF = True;\n\n\n# define helper function format that converts time\n# in tenths of seconds into formatted string A:BC.D\ndef format(t):\n A = str(t // 600);\n tem = (t // 10);\n tem = (tem) % 60;\n B = str(tem // 10);\n C = str(tem % 10);\n D = str(t % 10);\n return A + \":\" + B + C + \".\" + D;\n\n\n# define event handlers for buttons; \"Start\", \"Stop\", \"Reset\"\ndef stop():\n global successcount, totalstopcount, T;\n timer.stop();\n if (T == True):\n if (F == False):\n totalstopcount = totalstopcount + 1;\n T = False;\n if ((count % 10 == 0) and (count != 0)):\n successcount = successcount + 1;\n\n\ndef start():\n global T, F;\n T = True;\n F = False;\n timer.start();\n\n\ndef reset():\n global successcount, totalstopcount, count, F;\n count = 0;\n successcount = 0;\n totalstopcount = 0;\n F = True;\n\n\n# define event handler for timer with 0.1 sec interval\ndef tick():\n global count;\n count = count + 1;\n\n\n# define draw handler\ndef draw(canvas):\n global count;\n canvas.draw_text(format(count), [250, 250], 40, \"red\");\n canvas.draw_text(str(successcount) + \"/\" + str(totalstopcount), [400, 100], 30, \"orange\");\n\n\n# create frame\nframe = simplegui.create_frame(\"Stopwatch\", 500, 500);\nframe.add_button(\"START\", start);\nframe.add_button(\"STOP\", stop);\nframe.add_button(\"RESET\", reset);\n\n# register event handlers\nframe.set_draw_handler(draw);\ntimer = simplegui.create_timer(100, tick)\n\n# start frame\nframe.start();\n\n# Please remember to review the grading rubric\n\n",
"step-ids": [
4,
5,
6,
7,
10
]
}
|
[
4,
5,
6,
7,
10
] |
from datetime import datetime as dt
YEAR = dt.today().year
BINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}
CHROME_DRIVER_PATH = r'C:\Users\pavithra\Downloads\chromedriver_win32\chromedriver.exe'
EXTRACTED_DIR = r'C:\Users\pavithra\Documents\fintuple-automation-projects\BseBhavCopy\dailybhavcopy\dailybhavcopy' \
r'\csv_files'
ZIP_DIR = r'C:\Users\pavithra\Documents\fintuple-automation-projects\BseBhavCopy\dailybhavcopy\dailybhavcopy\zip_files'
HEADLESS_OPTIONS = {'headless': '--headless',
'window_size': '--window-size=1920x1080'}
DOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,
'download.prompt_for_download': False}
def enable_download(driver, directory):
"""
:param driver: Selenium web driver
:param directory: Directory to store the file
This function allows the Selenium web driver to store the file in the given directory.
"""
driver.command_executor._commands["send_command"] = ("POST", '/session/$sessionId/chromium/send_command')
params = {'cmd': 'Page.setDownloadBehavior',
'params': {'behavior': 'allow',
'downloadPath': directory}}
driver.execute("send_command", params)
|
normal
|
{
"blob_id": "95422348c8db9753830cc0a7c8785c05b44886b1",
"index": 842,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n",
"step-3": "<mask token>\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location':\n 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = (\n 'C:\\\\Users\\\\pavithra\\\\Downloads\\\\chromedriver_win32\\\\chromedriver.exe')\nEXTRACTED_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\csv_files'\n )\nZIP_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\zip_files'\n )\nHEADLESS_OPTIONS = {'headless': '--headless', 'window_size':\n '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n",
"step-4": "from datetime import datetime as dt\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location':\n 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = (\n 'C:\\\\Users\\\\pavithra\\\\Downloads\\\\chromedriver_win32\\\\chromedriver.exe')\nEXTRACTED_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\csv_files'\n )\nZIP_DIR = (\n 'C:\\\\Users\\\\pavithra\\\\Documents\\\\fintuple-automation-projects\\\\BseBhavCopy\\\\dailybhavcopy\\\\dailybhavcopy\\\\zip_files'\n )\nHEADLESS_OPTIONS = {'headless': '--headless', 'window_size':\n '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands['send_command'\n ] = 'POST', '/session/$sessionId/chromium/send_command'\n params = {'cmd': 'Page.setDownloadBehavior', 'params': {'behavior':\n 'allow', 'downloadPath': directory}}\n driver.execute('send_command', params)\n",
"step-5": "from datetime import datetime as dt\n\nYEAR = dt.today().year\nBINARY_LOCATION = {'binary_location': 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe'}\nCHROME_DRIVER_PATH = r'C:\\Users\\pavithra\\Downloads\\chromedriver_win32\\chromedriver.exe'\nEXTRACTED_DIR = r'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy' \\\n r'\\csv_files'\nZIP_DIR = r'C:\\Users\\pavithra\\Documents\\fintuple-automation-projects\\BseBhavCopy\\dailybhavcopy\\dailybhavcopy\\zip_files'\nHEADLESS_OPTIONS = {'headless': '--headless',\n 'window_size': '--window-size=1920x1080'}\nDOWNLOAD_PREFERENCES = {'download.default_directory': EXTRACTED_DIR,\n 'download.prompt_for_download': False}\n\n\ndef enable_download(driver, directory):\n \"\"\"\n\n :param driver: Selenium web driver\n :param directory: Directory to store the file\n\n This function allows the Selenium web driver to store the file in the given directory.\n \"\"\"\n driver.command_executor._commands[\"send_command\"] = (\"POST\", '/session/$sessionId/chromium/send_command')\n params = {'cmd': 'Page.setDownloadBehavior',\n 'params': {'behavior': 'allow',\n 'downloadPath': directory}}\n driver.execute(\"send_command\", params)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
import RPi.GPIO as gpio  # import the RPi.GPIO library as gpio
import time
gpio.setmode(gpio.BOARD)  # use BOARD pin numbering
pin = 40
gpio.setup(pin, gpio.OUT)  # configure the pin as an output
gpio.output(pin, gpio.HIGH)  # drive the pin high
time.sleep(5)  # wait 5 seconds
gpio.output(pin, gpio.LOW)  # drive the pin low
time.sleep(1)  # wait 1 second
gpio.cleanup()  # release the GPIO pins
|
normal
|
{
"blob_id": "cfdfc490396546b7af732417b506100357cd9a1f",
"index": 6762,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ngpio.setmode(gpio.BOARD)\n<mask token>\ngpio.setup(pin, gpio.OUT)\ngpio.output(pin, gpio.HIGH)\ntime.sleep(5)\ngpio.output(pin, gpio.LOW)\ntime.sleep(1)\ngpio.cleanup()\n",
"step-3": "<mask token>\ngpio.setmode(gpio.BOARD)\npin = 40\ngpio.setup(pin, gpio.OUT)\ngpio.output(pin, gpio.HIGH)\ntime.sleep(5)\ngpio.output(pin, gpio.LOW)\ntime.sleep(1)\ngpio.cleanup()\n",
"step-4": "import RPi.GPIO as gpio\nimport time\ngpio.setmode(gpio.BOARD)\npin = 40\ngpio.setup(pin, gpio.OUT)\ngpio.output(pin, gpio.HIGH)\ntime.sleep(5)\ngpio.output(pin, gpio.LOW)\ntime.sleep(1)\ngpio.cleanup()\n",
"step-5": "#!/usr/bin/python3\n# -*- coding: UTF-8 -*-\n\nimport RPi.GPIO as gpio # 导入Rpi.GPIO库函数命名为GPIO\nimport time\n\ngpio.setmode(gpio.BOARD) #将GPIO编程方式设置为BOARD模式\n\npin = 40\n\ngpio.setup(pin, gpio.OUT) #控制pin号引脚\n\ngpio.output(pin, gpio.HIGH) #11号引脚输出高电平\ntime.sleep(5) #计时0.5秒\ngpio.output(pin, gpio.LOW) #11号引脚输出低电平\ntime.sleep(1) #计时1秒\n\ngpio.cleanup() #释放使用的GPIO引脚",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from .core import S3FileSystem, S3File
from .mapping import S3Map
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
normal
|
{
"blob_id": "32e60c672d6e73600d442c4344743deccaed6796",
"index": 8819,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndel get_versions\n",
"step-3": "<mask token>\n__version__ = get_versions()['version']\ndel get_versions\n",
"step-4": "from .core import S3FileSystem, S3File\nfrom .mapping import S3Map\nfrom ._version import get_versions\n__version__ = get_versions()['version']\ndel get_versions\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# Author : cold
# E-mail : [email protected]
# Date : 13/09/05 11:16:58
# Desc :
#
import twqq
from setuptools import setup
requires = ["tornado", "pycurl", "tornadohttpclient"]
packages = ["twqq"]
entry_points = {
}
setup(
name = "twqq",
version = twqq.__version__,
description = 'An asynchronous webqq client library based on tornado',
long_description = open("README.rst").read(),
author = 'cold',
author_email = '[email protected]',
url = 'http://www.linuxzen.com',
license = 'Apache 2.0',
platforms = 'any',
packages = packages,
package_data = {
},
entry_points = entry_points,
install_requires = requires,
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Console',
"Intended Audience :: Developers",
'License :: OSI Approved :: Apache Software License',
'Topic :: Internet :: WWW/HTTP',
'Programming Language :: Python :: 2.7',
],
)
|
normal
|
{
"blob_id": "9492142a569da1d21b1927e79d97f9cf6276efdc",
"index": 2800,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='[email protected]', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n",
"step-3": "<mask token>\nrequires = ['tornado', 'pycurl', 'tornadohttpclient']\npackages = ['twqq']\nentry_points = {}\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='[email protected]', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n",
"step-4": "import twqq\nfrom setuptools import setup\nrequires = ['tornado', 'pycurl', 'tornadohttpclient']\npackages = ['twqq']\nentry_points = {}\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='[email protected]', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n",
"step-5": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n#\n# Author : cold\n# E-mail : [email protected]\n# Date : 13/09/05 11:16:58\n# Desc :\n#\nimport twqq\nfrom setuptools import setup\n\nrequires = [\"tornado\", \"pycurl\", \"tornadohttpclient\"]\n\npackages = [\"twqq\"]\n\nentry_points = {\n}\n\n\nsetup(\n name = \"twqq\",\n version = twqq.__version__,\n description = 'An asynchronous webqq client library based on tornado',\n long_description = open(\"README.rst\").read(),\n author = 'cold',\n author_email = '[email protected]',\n url = 'http://www.linuxzen.com',\n license = 'Apache 2.0',\n platforms = 'any',\n packages = packages,\n package_data = {\n },\n entry_points = entry_points,\n install_requires = requires,\n classifiers=['Development Status :: 3 - Alpha',\n 'Environment :: Console',\n \"Intended Audience :: Developers\",\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP',\n 'Programming Language :: Python :: 2.7',\n ],\n)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import pandas as pd
from tabulate import tabulate
if __name__ == '__main__':
bestPrecision = [0,0,0,0,0,0]
bestPrecisionFile = ['','','','','','']
bestRecall = [0,0,0,0,0,0]
bestRecallFile = ['','','','','','']
bestSupport = [0,0,0,0,0,0]
bestSupportFile = ['','','','','','']
bestF1_Score = [0,0,0,0,0,0]
bestF1_ScoreFile = ['','','','','','']
bestPrecisionOverall = 0
bestPrecisionOverallFile = ''
bestRecallOverall = 0
bestRecallOverallFile = ''
bestSupportOverall = 0
bestSupportOverallFile = ''
bestF1_ScoreOverall = 0
bestF1_ScoreOverallFile = ''
for file in os.listdir("results"):
# (0.359*a)+(0.256*b)+(0.205*c)+(0.087*d)+(0.073*e)+(0.016*f)
df = pd.read_csv("results/"+file)
for i in range(0,6):
if bestF1_Score[i] < df["f1_score"][i]:
bestF1_Score[i] = df["f1_score"][i]
bestF1_ScoreFile[i]=file
if bestPrecision[i] < df["precision"][i]:
bestPrecision[i] = df["precision"][i]
bestPrecisionFile[i] = file
if bestRecall[i] < df["recall"][i]:
bestRecall[i] = df["recall"][i]
bestRecallFile[i] = file
if bestSupport[i] < df["support"][i]:
bestSupport[i] = df["support"][i]
bestSupportFile[i] = file
currPrecision = 0
currRecall = 0
currSupport = 0
currF1_Score = 0
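        # overall scores: weighted average of the six per-class metrics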
for idx,value in enumerate([0.359,0.256,0.205,0.087,0.073,0.016]):
currF1_Score += (value * df["f1_score"][idx])
currPrecision += (value * df["precision"][idx])
currRecall += (value * df["recall"][idx])
currSupport += (value * df["support"][idx])
if currPrecision > bestPrecisionOverall:
bestPrecisionOverall=currPrecision
bestPrecisionOverallFile = file
print(file)
print(bestPrecisionOverall)
if currRecall > bestRecallOverall:
bestRecallOverall=currRecall
bestRecallOverallFile = file
if currSupport > bestSupportOverall:
bestSupportOverall=currSupport
bestSupportOverallFile = file
if currF1_Score > bestF1_ScoreOverall:
bestF1_ScoreOverall=currF1_Score
bestF1_ScoreOverallFile = file
bestPrecision.insert(0,"Precision")
bestPrecisionFile.insert(0, "Precision")
bestRecall.insert(0, "Recall")
bestRecallFile.insert(0, "Recall")
bestSupport.insert(0, "Support")
bestSupportFile.insert(0, "Support")
bestF1_Score.insert(0, "F1_SCORE")
bestF1_ScoreFile.insert(0, "F1_SCORE")
tableSpecific = [["","Class0","Class1","Class2","Class3","Class4","Class5"],
bestPrecision,bestPrecisionFile,bestRecall,bestRecallFile,
bestSupport,bestSupportFile,bestF1_Score,bestF1_ScoreFile]
tableGeneral = [ ["Precision Best","Recall Best","Support Best","F1_Score Best"],
[bestPrecisionOverall,bestRecallOverall,bestSupportOverall,bestF1_ScoreOverall],
[bestPrecisionOverallFile,bestRecallOverallFile,bestSupportOverallFile,bestF1_ScoreOverallFile]]
print(tabulate(tableSpecific))
print(tabulate(tableGeneral))
|
normal
|
{
"blob_id": "22c498d84f40455d89ed32ccf3bf8778cb159579",
"index": 79,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n bestPrecision = [0, 0, 0, 0, 0, 0]\n bestPrecisionFile = ['', '', '', '', '', '']\n bestRecall = [0, 0, 0, 0, 0, 0]\n bestRecallFile = ['', '', '', '', '', '']\n bestSupport = [0, 0, 0, 0, 0, 0]\n bestSupportFile = ['', '', '', '', '', '']\n bestF1_Score = [0, 0, 0, 0, 0, 0]\n bestF1_ScoreFile = ['', '', '', '', '', '']\n bestPrecisionOverall = 0\n bestPrecisionOverallFile = ''\n bestRecallOverall = 0\n bestRecallOverallFile = ''\n bestSupportOverall = 0\n bestSupportOverallFile = ''\n bestF1_ScoreOverall = 0\n bestF1_ScoreOverallFile = ''\n for file in os.listdir('results'):\n df = pd.read_csv('results/' + file)\n for i in range(0, 6):\n if bestF1_Score[i] < df['f1_score'][i]:\n bestF1_Score[i] = df['f1_score'][i]\n bestF1_ScoreFile[i] = file\n if bestPrecision[i] < df['precision'][i]:\n bestPrecision[i] = df['precision'][i]\n bestPrecisionFile[i] = file\n if bestRecall[i] < df['recall'][i]:\n bestRecall[i] = df['recall'][i]\n bestRecallFile[i] = file\n if bestSupport[i] < df['support'][i]:\n bestSupport[i] = df['support'][i]\n bestSupportFile[i] = file\n currPrecision = 0\n currRecall = 0\n currSupport = 0\n currF1_Score = 0\n for idx, value in enumerate([0.359, 0.256, 0.205, 0.087, 0.073, 0.016]\n ):\n currF1_Score += value * df['f1_score'][idx]\n currPrecision += value * df['precision'][idx]\n currRecall += value * df['recall'][idx]\n currSupport += value * df['support'][idx]\n if currPrecision > bestPrecisionOverall:\n bestPrecisionOverall = currPrecision\n bestPrecisionOverallFile = file\n print(file)\n print(bestPrecisionOverall)\n if currRecall > bestRecallOverall:\n bestRecallOverall = currRecall\n bestRecallOverallFile = file\n if currSupport > bestSupportOverall:\n bestSupportOverall = currSupport\n bestSupportOverallFile = file\n if currF1_Score > bestF1_ScoreOverall:\n bestF1_ScoreOverall = currF1_Score\n bestF1_ScoreOverallFile = file\n bestPrecision.insert(0, 'Precision')\n bestPrecisionFile.insert(0, 'Precision')\n bestRecall.insert(0, 'Recall')\n bestRecallFile.insert(0, 'Recall')\n bestSupport.insert(0, 'Support')\n bestSupportFile.insert(0, 'Support')\n bestF1_Score.insert(0, 'F1_SCORE')\n bestF1_ScoreFile.insert(0, 'F1_SCORE')\n tableSpecific = [['', 'Class0', 'Class1', 'Class2', 'Class3', 'Class4',\n 'Class5'], bestPrecision, bestPrecisionFile, bestRecall,\n bestRecallFile, bestSupport, bestSupportFile, bestF1_Score,\n bestF1_ScoreFile]\n tableGeneral = [['Precision Best', 'Recall Best', 'Support Best',\n 'F1_Score Best'], [bestPrecisionOverall, bestRecallOverall,\n bestSupportOverall, bestF1_ScoreOverall], [bestPrecisionOverallFile,\n bestRecallOverallFile, bestSupportOverallFile, bestF1_ScoreOverallFile]\n ]\n print(tabulate(tableSpecific))\n print(tabulate(tableGeneral))\n",
"step-3": "import os\nimport pandas as pd\nfrom tabulate import tabulate\nif __name__ == '__main__':\n bestPrecision = [0, 0, 0, 0, 0, 0]\n bestPrecisionFile = ['', '', '', '', '', '']\n bestRecall = [0, 0, 0, 0, 0, 0]\n bestRecallFile = ['', '', '', '', '', '']\n bestSupport = [0, 0, 0, 0, 0, 0]\n bestSupportFile = ['', '', '', '', '', '']\n bestF1_Score = [0, 0, 0, 0, 0, 0]\n bestF1_ScoreFile = ['', '', '', '', '', '']\n bestPrecisionOverall = 0\n bestPrecisionOverallFile = ''\n bestRecallOverall = 0\n bestRecallOverallFile = ''\n bestSupportOverall = 0\n bestSupportOverallFile = ''\n bestF1_ScoreOverall = 0\n bestF1_ScoreOverallFile = ''\n for file in os.listdir('results'):\n df = pd.read_csv('results/' + file)\n for i in range(0, 6):\n if bestF1_Score[i] < df['f1_score'][i]:\n bestF1_Score[i] = df['f1_score'][i]\n bestF1_ScoreFile[i] = file\n if bestPrecision[i] < df['precision'][i]:\n bestPrecision[i] = df['precision'][i]\n bestPrecisionFile[i] = file\n if bestRecall[i] < df['recall'][i]:\n bestRecall[i] = df['recall'][i]\n bestRecallFile[i] = file\n if bestSupport[i] < df['support'][i]:\n bestSupport[i] = df['support'][i]\n bestSupportFile[i] = file\n currPrecision = 0\n currRecall = 0\n currSupport = 0\n currF1_Score = 0\n for idx, value in enumerate([0.359, 0.256, 0.205, 0.087, 0.073, 0.016]\n ):\n currF1_Score += value * df['f1_score'][idx]\n currPrecision += value * df['precision'][idx]\n currRecall += value * df['recall'][idx]\n currSupport += value * df['support'][idx]\n if currPrecision > bestPrecisionOverall:\n bestPrecisionOverall = currPrecision\n bestPrecisionOverallFile = file\n print(file)\n print(bestPrecisionOverall)\n if currRecall > bestRecallOverall:\n bestRecallOverall = currRecall\n bestRecallOverallFile = file\n if currSupport > bestSupportOverall:\n bestSupportOverall = currSupport\n bestSupportOverallFile = file\n if currF1_Score > bestF1_ScoreOverall:\n bestF1_ScoreOverall = currF1_Score\n bestF1_ScoreOverallFile = file\n bestPrecision.insert(0, 'Precision')\n bestPrecisionFile.insert(0, 'Precision')\n bestRecall.insert(0, 'Recall')\n bestRecallFile.insert(0, 'Recall')\n bestSupport.insert(0, 'Support')\n bestSupportFile.insert(0, 'Support')\n bestF1_Score.insert(0, 'F1_SCORE')\n bestF1_ScoreFile.insert(0, 'F1_SCORE')\n tableSpecific = [['', 'Class0', 'Class1', 'Class2', 'Class3', 'Class4',\n 'Class5'], bestPrecision, bestPrecisionFile, bestRecall,\n bestRecallFile, bestSupport, bestSupportFile, bestF1_Score,\n bestF1_ScoreFile]\n tableGeneral = [['Precision Best', 'Recall Best', 'Support Best',\n 'F1_Score Best'], [bestPrecisionOverall, bestRecallOverall,\n bestSupportOverall, bestF1_ScoreOverall], [bestPrecisionOverallFile,\n bestRecallOverallFile, bestSupportOverallFile, bestF1_ScoreOverallFile]\n ]\n print(tabulate(tableSpecific))\n print(tabulate(tableGeneral))\n",
"step-4": "import os\nimport pandas as pd\nfrom tabulate import tabulate\n\nif __name__ == '__main__':\n\n bestPrecision = [0,0,0,0,0,0]\n bestPrecisionFile = ['','','','','','']\n bestRecall = [0,0,0,0,0,0]\n bestRecallFile = ['','','','','','']\n bestSupport = [0,0,0,0,0,0]\n bestSupportFile = ['','','','','','']\n bestF1_Score = [0,0,0,0,0,0]\n bestF1_ScoreFile = ['','','','','','']\n\n bestPrecisionOverall = 0\n bestPrecisionOverallFile = ''\n bestRecallOverall = 0\n bestRecallOverallFile = ''\n bestSupportOverall = 0\n bestSupportOverallFile = ''\n bestF1_ScoreOverall = 0\n bestF1_ScoreOverallFile = ''\n\n for file in os.listdir(\"results\"):\n\n # (0.359*a)+(0.256*b)+(0.205*c)+(0.087*d)+(0.073*e)+(0.016*f)\n df = pd.read_csv(\"results/\"+file)\n\n for i in range(0,6):\n if bestF1_Score[i] < df[\"f1_score\"][i]:\n bestF1_Score[i] = df[\"f1_score\"][i]\n bestF1_ScoreFile[i]=file\n if bestPrecision[i] < df[\"precision\"][i]:\n bestPrecision[i] = df[\"precision\"][i]\n bestPrecisionFile[i] = file\n if bestRecall[i] < df[\"recall\"][i]:\n bestRecall[i] = df[\"recall\"][i]\n bestRecallFile[i] = file\n if bestSupport[i] < df[\"support\"][i]:\n bestSupport[i] = df[\"support\"][i]\n bestSupportFile[i] = file\n\n currPrecision = 0\n currRecall = 0\n currSupport = 0\n currF1_Score = 0\n\n for idx,value in enumerate([0.359,0.256,0.205,0.087,0.073,0.016]):\n currF1_Score += (value * df[\"f1_score\"][idx])\n currPrecision += (value * df[\"precision\"][idx])\n currRecall += (value * df[\"recall\"][idx])\n currSupport += (value * df[\"support\"][idx])\n\n if currPrecision > bestPrecisionOverall:\n bestPrecisionOverall=currPrecision\n bestPrecisionOverallFile = file\n print(file)\n print(bestPrecisionOverall)\n if currRecall > bestRecallOverall:\n bestRecallOverall=currRecall\n bestRecallOverallFile = file\n if currSupport > bestSupportOverall:\n bestSupportOverall=currSupport\n bestSupportOverallFile = file\n if currF1_Score > bestF1_ScoreOverall:\n bestF1_ScoreOverall=currF1_Score\n bestF1_ScoreOverallFile = file\n\n bestPrecision.insert(0,\"Precision\")\n bestPrecisionFile.insert(0, \"Precision\")\n bestRecall.insert(0, \"Recall\")\n bestRecallFile.insert(0, \"Recall\")\n bestSupport.insert(0, \"Support\")\n bestSupportFile.insert(0, \"Support\")\n bestF1_Score.insert(0, \"F1_SCORE\")\n bestF1_ScoreFile.insert(0, \"F1_SCORE\")\n\n tableSpecific = [[\"\",\"Class0\",\"Class1\",\"Class2\",\"Class3\",\"Class4\",\"Class5\"],\n bestPrecision,bestPrecisionFile,bestRecall,bestRecallFile,\n bestSupport,bestSupportFile,bestF1_Score,bestF1_ScoreFile]\n\n tableGeneral = [ [\"Precision Best\",\"Recall Best\",\"Support Best\",\"F1_Score Best\"],\n [bestPrecisionOverall,bestRecallOverall,bestSupportOverall,bestF1_ScoreOverall],\n [bestPrecisionOverallFile,bestRecallOverallFile,bestSupportOverallFile,bestF1_ScoreOverallFile]]\n\n print(tabulate(tableSpecific))\n print(tabulate(tableGeneral))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
This file is part of GALE,
Copyright Joe Krall, 2014.
GALE is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GALE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with GALE. If not, see <http://www.gnu.org/licenses/>.
"""
from Fastmap.Slurp import *
from Fastmap.Moo import *
from jmoo_individual import *
def gale_64_WHERE(problem, population, configuration, values_to_be_passed):
"The Core method behind GALE"
# Compile population into table form used by WHERE
t = slurp([[x for x in row.decisionValues] + ["?" for y in problem.objectives] for row in population],
problem.buildHeader().split(","))
# Initialize some parameters for WHERE
The.allowDomination = True
The.alpha = 1
for i, row in enumerate(t.rows):
row.evaluated = False
# Run WHERE
m = Moo(problem, t, len(t.rows), N=1).divide(minnie=rstop(t))
# Organizing
NDLeafs = m.nonPrunedLeaves() # The surviving non-dominated leafs
allLeafs = m.nonPrunedLeaves() + m.prunedLeaves() # All of the leafs
# After mutation: Check how many rows were actually evaluated
numEval = 0
for leaf in allLeafs:
for row in leaf.table.rows:
if row.evaluated:
numEval += 1
return NDLeafs, numEval
def polynomial_mutation(problem, individual, configuration):
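    # Deb's polynomial mutation: each decision variable is perturbed with
    # probability 1/n using a polynomial distribution of index eta_m.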
from numpy.random import random
eta_m_ = configuration["NSGAIII"]["ETA_M_DEFAULT_"]
distributionIndex_ = eta_m_
output = jmoo_individual(problem, individual.decisionValues)
    probability = 1.0/len(problem.decisions)
for var in xrange(len(problem.decisions)):
if random() <= probability:
y = individual.decisionValues[var]
yU = problem.decisions[var].up
yL = problem.decisions[var].low
delta1 = (y - yL)/(yU - yL)
delta2 = (yU - y)/(yU - yL)
rnd = random()
mut_pow = 1.0/(eta_m_ + 1.0)
if rnd < 0.5:
xy = 1.0 - delta1
val = 2.0 * rnd + (1 - 2 * rnd) * (xy ** (distributionIndex_ + 1.0))
deltaq = val ** mut_pow - 1
else:
xy = 1.0 - delta2
val = 2.0 * (1.0-rnd) + 2.0 * (rnd-0.5) * (xy ** (distributionIndex_+1.0))
deltaq = 1.0 - (val ** mut_pow)
y += deltaq * (yU - yL)
if y < yL: y = yL
if y > yU: y = yU
output.decisionValues[var] = y
return output
def sbxcrossover(problem, parent1, parent2, configuration):
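    # Simulated binary crossover (SBX): with probability SBX_Probability, each
    # pair of parent genes is blended via a polynomial spread factor betaq set
    # by the distribution index; offspring values are clipped to the bounds.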
EPS = 1.0e-14
distribution_index = configuration["NSGAIII"]["ETA_C_DEFAULT_"]
probability = configuration["NSGAIII"]["SBX_Probability"]
from numpy.random import random
offspring1 = jmoo_individual(problem, parent1.decisionValues)
offspring2 = jmoo_individual(problem, parent2.decisionValues)
number_of_variables = len(problem.decisions)
if random() <= probability:
for i in xrange(number_of_variables):
valuex1 = offspring1.decisionValues[i]
valuex2 = offspring2.decisionValues[i]
if random() <= 0.5:
if abs(valuex1 - valuex2) > EPS:
if valuex1 < valuex2:
y1 = valuex1
y2 = valuex2
else:
y1 = valuex2
y2 = valuex1
yL = problem.decisions[i].low
yU = problem.decisions[i].up
rand = random()
beta = 1.0 + (2.0 * (y1 - yL) / (y2 - y1))
alpha = 2.0 - beta ** (-1 * (distribution_index + 1.0))
if rand <= 1/alpha:
                        betaq = (rand * alpha) ** (1.0 / (distribution_index + 1.0))
else:
betaq = (1.0 / (2.0 - rand * alpha)) ** (1.0 / (distribution_index + 1.0))
c1 = 0.5 * ((y1 + y2) - betaq * (y2 - y1))
beta = 1.0 + (2.0 * (yU - y2) / (y2 - y1))
alpha = 2.0 - beta ** -(distribution_index + 1.0)
if rand <= (1.0 / alpha):
betaq = (rand * alpha) ** (1.0 / (distribution_index + 1.0))
else:
betaq = ((1.0 / (2.0 - rand * alpha)) ** (1.0 / (distribution_index + 1.0)))
c2 = 0.5 * ((y1 + y2) + betaq * (y2 - y1))
if c1 < yL: c1 = yL
if c2 < yL: c2 = yL
if c1 > yU: c1 = yU
if c2 > yU: c2 = yU
if random() <= 0.5:
offspring1.decisionValues[i] = c2
offspring2.decisionValues[i] = c1
else:
offspring1.decisionValues[i] = c1
offspring2.decisionValues[i] = c2
else:
offspring1.decisionValues[i] = valuex1
offspring2.decisionValues[i] = valuex2
else:
offspring1.decisionValues[i] = valuex2
offspring2.decisionValues[i] = valuex1
return offspring1, offspring2
def variation(problem, individual_index, population, configuration):
""" SBX regeneration Technique """
from random import randint
another_parent = individual_index
while another_parent == individual_index: another_parent = randint(0, len(population)-1)
from copy import deepcopy
parent1 = deepcopy(population[individual_index])
parent2 = deepcopy(population[another_parent])
child1, _ = sbxcrossover(problem, parent1, parent2, configuration)
mchild1 = polynomial_mutation(problem, child1, configuration)
return mchild1
def gale_64_Mutate(problem, NDLeafs, configuration):
#################
# Mutation Phase
#################
# Keep track of evals
numEval = 0
population = []
for leaf in NDLeafs:
initial_size = len(leaf.table.rows)
# print "Number of mutants: ", len(leaf.table.rows)
# Pull out the Poles
east = leaf.table.rows[0]
west = leaf.table.rows[-1]
# Evaluate those poles if needed
if not east.evaluated:
for o, objScore in enumerate(problem.evaluate(east.cells)):
east.cells[-(len(problem.objectives) - o)] = objScore
east.evaluated = True
numEval += 1
if not west.evaluated:
for o, objScore in enumerate(problem.evaluate(west.cells)):
west.cells[-(len(problem.objectives) - o)] = objScore
west.evaluated = True
numEval += 1
# Score the poles
n = len(problem.decisions)
weights = []
for obj in problem.objectives:
# w is negative when we are maximizing that objective
if obj.lismore:
weights.append(+1)
else:
weights.append(-1)
weightedWest = [c * w for c, w in zip(west.cells[n:], weights)]
weightedEast = [c * w for c, w in zip(east.cells[n:], weights)]
westLoss = loss(weightedWest, weightedEast, mins=[obj.low for obj in problem.objectives],
maxs=[obj.up for obj in problem.objectives])
eastLoss = loss(weightedEast, weightedWest, mins=[obj.low for obj in problem.objectives],
maxs=[obj.up for obj in problem.objectives])
# Determine better Pole
if eastLoss < westLoss:
to_be_mutated = leaf.table.rows[:int(len(leaf.table.rows)/2)]
else:
            to_be_mutated = leaf.table.rows[int(len(leaf.table.rows)/2):]
to_be_mutated_jmoo = []
for row in to_be_mutated:
if row.evaluated:
to_be_mutated_jmoo.append(jmoo_individual(problem, [x for x in row.cells[:len(problem.decisions)]],
[x for x in row.cells[len(problem.decisions):]]))
else:
to_be_mutated_jmoo.append(jmoo_individual(problem, [x for x in row.cells[:len(problem.decisions)]], None))
for i in xrange(initial_size - len(to_be_mutated)):
index = i%len(to_be_mutated_jmoo)
mutant = variation(problem, index, to_be_mutated_jmoo, configuration)
to_be_mutated_jmoo.append(mutant)
members_evaluated = sum([1 for i in to_be_mutated_jmoo if i.valid])
while members_evaluated <= 2:
from random import randint
index = randint(0, len(to_be_mutated_jmoo)-1)
to_be_mutated_jmoo[index].evaluate()
numEval += 1
members_evaluated += 1
print "> ", members_evaluated
population += to_be_mutated_jmoo
return population, numEval
def gale_64_Regen(problem, unusedslot, mutants, configuration):
howMany = configuration["Universal"]["Population_Size"] - len(mutants)
# Generate random individuals
population = []
for i in range(howMany):
population.append(jmoo_individual(problem, problem.generateInput(), None))
return mutants+population, 0
|
normal
|
{
"blob_id": "957545649e9bf1eaabe42a1caa627d544e68f108",
"index": 5490,
"step-1": "\"\"\"\n This file is part of GALE,\n Copyright Joe Krall, 2014.\n\n GALE is free software: you can redistribute it and/or modify\n it under the terms of the GNU Lesser General Public License as published by\n the Free Software Foundation, either version 3 of the License, or\n (at your option) any later version.\n\n GALE is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU Lesser General Public License for more details.\n\n You should have received a copy of the GNU Lesser General Public License\n along with GALE. If not, see <http://www.gnu.org/licenses/>.\n\"\"\"\n\nfrom Fastmap.Slurp import *\nfrom Fastmap.Moo import *\nfrom jmoo_individual import *\n\n\ndef gale_64_WHERE(problem, population, configuration, values_to_be_passed):\n \"The Core method behind GALE\"\n\n # Compile population into table form used by WHERE\n t = slurp([[x for x in row.decisionValues] + [\"?\" for y in problem.objectives] for row in population],\n problem.buildHeader().split(\",\"))\n\n # Initialize some parameters for WHERE\n The.allowDomination = True\n The.alpha = 1\n for i, row in enumerate(t.rows):\n row.evaluated = False\n\n # Run WHERE\n m = Moo(problem, t, len(t.rows), N=1).divide(minnie=rstop(t))\n\n # Organizing\n NDLeafs = m.nonPrunedLeaves() # The surviving non-dominated leafs\n allLeafs = m.nonPrunedLeaves() + m.prunedLeaves() # All of the leafs\n\n # After mutation: Check how many rows were actually evaluated\n numEval = 0\n for leaf in allLeafs:\n for row in leaf.table.rows:\n if row.evaluated:\n numEval += 1\n\n return NDLeafs, numEval\n\n\ndef polynomial_mutation(problem, individual, configuration):\n from numpy.random import random\n eta_m_ = configuration[\"NSGAIII\"][\"ETA_M_DEFAULT_\"]\n distributionIndex_ = eta_m_\n output = jmoo_individual(problem, individual.decisionValues)\n\n probability = 1/len(problem.decisions)\n for var in xrange(len(problem.decisions)):\n if random() <= probability:\n y = individual.decisionValues[var]\n yU = problem.decisions[var].up\n yL = problem.decisions[var].low\n delta1 = (y - yL)/(yU - yL)\n delta2 = (yU - y)/(yU - yL)\n rnd = random()\n\n mut_pow = 1.0/(eta_m_ + 1.0)\n if rnd < 0.5:\n xy = 1.0 - delta1\n val = 2.0 * rnd + (1 - 2 * rnd) * (xy ** (distributionIndex_ + 1.0))\n deltaq = val ** mut_pow - 1\n else:\n xy = 1.0 - delta2\n val = 2.0 * (1.0-rnd) + 2.0 * (rnd-0.5) * (xy ** (distributionIndex_+1.0))\n deltaq = 1.0 - (val ** mut_pow)\n\n\n y += deltaq * (yU - yL)\n if y < yL: y = yL\n if y > yU: y = yU\n\n output.decisionValues[var] = y\n\n return output\n\n\ndef sbxcrossover(problem, parent1, parent2, configuration):\n\n EPS = 1.0e-14\n distribution_index = configuration[\"NSGAIII\"][\"ETA_C_DEFAULT_\"]\n probability = configuration[\"NSGAIII\"][\"SBX_Probability\"]\n from numpy.random import random\n offspring1 = jmoo_individual(problem, parent1.decisionValues)\n offspring2 = jmoo_individual(problem, parent2.decisionValues)\n\n number_of_variables = len(problem.decisions)\n if random() <= probability:\n for i in xrange(number_of_variables):\n valuex1 = offspring1.decisionValues[i]\n valuex2 = offspring2.decisionValues[i]\n if random() <= 0.5:\n if abs(valuex1 - valuex2) > EPS:\n if valuex1 < valuex2:\n y1 = valuex1\n y2 = valuex2\n else:\n y1 = valuex2\n y2 = valuex1\n\n yL = problem.decisions[i].low\n yU = problem.decisions[i].up\n rand = random()\n beta = 1.0 + (2.0 * (y1 - yL) / (y2 - y1))\n alpha = 2.0 - 
beta ** (-1 * (distribution_index + 1.0))\n\n if rand <= 1/alpha:\n betaq = (1.0 / (2.0 - rand * alpha)) ** (1.0 / (distribution_index + 1.0))\n else:\n betaq = (1.0 / (2.0 - rand * alpha)) ** (1.0 / (distribution_index + 1.0))\n\n c1 = 0.5 * ((y1 + y2) - betaq * (y2 - y1))\n beta = 1.0 + (2.0 * (yU - y2) / (y2 - y1))\n alpha = 2.0 - beta ** -(distribution_index + 1.0)\n\n if rand <= (1.0 / alpha):\n betaq = (rand * alpha) ** (1.0 / (distribution_index + 1.0))\n else:\n betaq = ((1.0 / (2.0 - rand * alpha)) ** (1.0 / (distribution_index + 1.0)))\n\n c2 = 0.5 * ((y1 + y2) + betaq * (y2 - y1))\n\n if c1 < yL: c1 = yL\n if c2 < yL: c2 = yL\n if c1 > yU: c1 = yU\n if c2 > yU: c2 = yU\n\n if random() <= 0.5:\n offspring1.decisionValues[i] = c2\n offspring2.decisionValues[i] = c1\n else:\n offspring1.decisionValues[i] = c1\n offspring2.decisionValues[i] = c2\n else:\n offspring1.decisionValues[i] = valuex1\n offspring2.decisionValues[i] = valuex2\n else:\n offspring1.decisionValues[i] = valuex2\n offspring2.decisionValues[i] = valuex1\n\n return offspring1, offspring2\n\n\ndef variation(problem, individual_index, population, configuration):\n \"\"\" SBX regeneration Technique \"\"\"\n\n from random import randint\n another_parent = individual_index\n while another_parent == individual_index: another_parent = randint(0, len(population)-1)\n\n from copy import deepcopy\n parent1 = deepcopy(population[individual_index])\n parent2 = deepcopy(population[another_parent])\n\n child1, _ = sbxcrossover(problem, parent1, parent2, configuration)\n mchild1 = polynomial_mutation(problem, child1, configuration)\n\n return mchild1\n\ndef gale_64_Mutate(problem, NDLeafs, configuration):\n #################\n # Mutation Phase\n #################\n # Keep track of evals\n numEval = 0\n\n population = []\n for leaf in NDLeafs:\n\n initial_size = len(leaf.table.rows)\n\n # print \"Number of mutants: \", len(leaf.table.rows)\n # Pull out the Poles\n east = leaf.table.rows[0]\n west = leaf.table.rows[-1]\n\n # Evaluate those poles if needed\n if not east.evaluated:\n for o, objScore in enumerate(problem.evaluate(east.cells)):\n east.cells[-(len(problem.objectives) - o)] = objScore\n east.evaluated = True\n numEval += 1\n if not west.evaluated:\n for o, objScore in enumerate(problem.evaluate(west.cells)):\n west.cells[-(len(problem.objectives) - o)] = objScore\n west.evaluated = True\n numEval += 1\n\n # Score the poles\n n = len(problem.decisions)\n weights = []\n for obj in problem.objectives:\n # w is negative when we are maximizing that objective\n if obj.lismore:\n weights.append(+1)\n else:\n weights.append(-1)\n weightedWest = [c * w for c, w in zip(west.cells[n:], weights)]\n weightedEast = [c * w for c, w in zip(east.cells[n:], weights)]\n westLoss = loss(weightedWest, weightedEast, mins=[obj.low for obj in problem.objectives],\n maxs=[obj.up for obj in problem.objectives])\n eastLoss = loss(weightedEast, weightedWest, mins=[obj.low for obj in problem.objectives],\n maxs=[obj.up for obj in problem.objectives])\n\n # Determine better Pole\n if eastLoss < westLoss:\n to_be_mutated = leaf.table.rows[:int(len(leaf.table.rows)/2)]\n else:\n to_be_mutated = leaf.table.rows[:int(len(leaf.table.rows)/2)]\n\n to_be_mutated_jmoo = []\n for row in to_be_mutated:\n if row.evaluated:\n to_be_mutated_jmoo.append(jmoo_individual(problem, [x for x in row.cells[:len(problem.decisions)]],\n [x for x in row.cells[len(problem.decisions):]]))\n else:\n to_be_mutated_jmoo.append(jmoo_individual(problem, [x for x in 
row.cells[:len(problem.decisions)]], None))\n\n for i in xrange(initial_size - len(to_be_mutated)):\n index = i%len(to_be_mutated_jmoo)\n mutant = variation(problem, index, to_be_mutated_jmoo, configuration)\n to_be_mutated_jmoo.append(mutant)\n\n members_evaluated = sum([1 for i in to_be_mutated_jmoo if i.valid])\n while members_evaluated <= 2:\n from random import randint\n index = randint(0, len(to_be_mutated_jmoo)-1)\n to_be_mutated_jmoo[index].evaluate()\n numEval += 1\n members_evaluated += 1\n print \"> \", members_evaluated\n\n population += to_be_mutated_jmoo\n\n return population, numEval\n\n\ndef gale_64_Regen(problem, unusedslot, mutants, configuration):\n howMany = configuration[\"Universal\"][\"Population_Size\"] - len(mutants)\n # Generate random individuals\n population = []\n for i in range(howMany):\n population.append(jmoo_individual(problem, problem.generateInput(), None))\n \n return mutants+population, 0\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# python/motorcycle.py Author "Nathan Wycoff <[email protected]>" Date 06.23.2019
# Run a CGAN on the motorcycle data.
import keras
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
np.random.seed(123)
import tensorflow as tf
from scipy.optimize import line_search
tf.enable_eager_execution()
tf.set_random_seed(123)
P = 1 # Dim of X data (to be conditioned on)
R = 1 # Dim of latent error variable
Q = 1 # Dim of y data (to be generated)
H = 20 # Number of hidden units
epochs = 1000
doubleback_const = 1
# Load and pre-process data
mcycle = np.genfromtxt('./data/mcycle.csv', delimiter=',', skip_header = 1)
N = mcycle.shape[0]
x = mcycle[:,0].reshape([N,P])
y = mcycle[:,1].reshape([N,Q])
#x /= max(x)
#y = (y-min(y)) / (max(y) - min(y))
x = (x - np.mean(x)) / np.std(x)
y = (y - np.mean(y)) / np.std(y)
# Build the generator, accepts X and Z as inputs
gen = tf.keras.Sequential()
gen.add(tf.keras.layers.Dense(H, input_dim = P + R, activation = tf.keras.activations.elu))
gen.add(tf.keras.layers.Dense(H, activation = tf.keras.activations.elu))
gen.add(tf.keras.layers.Dense(Q))
# Build the discriminator, accepts an X and a Y as inputs.
disc = tf.keras.Sequential()
disc.add(tf.keras.layers.Dense(H, input_dim = P + Q, activation = tf.keras.activations.elu))
disc.add(tf.keras.layers.Dense(H, activation = tf.keras.activations.elu))
disc.add(tf.keras.layers.Dense(1, activation = tf.keras.activations.sigmoid))
gen.summary()
disc.summary()
# NOTE: Compilation of discriminator needs to occur BEFORE we set its weights untrainable below, as these changes will not be reflected until disc is compiled again. So also be wary of compiling disc later, as its weights may not change.
#TODO: the above is a mess, find a better way.
#disc.compile(tf.keras.optimizers.Adam(), 'binary_crossentropy')
disc.compile(tf.train.GradientDescentOptimizer(learning_rate = 1.0), 'binary_crossentropy')
noise = tf.keras.layers.Input(shape = (R,))
xdat = tf.keras.layers.Input(shape = (P,))
genin = tf.keras.layers.concatenate([xdat, noise])
genout = gen(genin)
discin = tf.keras.layers.concatenate([xdat, genout])
validity = disc(discin)
#NOTE: Next line: possible issue in ordering of inputs?
both_mod = tf.keras.models.Model([xdat, noise], validity)
both_mod.layers[5].trainable = False
#both_mod.compile(tf.keras.optimizers.Adam(), 'binary_crossentropy')
#both_mod.compile(tf.train.AdamOptimizer(), 'binary_crossentropy')
both_mod.compile(tf.train.GradientDescentOptimizer(learning_rate = 1.0), 'binary_crossentropy')
## Custom training with double backprop
#genloss = lambda: both_mod.output
#genopt = tf.keras.optimizers.Adam(genloss, both_mod.trainable_variables)
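# What the custom loops below add, in brief: besides the usual gradient step,
# each update also differentiates the mean squared norm of that gradient a
# second time ("double backprop") and adds doubleback_const times this
# second-order gradient, i.e. a gradient-norm penalty on the loss surface.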
# Do the training!
for epoch in tqdm(range(epochs)):
# Sample some noise
#TODO: Batch size
some_noise = np.random.normal(size=[N,R])
gen_dat = gen.predict(np.hstack([x, some_noise]))
# Train discriminator
    #NOTE: Minor discrepancy in losses between the manual loop below and keras's built-in: follow up if bugs appear.
#disc_rl = disc.train_on_batch(np.hstack([x, y]), np.ones(N))
#disc_fl = disc.train_on_batch(np.hstack([x, gen_dat]), np.zeros(N))
#disc_loss = 0.5 * np.add(disc_rl, disc_fl)
disc.trainable = True
with tf.GradientTape() as td:
with tf.GradientTape() as t:
#preds_real = disc(tf.cast(np.concatenate([x, y]).reshape([N,P+Q]), tf.float32))
#preds_fake = disc(tf.cast(np.concatenate([x, gen_dat]).reshape([N,P+Q]), tf.float32))
preds_real = disc(tf.cast(np.hstack([x, y.reshape([N,Q])]), tf.float32))
preds_fake = disc(tf.cast(np.hstack([x, gen_dat]), tf.float32))
dl_real = tf.reduce_mean(keras.losses.binary_crossentropy(np.ones(N).reshape([N,1]), tf.cast(preds_real, tf.float64)))
dl_fake = tf.reduce_mean(keras.losses.binary_crossentropy(np.zeros(N).reshape([N,1]), tf.cast(preds_fake, tf.float64)))
dl = 0.5*tf.add(dl_real, dl_fake)
grads = t.gradient(dl, disc.trainable_variables)
grads_norm = 0
for i in range(len(grads)):
#grads_norm += tf.reduce_sum(tf.square(grads[i]))
grads_norm += tf.reduce_mean(tf.square(grads[i]))
grads_norm /= float(len(grads))
double_grads = td.gradient(grads_norm, disc.trainable_variables)
grads_n_vars = [(grads[i] + doubleback_const * double_grads[i], disc.trainable_variables[i]) for i in range(len(grads))]
disc.optimizer.apply_gradients(grads_n_vars)
disc.trainable = False
# Train generator
#both_mod.train_on_batch([x, some_noise], np.ones(N))
# Manually compute and apply gradient
with tf.GradientTape() as td:
with tf.GradientTape() as t:
preds = both_mod([tf.cast(x, tf.float32), tf.cast(some_noise, tf.float32)])
bl = tf.reduce_mean(keras.losses.binary_crossentropy(np.ones(N).reshape([N,1]), tf.cast(preds, tf.float64)))
#bl = tf.losses.sigmoid_cross_entropy(preds, np.ones(N).reshape([N,1]))
grads = t.gradient(bl, both_mod.trainable_variables)
grads_norm = 0
for i in range(len(grads)):
#grads_norm += tf.reduce_sum(tf.square(grads[i]))
grads_norm += tf.reduce_mean(tf.square(grads[i]))
grads_norm /= float(len(grads))
double_grads = td.gradient(grads_norm, both_mod.trainable_variables)
grads_n_vars = [(grads[i] + doubleback_const*double_grads[i], both_mod.trainable_variables[i]) for i in range(len(grads))]
both_mod.optimizer.apply_gradients(grads_n_vars)
# Plot the results
fig = plt.figure()
plt.scatter(x, y)
some_noise = np.random.normal(size=[N,R])  # latent draw of dimension R, matching gen's input
preds = gen.predict(np.hstack([x, some_noise]))
plt.scatter(x, preds)
#plt.savefig("images/motor_scatter.pdf")
plt.savefig("temp.pdf")
|
normal
|
{
"blob_id": "aba3e0907e59bc5125759e90d3c784ceb97fca80",
"index": 9941,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.random.seed(123)\n<mask token>\ntf.enable_eager_execution()\ntf.set_random_seed(123)\n<mask token>\ngen.add(tf.keras.layers.Dense(H, input_dim=P + R, activation=tf.keras.\n activations.elu))\ngen.add(tf.keras.layers.Dense(H, activation=tf.keras.activations.elu))\ngen.add(tf.keras.layers.Dense(Q))\n<mask token>\ndisc.add(tf.keras.layers.Dense(H, input_dim=P + Q, activation=tf.keras.\n activations.elu))\ndisc.add(tf.keras.layers.Dense(H, activation=tf.keras.activations.elu))\ndisc.add(tf.keras.layers.Dense(1, activation=tf.keras.activations.sigmoid))\ngen.summary()\ndisc.summary()\ndisc.compile(tf.train.GradientDescentOptimizer(learning_rate=1.0),\n 'binary_crossentropy')\n<mask token>\nboth_mod.compile(tf.train.GradientDescentOptimizer(learning_rate=1.0),\n 'binary_crossentropy')\nfor epoch in tqdm(range(epochs)):\n some_noise = np.random.normal(size=[N, R])\n gen_dat = gen.predict(np.hstack([x, some_noise]))\n disc.trainable = True\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n preds_real = disc(tf.cast(np.hstack([x, y.reshape([N, Q])]), tf\n .float32))\n preds_fake = disc(tf.cast(np.hstack([x, gen_dat]), tf.float32))\n dl_real = tf.reduce_mean(keras.losses.binary_crossentropy(np.\n ones(N).reshape([N, 1]), tf.cast(preds_real, tf.float64)))\n dl_fake = tf.reduce_mean(keras.losses.binary_crossentropy(np.\n zeros(N).reshape([N, 1]), tf.cast(preds_fake, tf.float64)))\n dl = 0.5 * tf.add(dl_real, dl_fake)\n grads = t.gradient(dl, disc.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n double_grads = td.gradient(grads_norm, disc.trainable_variables)\n grads_n_vars = [(grads[i] + doubleback_const * double_grads[i], disc.\n trainable_variables[i]) for i in range(len(grads))]\n disc.optimizer.apply_gradients(grads_n_vars)\n disc.trainable = False\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n preds = both_mod([tf.cast(x, tf.float32), tf.cast(some_noise,\n tf.float32)])\n bl = tf.reduce_mean(keras.losses.binary_crossentropy(np.ones(N)\n .reshape([N, 1]), tf.cast(preds, tf.float64)))\n grads = t.gradient(bl, both_mod.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n double_grads = td.gradient(grads_norm, both_mod.trainable_variables)\n grads_n_vars = [(grads[i] + doubleback_const * double_grads[i],\n both_mod.trainable_variables[i]) for i in range(len(grads))]\n both_mod.optimizer.apply_gradients(grads_n_vars)\n<mask token>\nplt.scatter(x, y)\n<mask token>\nplt.scatter(x, preds)\nplt.savefig('temp.pdf')\n",
"step-3": "<mask token>\nnp.random.seed(123)\n<mask token>\ntf.enable_eager_execution()\ntf.set_random_seed(123)\nP = 1\nR = 1\nQ = 1\nH = 20\nepochs = 1000\ndoubleback_const = 1\nmcycle = np.genfromtxt('./data/mcycle.csv', delimiter=',', skip_header=1)\nN = mcycle.shape[0]\nx = mcycle[:, 0].reshape([N, P])\ny = mcycle[:, 1].reshape([N, Q])\nx = (x - np.mean(x)) / np.std(x)\ny = (y - np.mean(y)) / np.std(y)\ngen = tf.keras.Sequential()\ngen.add(tf.keras.layers.Dense(H, input_dim=P + R, activation=tf.keras.\n activations.elu))\ngen.add(tf.keras.layers.Dense(H, activation=tf.keras.activations.elu))\ngen.add(tf.keras.layers.Dense(Q))\ndisc = tf.keras.Sequential()\ndisc.add(tf.keras.layers.Dense(H, input_dim=P + Q, activation=tf.keras.\n activations.elu))\ndisc.add(tf.keras.layers.Dense(H, activation=tf.keras.activations.elu))\ndisc.add(tf.keras.layers.Dense(1, activation=tf.keras.activations.sigmoid))\ngen.summary()\ndisc.summary()\ndisc.compile(tf.train.GradientDescentOptimizer(learning_rate=1.0),\n 'binary_crossentropy')\nnoise = tf.keras.layers.Input(shape=(R,))\nxdat = tf.keras.layers.Input(shape=(P,))\ngenin = tf.keras.layers.concatenate([xdat, noise])\ngenout = gen(genin)\ndiscin = tf.keras.layers.concatenate([xdat, genout])\nvalidity = disc(discin)\nboth_mod = tf.keras.models.Model([xdat, noise], validity)\nboth_mod.layers[5].trainable = False\nboth_mod.compile(tf.train.GradientDescentOptimizer(learning_rate=1.0),\n 'binary_crossentropy')\nfor epoch in tqdm(range(epochs)):\n some_noise = np.random.normal(size=[N, R])\n gen_dat = gen.predict(np.hstack([x, some_noise]))\n disc.trainable = True\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n preds_real = disc(tf.cast(np.hstack([x, y.reshape([N, Q])]), tf\n .float32))\n preds_fake = disc(tf.cast(np.hstack([x, gen_dat]), tf.float32))\n dl_real = tf.reduce_mean(keras.losses.binary_crossentropy(np.\n ones(N).reshape([N, 1]), tf.cast(preds_real, tf.float64)))\n dl_fake = tf.reduce_mean(keras.losses.binary_crossentropy(np.\n zeros(N).reshape([N, 1]), tf.cast(preds_fake, tf.float64)))\n dl = 0.5 * tf.add(dl_real, dl_fake)\n grads = t.gradient(dl, disc.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n double_grads = td.gradient(grads_norm, disc.trainable_variables)\n grads_n_vars = [(grads[i] + doubleback_const * double_grads[i], disc.\n trainable_variables[i]) for i in range(len(grads))]\n disc.optimizer.apply_gradients(grads_n_vars)\n disc.trainable = False\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n preds = both_mod([tf.cast(x, tf.float32), tf.cast(some_noise,\n tf.float32)])\n bl = tf.reduce_mean(keras.losses.binary_crossentropy(np.ones(N)\n .reshape([N, 1]), tf.cast(preds, tf.float64)))\n grads = t.gradient(bl, both_mod.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n double_grads = td.gradient(grads_norm, both_mod.trainable_variables)\n grads_n_vars = [(grads[i] + doubleback_const * double_grads[i],\n both_mod.trainable_variables[i]) for i in range(len(grads))]\n both_mod.optimizer.apply_gradients(grads_n_vars)\nfig = plt.figure()\nplt.scatter(x, y)\nsome_noise = np.random.normal(size=[N, P])\npreds = gen.predict(np.hstack([x, some_noise]))\nplt.scatter(x, preds)\nplt.savefig('temp.pdf')\n",
"step-4": "import keras\nimport numpy as np\nfrom tqdm import tqdm\nimport matplotlib.pyplot as plt\nnp.random.seed(123)\nimport tensorflow as tf\nfrom scipy.optimize import line_search\ntf.enable_eager_execution()\ntf.set_random_seed(123)\nP = 1\nR = 1\nQ = 1\nH = 20\nepochs = 1000\ndoubleback_const = 1\nmcycle = np.genfromtxt('./data/mcycle.csv', delimiter=',', skip_header=1)\nN = mcycle.shape[0]\nx = mcycle[:, 0].reshape([N, P])\ny = mcycle[:, 1].reshape([N, Q])\nx = (x - np.mean(x)) / np.std(x)\ny = (y - np.mean(y)) / np.std(y)\ngen = tf.keras.Sequential()\ngen.add(tf.keras.layers.Dense(H, input_dim=P + R, activation=tf.keras.\n activations.elu))\ngen.add(tf.keras.layers.Dense(H, activation=tf.keras.activations.elu))\ngen.add(tf.keras.layers.Dense(Q))\ndisc = tf.keras.Sequential()\ndisc.add(tf.keras.layers.Dense(H, input_dim=P + Q, activation=tf.keras.\n activations.elu))\ndisc.add(tf.keras.layers.Dense(H, activation=tf.keras.activations.elu))\ndisc.add(tf.keras.layers.Dense(1, activation=tf.keras.activations.sigmoid))\ngen.summary()\ndisc.summary()\ndisc.compile(tf.train.GradientDescentOptimizer(learning_rate=1.0),\n 'binary_crossentropy')\nnoise = tf.keras.layers.Input(shape=(R,))\nxdat = tf.keras.layers.Input(shape=(P,))\ngenin = tf.keras.layers.concatenate([xdat, noise])\ngenout = gen(genin)\ndiscin = tf.keras.layers.concatenate([xdat, genout])\nvalidity = disc(discin)\nboth_mod = tf.keras.models.Model([xdat, noise], validity)\nboth_mod.layers[5].trainable = False\nboth_mod.compile(tf.train.GradientDescentOptimizer(learning_rate=1.0),\n 'binary_crossentropy')\nfor epoch in tqdm(range(epochs)):\n some_noise = np.random.normal(size=[N, R])\n gen_dat = gen.predict(np.hstack([x, some_noise]))\n disc.trainable = True\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n preds_real = disc(tf.cast(np.hstack([x, y.reshape([N, Q])]), tf\n .float32))\n preds_fake = disc(tf.cast(np.hstack([x, gen_dat]), tf.float32))\n dl_real = tf.reduce_mean(keras.losses.binary_crossentropy(np.\n ones(N).reshape([N, 1]), tf.cast(preds_real, tf.float64)))\n dl_fake = tf.reduce_mean(keras.losses.binary_crossentropy(np.\n zeros(N).reshape([N, 1]), tf.cast(preds_fake, tf.float64)))\n dl = 0.5 * tf.add(dl_real, dl_fake)\n grads = t.gradient(dl, disc.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n double_grads = td.gradient(grads_norm, disc.trainable_variables)\n grads_n_vars = [(grads[i] + doubleback_const * double_grads[i], disc.\n trainable_variables[i]) for i in range(len(grads))]\n disc.optimizer.apply_gradients(grads_n_vars)\n disc.trainable = False\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n preds = both_mod([tf.cast(x, tf.float32), tf.cast(some_noise,\n tf.float32)])\n bl = tf.reduce_mean(keras.losses.binary_crossentropy(np.ones(N)\n .reshape([N, 1]), tf.cast(preds, tf.float64)))\n grads = t.gradient(bl, both_mod.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n double_grads = td.gradient(grads_norm, both_mod.trainable_variables)\n grads_n_vars = [(grads[i] + doubleback_const * double_grads[i],\n both_mod.trainable_variables[i]) for i in range(len(grads))]\n both_mod.optimizer.apply_gradients(grads_n_vars)\nfig = plt.figure()\nplt.scatter(x, y)\nsome_noise = np.random.normal(size=[N, P])\npreds = gen.predict(np.hstack([x, some_noise]))\nplt.scatter(x, 
preds)\nplt.savefig('temp.pdf')\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n# python/motorcycle.py Author \"Nathan Wycoff <[email protected]>\" Date 06.23.2019\n\n# Run a CGAN on the motorcycle data.\nimport keras\nimport numpy as np\nfrom tqdm import tqdm\nimport matplotlib.pyplot as plt\n\nnp.random.seed(123)\nimport tensorflow as tf\nfrom scipy.optimize import line_search\ntf.enable_eager_execution()\ntf.set_random_seed(123)\n\nP = 1 # Dim of X data (to be conditioned on)\nR = 1 # Dim of latent error variable\nQ = 1 # Dim of y data (to be generated)\nH = 20# Number of hidden units\nepochs = 1000\ndoubleback_const = 1\n\n# Load and pre-process data\nmcycle = np.genfromtxt('./data/mcycle.csv', delimiter=',', skip_header = 1)\nN = mcycle.shape[0]\nx = mcycle[:,0].reshape([N,P])\ny = mcycle[:,1].reshape([N,Q])\n#x /= max(x)\n#y = (y-min(y)) / (max(y) - min(y))\nx = (x - np.mean(x)) / np.std(x)\ny = (y - np.mean(y)) / np.std(y)\n\n# Build the generator, accepts X and Z as inputs\ngen = tf.keras.Sequential()\ngen.add(tf.keras.layers.Dense(H, input_dim = P + R, activation = tf.keras.activations.elu))\ngen.add(tf.keras.layers.Dense(H, activation = tf.keras.activations.elu))\ngen.add(tf.keras.layers.Dense(Q))\n\n# Build the discriminator, accepts an X and a Y as inputs.\ndisc = tf.keras.Sequential()\ndisc.add(tf.keras.layers.Dense(H, input_dim = P + Q, activation = tf.keras.activations.elu))\ndisc.add(tf.keras.layers.Dense(H, activation = tf.keras.activations.elu))\ndisc.add(tf.keras.layers.Dense(1, activation = tf.keras.activations.sigmoid))\n\ngen.summary()\ndisc.summary()\n\n# NOTE: Compilation of discriminator needs to occur BEFORE we set its weights untrainable below, as these changes will not be reflected until disc is compiled again. So also be wary of compiling disc later, as its weights may not change.\n#TODO: the above is a mess, find a better way.\n#disc.compile(tf.keras.optimizers.Adam(), 'binary_crossentropy')\ndisc.compile(tf.train.GradientDescentOptimizer(learning_rate = 1.0), 'binary_crossentropy')\n\nnoise = tf.keras.layers.Input(shape = (R,))\nxdat = tf.keras.layers.Input(shape = (P,))\n\ngenin = tf.keras.layers.concatenate([xdat, noise])\ngenout = gen(genin)\n\ndiscin = tf.keras.layers.concatenate([xdat, genout])\nvalidity = disc(discin)\n\n#NOTE: Next lin possible issue in ordering of inputs?\nboth_mod = tf.keras.models.Model([xdat, noise], validity)\nboth_mod.layers[5].trainable = False\n\n#both_mod.compile(tf.keras.optimizers.Adam(), 'binary_crossentropy')\n#both_mod.compile(tf.train.AdamOptimizer(), 'binary_crossentropy')\nboth_mod.compile(tf.train.GradientDescentOptimizer(learning_rate = 1.0), 'binary_crossentropy')\n\n## Custom training with double backprop\n#genloss = lambda: both_mod.output\n#genopt = tf.keras.optimizers.Adam(genloss, both_mod.trainable_variables)\n\n# Do the training!\nfor epoch in tqdm(range(epochs)):\n # Sample some noise\n #TODO: Batch size\n some_noise = np.random.normal(size=[N,R])\n\n gen_dat = gen.predict(np.hstack([x, some_noise]))\n\n # Train discriminator\n #NOTE: Minor discrepency in losses from the manual loop below and from keras's built in: follow up if there appears to be bugs.\n #disc_rl = disc.train_on_batch(np.hstack([x, y]), np.ones(N))\n #disc_fl = disc.train_on_batch(np.hstack([x, gen_dat]), np.zeros(N))\n #disc_loss = 0.5 * np.add(disc_rl, disc_fl)\n\n disc.trainable = True\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n #preds_real = disc(tf.cast(np.concatenate([x, y]).reshape([N,P+Q]), tf.float32))\n #preds_fake = 
disc(tf.cast(np.concatenate([x, gen_dat]).reshape([N,P+Q]), tf.float32))\n preds_real = disc(tf.cast(np.hstack([x, y.reshape([N,Q])]), tf.float32))\n preds_fake = disc(tf.cast(np.hstack([x, gen_dat]), tf.float32))\n dl_real = tf.reduce_mean(keras.losses.binary_crossentropy(np.ones(N).reshape([N,1]), tf.cast(preds_real, tf.float64)))\n dl_fake = tf.reduce_mean(keras.losses.binary_crossentropy(np.zeros(N).reshape([N,1]), tf.cast(preds_fake, tf.float64)))\n dl = 0.5*tf.add(dl_real, dl_fake)\n\n grads = t.gradient(dl, disc.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n #grads_norm += tf.reduce_sum(tf.square(grads[i]))\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n\n double_grads = td.gradient(grads_norm, disc.trainable_variables)\n\n grads_n_vars = [(grads[i] + doubleback_const * double_grads[i], disc.trainable_variables[i]) for i in range(len(grads))]\n disc.optimizer.apply_gradients(grads_n_vars)\n disc.trainable = False\n\n # Train generator\n #both_mod.train_on_batch([x, some_noise], np.ones(N))\n # Manually compute and apply gradient\n with tf.GradientTape() as td:\n with tf.GradientTape() as t:\n preds = both_mod([tf.cast(x, tf.float32), tf.cast(some_noise, tf.float32)])\n bl = tf.reduce_mean(keras.losses.binary_crossentropy(np.ones(N).reshape([N,1]), tf.cast(preds, tf.float64)))\n #bl = tf.losses.sigmoid_cross_entropy(preds, np.ones(N).reshape([N,1]))\n\n grads = t.gradient(bl, both_mod.trainable_variables)\n grads_norm = 0\n for i in range(len(grads)):\n #grads_norm += tf.reduce_sum(tf.square(grads[i]))\n grads_norm += tf.reduce_mean(tf.square(grads[i]))\n grads_norm /= float(len(grads))\n\n double_grads = td.gradient(grads_norm, both_mod.trainable_variables)\n\n grads_n_vars = [(grads[i] + doubleback_const*double_grads[i], both_mod.trainable_variables[i]) for i in range(len(grads))]\n both_mod.optimizer.apply_gradients(grads_n_vars)\n\n# Plot the results\nfig = plt.figure()\nplt.scatter(x, y)\nsome_noise = np.random.normal(size=[N,P])\npreds = gen.predict(np.hstack([x, some_noise]))\nplt.scatter(x, preds)\n#plt.savefig(\"images/motor_scatter.pdf\")\nplt.savefig(\"temp.pdf\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
{
'name': 'Islamic Datepicker',
'category': 'Extra Tools',
'author': 'Mostafa Mohamed',
'website': 'https://eg.linkedin.com/in/mostafa-mohammed-449a8786',
'price': 25.00,
'currency': 'EUR',
'version': '9.0.1.0.1',
'depends': ['base','web'],
'data': [
'views/islamic_template.xml',
],
'qweb': [
"static/src/xml/islamice_date_widget.xml",
],
'auto_install': False,
'installable': True
}
|
normal
|
{
"blob_id": "51a4d8f1be7009b69f0b69bdd51a0077256304a9",
"index": 7222,
"step-1": "<mask token>\n",
"step-2": "{'name': 'Islamic Datepicker', 'category': 'Extra Tools', 'author':\n 'Mostafa Mohamed', 'website':\n 'https://eg.linkedin.com/in/mostafa-mohammed-449a8786', 'price': 25.0,\n 'currency': 'EUR', 'version': '9.0.1.0.1', 'depends': ['base', 'web'],\n 'data': ['views/islamic_template.xml'], 'qweb': [\n 'static/src/xml/islamice_date_widget.xml'], 'auto_install': False,\n 'installable': True}\n",
"step-3": "# -*- coding: utf-8 -*-\n{\n 'name': 'Islamic Datepicker',\n 'category': 'Extra Tools',\n 'author': 'Mostafa Mohamed',\n 'website': 'https://eg.linkedin.com/in/mostafa-mohammed-449a8786',\n 'price': 25.00,\n 'currency': 'EUR',\n 'version': '9.0.1.0.1',\n 'depends': ['base','web'],\n 'data': [\n 'views/islamic_template.xml',\n ],\n 'qweb': [\n \"static/src/xml/islamice_date_widget.xml\",\n ],\n 'auto_install': False,\n 'installable': True\n}\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/env python
from boincvm_common.stomp.StompProtocol import StompProtocolFactory
from stomp.HostStompEngine import HostStompEngine
from boincvm_host.xmlrpc.HostXMLRPCService import HostXMLRPCService
from twisted.internet import reactor
from ConfigParser import SafeConfigParser
import coilmq.start
import logging
import multiprocessing
import time
import pdb
logging.basicConfig(level=logging.DEBUG, \
format='%(asctime)s - %(name)s - %(levelname)s: %(message)s', )
logger = logging.getLogger(__name__)
def startSTOMPBroker(config, serverUpEvent, tries=-1, delay=1, backoff=1.5):
"""
@param tries number of times to retry starting the broker. < 0 means infinitely many.
@param delay number of seconds to wait after the first failed attempt
@param backoff factor by which the delay will be incremented after a failure.
"""
#stomp broker
mtries = tries
mdelay = delay
coilserver = None
from coilmq.config import config as coilconfig
if config.has_section('coilmq'):
for k,v in config.items('coilmq'):
coilconfig.set('coilmq', k, v)
logger.debug("Set %s to %s for coilmq config." % (k,v))
while True:
try:
coilserver = coilmq.start.server_from_config(coilconfig)
logger.info("Stomp server listening on %s:%s" % \
coilserver.server_address)
serverUpEvent.set()
coilserver.serve_forever()
except IOError as ex:
logger.error("Exception while starting coilmq broker: '%s'", ex)
if mtries != 0:
logger.debug("Retrying coilmq startup in %.1f seconds...", mdelay)
time.sleep(mdelay)
mdelay *= backoff
mtries -= 1
else:
logger.debug("Ran out of trials (tried %d times) for coilmq startup. Giving up.", tries)
break
finally:
if coilserver: coilserver.server_close()
def start(config, brokerTimeout = 60.0):
"""
Start twisted event loop and the fun should begin...
@param brokerTimeout how long to wait for a broker
@return a negative number upon failure. Otherwise, it never returns.
"""
manager = multiprocessing.Manager()
serverUpEvent = manager.Event()
broker = multiprocessing.Process(target=startSTOMPBroker, args=(config,serverUpEvent))
broker.daemon = True
broker.name = 'STOMP-Broker'
broker.start()
serverUpEvent.wait(brokerTimeout)
if not serverUpEvent.is_set():
logger.fatal("Broker not available after %.1f seconds. Giving up", brokerTimeout)
return -1
#host side logic
host = config.get('Broker', 'host')
port = int(config.get('Broker', 'port'))
username = config.get('Broker', 'username')
password = config.get('Broker', 'password')
hostEngine = HostStompEngine(config)
stompProtocolFactory = StompProtocolFactory(hostEngine, username, password)
HostXMLRPCService(config).makeEngineAccesible(hostEngine)
reactor.connectTCP(host, port, stompProtocolFactory)
reactor.run()
if __name__ == '__main__':
from sys import argv, exit
if len(argv) < 2:
print "Usage: %s <config-file>" % argv[0]
exit(-1)
else:
configFile = argv[1]
config = SafeConfigParser()
config.read(configFile)
exit(start(config))
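
# A minimal sketch of the expected config file. The [Broker] keys mirror the
# reads above; the [coilmq] section is an assumption -- its items are passed
# straight through to coilmq's own config, so use whatever keys your coilmq
# version understands (listen_addr/listen_port shown as plausible examples):
#
#   [Broker]
#   host = 127.0.0.1
#   port = 61613
#   username = guest
#   password = guest
#
#   [coilmq]
#   listen_addr = 127.0.0.1
#   listen_port = 61613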
|
normal
|
{
"blob_id": "e533b7aadd1cd7137301af8862dd2987622e499e",
"index": 3357,
"step-1": "#!/bin/env python\n\nfrom boincvm_common.stomp.StompProtocol import StompProtocolFactory\nfrom stomp.HostStompEngine import HostStompEngine\n\nfrom boincvm_host.xmlrpc.HostXMLRPCService import HostXMLRPCService\n\nfrom twisted.internet import reactor\nfrom ConfigParser import SafeConfigParser\n\nimport coilmq.start\n\nimport logging\nimport multiprocessing\nimport time \nimport pdb\n\nlogging.basicConfig(level=logging.DEBUG, \\\n format='%(asctime)s - %(name)s - %(levelname)s: %(message)s', )\n\nlogger = logging.getLogger(__name__)\n\ndef startSTOMPBroker(config, serverUpEvent, tries=-1, delay=1, backoff=1.5):\n \"\"\"\n\n @param tries number of times to retry starting the broker. < 0 means infinitely many.\n @param delay number of seconds to wait after the first failed attempt\n @param backoff factor by which the delay will be incremented after a failure.\n \"\"\"\n #stomp broker\n mtries = tries\n mdelay = delay\n coilserver = None\n from coilmq.config import config as coilconfig\n if config.has_section('coilmq'):\n for k,v in config.items('coilmq'):\n coilconfig.set('coilmq', k, v)\n logger.debug(\"Set %s to %s for coilmq config.\" % (k,v))\n while True:\n try:\n coilserver = coilmq.start.server_from_config(coilconfig)\n logger.info(\"Stomp server listening on %s:%s\" % \\\n coilserver.server_address)\n serverUpEvent.set()\n coilserver.serve_forever()\n except IOError as ex:\n logger.error(\"Exception while starting coilmq broker: '%s'\", ex)\n if mtries != 0: \n logger.debug(\"Retrying coilmq startup in %.1f seconds...\", mdelay)\n time.sleep(mdelay)\n mdelay *= backoff\n mtries -= 1\n else:\n logger.debug(\"Ran out of trials (tried %d times) for coilmq startup. Giving up.\", tries)\n break\n finally:\n if coilserver: coilserver.server_close()\n\n\ndef start(config, brokerTimeout = 60.0):\n \"\"\"\n Start twisted event loop and the fun should begin...\n\n @param brokerTimeout how long to wait for a broker \n \n @return a negative number upon failure. Otherwise, it never returns.\n \"\"\"\n \n manager = multiprocessing.Manager()\n serverUpEvent = manager.Event()\n broker = multiprocessing.Process(target=startSTOMPBroker, args=(config,serverUpEvent))\n broker.daemon = True\n broker.name = 'STOMP-Broker'\n broker.start()\n\n serverUpEvent.wait(brokerTimeout)\n if not serverUpEvent.is_set():\n logger.fatal(\"Broker not available after %.1f seconds. Giving up\", brokerTimeout)\n return -1\n #host side logic\n host = config.get('Broker', 'host') \n port = int(config.get('Broker', 'port'))\n username = config.get('Broker', 'username')\n password = config.get('Broker', 'password')\n\n hostEngine = HostStompEngine(config)\n stompProtocolFactory = StompProtocolFactory(hostEngine, username, password)\n \n HostXMLRPCService(config).makeEngineAccesible(hostEngine)\n\n\n reactor.connectTCP(host, port, stompProtocolFactory)\n reactor.run()\n\n\n\nif __name__ == '__main__':\n from sys import argv, exit\n if len(argv) < 2:\n print \"Usage: %s <config-file>\" % argv[0]\n exit(-1)\n else:\n configFile = argv[1]\n\n config = SafeConfigParser()\n config.read(configFile)\n\n exit(start(config))\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
def ddm_dd_convert(coord, direction):
"""Converts GPS reading from DDM to DD
str coord - the ddm coordinate from $GPGGA
str direction - the direction of the coord (N,S,W,E)
returns - string representation of dd coordinate
"""
value = ''
if (direction == 'S' or direction == 'W'):
value += '-'
value += coord[0:-7]
minute = float(coord[-7:])
decimal = round(minute / 60, 8)
result = str(decimal)[1:]
value += result
return value
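
# Worked example (assuming the fixed-width DDM layout the slicing requires,
# i.e. the last 7 characters are the 'MM.MMMM' minutes field):
#   ddm_dd_convert('4916.4500', 'N')
#   -> degrees '49', minutes 16.45, 16.45 / 60 = 0.27416667
#   -> '49.27416667'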
def gprmc_convert(line):
"""Translates $GPRMC line into documented array
str line - the GPRMC line
returns - the data documented into array
"""
gps = line.strip().split(',')
#check data
if gps[2] == 'V':
return
raw_date = gps[9]
time = ''
date = raw_date[0:2]
month = raw_date[2:4]
year = raw_date[4:]
    #note: the hard-coded '20' century prefix only works for years 2000-2099
time += date + '/' + month + '/20' + year
return [time]
def gpvtg_convert(line):
"""Translates $GPVTG line into documented array
Data only used for measuring ground speed
str line - the GPVTG line
returns - the data documented into array
"""
gps = line.strip().split(',')
#check data
if gps[1] == '0.00':
return
#jsondata = {'Horizontal speed': gps[7] + ' kmph or ' + gps[5] + 'knots'}
return []
def gpgga_convert(line):
"""Translates $GPGGPA line into documented array
str line - the GPGGA line
returns - the data documented into array
"""
gps = line.strip().split(',')
#check data
if gps[6] == '0' :
return
fix = ''
if gps[6] == '1':
fix = 'GPS fix'
elif gps[6] == '2':
fix = 'DGPS fix'
elif gps[6] == '4':
fix = 'RTK Fix coordinate (centimeter precision)'
elif gps[6] == '5':
fix = 'RTK Float (decimeter precision)'
#utc = gps[1][0:2] + ':' + gps[1][2:4] + ':' + gps[1][4:6]
lat = ddm_dd_convert(gps[2], gps[3])
long = ddm_dd_convert(gps[4], gps[5])
return [lat, long, fix]
def gpgsa_convert(line):
"""Translates $GPGSA line into documented array
str line - the GPGSA line
returns - the data documented into array
"""
gps = line.strip().split(',')
#check data
if gps[2] == '1':
return
if gps[2] == '2':
fix = '2D fix'
else:
fix = '3D fix'
return [fix]
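
# A minimal usage sketch. The sentence below is synthetic (checksum is a
# placeholder) and its coordinates are padded to four decimal places so the
# fixed-width slicing in ddm_dd_convert holds:
#   line = '$GPGGA,123519,4807.0380,N,01131.0000,E,1,08,0.9,545.4,M,46.9,M,,*47'
#   gpgga_convert(line)  # -> ['48.1173', '011.51666667', 'GPS fix']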
|
normal
|
{
"blob_id": "dc5630e17bb6ed85157b06108250427be41416d1",
"index": 7766,
"step-1": "<mask token>\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\n<mask token>\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\ndef gpvtg_convert(line):\n \"\"\"Translates $GPVTG line into documented array\n Data only used for measuring ground speed\n str line - the GPVTG line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[1] == '0.00':\n return\n return []\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\n<mask token>\n",
"step-3": "def ddm_dd_convert(coord, direction):\n \"\"\"Converts GPS reading from DDM to DD\n str coord - the ddm coordinate from $GPGGA\n str direction - the direction of the coord (N,S,W,E)\n returns - string representation of dd coordinate\n \"\"\"\n value = ''\n if direction == 'S' or direction == 'W':\n value += '-'\n value += coord[0:-7]\n minute = float(coord[-7:])\n decimal = round(minute / 60, 8)\n result = str(decimal)[1:]\n value += result\n return value\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\ndef gpvtg_convert(line):\n \"\"\"Translates $GPVTG line into documented array\n Data only used for measuring ground speed\n str line - the GPVTG line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[1] == '0.00':\n return\n return []\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\n<mask token>\n",
"step-4": "def ddm_dd_convert(coord, direction):\n \"\"\"Converts GPS reading from DDM to DD\n str coord - the ddm coordinate from $GPGGA\n str direction - the direction of the coord (N,S,W,E)\n returns - string representation of dd coordinate\n \"\"\"\n value = ''\n if direction == 'S' or direction == 'W':\n value += '-'\n value += coord[0:-7]\n minute = float(coord[-7:])\n decimal = round(minute / 60, 8)\n result = str(decimal)[1:]\n value += result\n return value\n\n\ndef gprmc_convert(line):\n \"\"\"Translates $GPRMC line into documented array\n str line - the GPRMC line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == 'V':\n return\n raw_date = gps[9]\n time = ''\n date = raw_date[0:2]\n month = raw_date[2:4]\n year = raw_date[4:]\n time += date + '/' + month + '/20' + year\n return [time]\n\n\ndef gpvtg_convert(line):\n \"\"\"Translates $GPVTG line into documented array\n Data only used for measuring ground speed\n str line - the GPVTG line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[1] == '0.00':\n return\n return []\n\n\ndef gpgga_convert(line):\n \"\"\"Translates $GPGGPA line into documented array\n str line - the GPGGA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[6] == '0':\n return\n fix = ''\n if gps[6] == '1':\n fix = 'GPS fix'\n elif gps[6] == '2':\n fix = 'DGPS fix'\n elif gps[6] == '4':\n fix = 'RTK Fix coordinate (centimeter precision)'\n elif gps[6] == '5':\n fix = 'RTK Float (decimeter precision)'\n lat = ddm_dd_convert(gps[2], gps[3])\n long = ddm_dd_convert(gps[4], gps[5])\n return [lat, long, fix]\n\n\ndef gpgsa_convert(line):\n \"\"\"Translates $GPGSA line into documented array\n str line - the GPGSA line\n returns - the data documented into array\n \"\"\"\n gps = line.strip().split(',')\n if gps[2] == '1':\n return\n if gps[2] == '2':\n fix = '2D fix'\n else:\n fix = '3D fix'\n return [fix]\n",
"step-5": "\r\n\r\ndef ddm_dd_convert(coord, direction):\r\n \"\"\"Converts GPS reading from DDM to DD\r\n str coord - the ddm coordinate from $GPGGA\r\n str direction - the direction of the coord (N,S,W,E)\r\n returns - string representation of dd coordinate\r\n \"\"\"\r\n value = ''\r\n if (direction == 'S' or direction == 'W'):\r\n value += '-'\r\n value += coord[0:-7] \r\n minute = float(coord[-7:])\r\n decimal = round(minute / 60, 8)\r\n result = str(decimal)[1:]\r\n value += result\r\n return value\r\n\r\ndef gprmc_convert(line):\r\n \"\"\"Translates $GPRMC line into documented array\r\n str line - the GPRMC line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[2] == 'V':\r\n return\r\n raw_date = gps[9]\r\n time = ''\r\n date = raw_date[0:2]\r\n month = raw_date[2:4]\r\n year = raw_date[4:]\r\n #modify year if reaches year 2100\r\n time += date + '/' + month + '/20' + year\r\n return [time]\r\n\r\n\r\ndef gpvtg_convert(line):\r\n \"\"\"Translates $GPVTG line into documented array\r\n Data only used for measuring ground speed\r\n str line - the GPVTG line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[1] == '0.00': \r\n return\r\n #jsondata = {'Horizontal speed': gps[7] + ' kmph or ' + gps[5] + 'knots'}\r\n return []\r\n\r\n\r\ndef gpgga_convert(line):\r\n \"\"\"Translates $GPGGPA line into documented array\r\n str line - the GPGGA line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[6] == '0' :\r\n return\r\n fix = ''\r\n if gps[6] == '1':\r\n fix = 'GPS fix'\r\n elif gps[6] == '2':\r\n fix = 'DGPS fix'\r\n elif gps[6] == '4':\r\n fix = 'RTK Fix coordinate (centimeter precision)'\r\n elif gps[6] == '5':\r\n fix = 'RTK Float (decimeter precision)'\r\n #utc = gps[1][0:2] + ':' + gps[1][2:4] + ':' + gps[1][4:6]\r\n lat = ddm_dd_convert(gps[2], gps[3])\r\n long = ddm_dd_convert(gps[4], gps[5]) \r\n return [lat, long, fix]\r\n\r\n \r\ndef gpgsa_convert(line):\r\n \"\"\"Translates $GPGSA line into documented array\r\n str line - the GPGSA line\r\n returns - the data documented into array\r\n \"\"\"\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[2] == '1':\r\n return\r\n if gps[2] == '2':\r\n fix = '2D fix'\r\n else:\r\n fix = '3D fix'\r\n return [fix]",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import os
import sys
import string
from array import *
from datetime import datetime
#f = open('input_test.txt', 'r')
f = open('input_task.txt', 'r')
width = 60
height = 5000
sleepingMinutes = [[0 for x in range(width)] for y in range(height)]
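# sleepingMinutes[guard][minute] counts how many recorded nights guard
# #guard was asleep during that minute past midnight.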
infos = []
# Change lines to tuples and store to array for sorting
for line in f:
line = line.rstrip('\n')
line = line.replace('[','')
splitted = line.split(']')
stringTime = splitted[0]
stringTask = splitted[1]
datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')
lineTuple = (datetimeTime, stringTask)
infos.append(lineTuple)
#print(datetimeTime.minute)
# sort the info we have
infosSorted = sorted(infos, key=lambda time: time[0])
#print(infos)
#print(infosSorted)
sleeping = False
for dataPoint in infosSorted:
splitted = dataPoint[1].split(' ')
#print(splitted)
if splitted[1] == 'Guard':
        #print('Guard changed, now on duty: ' + splitted[2])
guard = splitted[2].replace('#','')
if splitted[1] == 'falls':
sleeping = True
sleepingTimeStart = dataPoint[0]
        #print('guard ' + guard + ' fell asleep at ' + str(sleepingTimeStart))
if splitted[1] == 'wakes':
sleeping = False
sleepingTimeStop = dataPoint[0]
sleepingTime = sleepingTimeStop - sleepingTimeStart
        #print('guard ' + guard + ' woke up at ' + str(sleepingTimeStop) + ' after sleeping ' + str(sleepingTime))
for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):
sleepingMinutes[int(guard)][x] += 1
maxVartija = 0
maxMinuutti = 0
maxMinuutit = 0
vartija = 0
for x in sleepingMinutes:
    summa = sum(x)
    minuutti = x.index(max(x))
    #print(x)
    #print('slept ' + str(summa) + ' minutes in total, most of them during minute ' + str(minuutti))
    if maxMinuutit < summa:
        maxVartija = vartija
        maxMinuutti = minuutti
        maxMinuutit = summa
    vartija += 1

print('Guard #' + str(maxVartija) + ' slept the most: ' + str(maxMinuutit) + ' minutes in total, most often during minute ' + str(maxMinuutti))
print('So the answer is ' + str(maxVartija*maxMinuutti))
|
normal
|
{
"blob_id": "293533d07b530be9e8f97f1720619bf6c3113cca",
"index": 9447,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[', '')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = datetimeTime, stringTask\n infos.append(lineTuple)\n<mask token>\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n if splitted[1] == 'Guard':\n guard = splitted[2].replace('#', '')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\n<mask token>\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' +\n str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija * maxMinuutti))\n",
"step-3": "<mask token>\nf = open('input_task.txt', 'r')\nwidth = 60\nheight = 5000\nsleepingMinutes = [[(0) for x in range(width)] for y in range(height)]\ninfos = []\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[', '')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = datetimeTime, stringTask\n infos.append(lineTuple)\ninfosSorted = sorted(infos, key=lambda time: time[0])\nsleeping = False\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n if splitted[1] == 'Guard':\n guard = splitted[2].replace('#', '')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\nmaxVartija = 0\nmaxMinuutti = 0\nmaxMinuutit = 0\nvartija = 0\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' +\n str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija * maxMinuutti))\n",
"step-4": "import os\nimport sys\nimport string\nfrom array import *\nfrom datetime import datetime\nf = open('input_task.txt', 'r')\nwidth = 60\nheight = 5000\nsleepingMinutes = [[(0) for x in range(width)] for y in range(height)]\ninfos = []\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[', '')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = datetimeTime, stringTask\n infos.append(lineTuple)\ninfosSorted = sorted(infos, key=lambda time: time[0])\nsleeping = False\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n if splitted[1] == 'Guard':\n guard = splitted[2].replace('#', '')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\nmaxVartija = 0\nmaxMinuutti = 0\nmaxMinuutit = 0\nvartija = 0\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' +\n str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija * maxMinuutti))\n",
"step-5": "import os\nimport sys\nimport string\nfrom array import *\nfrom datetime import datetime\n\n#f = open('input_test.txt', 'r')\nf = open('input_task.txt', 'r')\n\nwidth = 60\nheight = 5000\nsleepingMinutes = [[0 for x in range(width)] for y in range(height)]\n\ninfos = []\n\n# Change lines to tuples and store to array for sorting\nfor line in f:\n line = line.rstrip('\\n')\n line = line.replace('[','')\n splitted = line.split(']')\n stringTime = splitted[0]\n stringTask = splitted[1]\n datetimeTime = datetime.strptime(stringTime, '%Y-%m-%d %H:%M')\n lineTuple = (datetimeTime, stringTask)\n infos.append(lineTuple)\n #print(datetimeTime.minute)\n\n# sort the info we have\ninfosSorted = sorted(infos, key=lambda time: time[0])\n#print(infos)\n#print(infosSorted)\n\nsleeping = False\n\nfor dataPoint in infosSorted:\n splitted = dataPoint[1].split(' ')\n #print(splitted)\n if splitted[1] == 'Guard':\n #print('Vartija vaihtui, vuorossa: ' + splitted[2])\n guard = splitted[2].replace('#','')\n if splitted[1] == 'falls':\n sleeping = True\n sleepingTimeStart = dataPoint[0]\n #print('vartija ' + guard + ' nukahti hetkellä ' + str(sleepingTimeStart))\n if splitted[1] == 'wakes':\n sleeping = False\n sleepingTimeStop = dataPoint[0]\n sleepingTime = sleepingTimeStop - sleepingTimeStart\n #print('vartija ' + guard + ' heräsi hetkellä ' + str(sleepingTimeStop) + ' nukkuen ' + str(sleepingTime))\n for x in range(sleepingTimeStart.minute, sleepingTimeStop.minute):\n sleepingMinutes[int(guard)][x] += 1\n\nmaxVartija = 0\nmaxMinuutti = 0\nmaxMinuutit = 0\nvartija = 0\n\nfor x in sleepingMinutes:\n summa = sum(x)\n minuutti = x.index(max(x))\n #print(x)\n #print('yhteensä ' + str(summa) + ' nukkui eniten minuutilla ' + str(maxMinuutti))\n if maxVartija < summa:\n maxVartija = vartija\n maxMinuutti = minuutti\n maxMinuutit = summa\n vartija += 1\n\nprint('Eniten nukkui vartija #' + str(maxVartija) + ' nukkuen yhteensä ' + str(maxMinuutit) + ' minuuttia ja eniten minuutilla ' + str(maxMinuutti))\nprint('Vastaus on siis ' + str(maxVartija*maxMinuutti))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from odoo import models,fields, api
class director(models.Model):
    # Classic inheritance from the shared base entity
    _inherit = 'base.entidad'
    _name = 'cinemateca.director'
    name = fields.Char(string="name", required=True, help="Director's first name")
    apellidos = fields.Char(string="apellidos", required=True, help="Director's last name(s)")
    pelicula_ids = fields.One2many("cinemateca.pelicula", "director_id", string="sesion")
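
    # A sketch of the inverse side this One2many assumes: the
    # cinemateca.pelicula model (defined elsewhere) needs a matching
    # Many2one, e.g.:
    #   director_id = fields.Many2one("cinemateca.director", string="director")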
|
normal
|
{
"blob_id": "006f499eed7cd5d73bb0cb9b242c90726fff35c1",
"index": 3185,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass director(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass director(models.Model):\n _inherit = 'base.entidad'\n _name = 'cinemateca.director'\n name = fields.Char(string='name', required=True, help='Nombre del director'\n )\n apellidos = fields.Char(string='apellidos', required=True, help=\n 'Apellidos del director')\n pelicula_ids = fields.One2many('cinemateca.pelicula', 'director_id',\n string='sesion')\n",
"step-4": "from odoo import models, fields, api\n\n\nclass director(models.Model):\n _inherit = 'base.entidad'\n _name = 'cinemateca.director'\n name = fields.Char(string='name', required=True, help='Nombre del director'\n )\n apellidos = fields.Char(string='apellidos', required=True, help=\n 'Apellidos del director')\n pelicula_ids = fields.One2many('cinemateca.pelicula', 'director_id',\n string='sesion')\n",
"step-5": "from odoo import models,fields, api\n\nclass director(models.Model):\n #Clasica\n _inherit = 'base.entidad'\n _name = 'cinemateca.director'\n name = fields.Char(string=\"name\", required=True, help=\"Nombre del director\")\n apellidos = fields.Char(string=\"apellidos\", required=True, help=\"Apellidos del director\")\n pelicula_ids = fields.One2many(\"cinemateca.pelicula\", \"director_id\", string=\"sesion\")",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
'''Lab01 ex4
E/16/319 Rathnayake R.P.V.N'''
from dataclasses import asdict
from json import dumps
from dataclasses import dataclass
from typing import List, Dict
import json
import ex1	#import ex1 to get the load_course_registrations function
s1=ex1.load_course_registrations("data.txt")	#load the list of Student objects into s1
s1=(map(asdict,s1))	#apply asdict() to each element by using the map function
e=json.dumps(list(s1))	#convert into a JSON string
#print(e)
with open("student_registrations.json","w") as f:	#open the JSON output file and write to it
	f.write(e)
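
#For reference, ex1 is expected to return a list of dataclass instances.
#A hypothetical shape (field names are illustrative only, not taken from ex1):
#	@dataclass
#	class Student:
#		name: str
#		courses: List[str]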
|
normal
|
{
"blob_id": "8a5ade450485f9114fa91c00c7588535ccbaf0e6",
"index": 1923,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('student_registrations.json', 'w') as f:\n f.write(e)\n",
"step-3": "<mask token>\ns1 = ex1.load_course_registrations('data.txt')\ns1 = map(asdict, s1)\ne = json.dumps(list(s1))\nwith open('student_registrations.json', 'w') as f:\n f.write(e)\n",
"step-4": "<mask token>\nfrom dataclasses import asdict\nfrom json import dumps\nfrom dataclasses import dataclass\nfrom typing import List, Dict\nimport json\nimport ex1\ns1 = ex1.load_course_registrations('data.txt')\ns1 = map(asdict, s1)\ne = json.dumps(list(s1))\nwith open('student_registrations.json', 'w') as f:\n f.write(e)\n",
"step-5": "'''Lab01 ex4\n\tE/16/319 Rathnayake R.P.V.N'''\nfrom dataclasses import asdict\nfrom json import dumps\nfrom dataclasses import dataclass\nfrom typing import List, Dict\nimport json\nimport ex1\t\t#import the ex1 to get the lord_course_registraion function\n\n\ns1=ex1.load_course_registrations(\"data.txt\")\t#lord the list of Student object in to the s1\ns1=(map(asdict,s1))\t\t\t\t\t\t\t\t#aply asdict() to s1 my useng the map function\n\ne=json.dumps(list(s1))\t\t\t\t\t\t\t#convert into jsom=n string\n#print(e)\nwith open(\"student_registrations.json\",\"w\") as f:\t\t#open json file and write on it\n\tf.write(e)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
from src.model_manager import ModelManager
dir_path = os.path.dirname(os.path.realpath(__file__))
config_file = '{}/data/config/config_1.json'.format(dir_path)
model_dir = '{}/data/models'.format(dir_path)
def test_init():
mm = ModelManager(config_file, model_dir)
def test_predict():
pass
|
normal
|
{
"blob_id": "5da61b4cd8e4faf135b49396d3b346a219bf73f6",
"index": 3851,
"step-1": "<mask token>\n\n\ndef test_predict():\n pass\n",
"step-2": "<mask token>\n\n\ndef test_init():\n mm = ModelManager(config_file, model_dir)\n\n\ndef test_predict():\n pass\n",
"step-3": "<mask token>\ndir_path = os.path.dirname(os.path.realpath(__file__))\nconfig_file = '{}/data/config/config_1.json'.format(dir_path)\nmodel_dir = '{}/data/models'.format(dir_path)\n\n\ndef test_init():\n mm = ModelManager(config_file, model_dir)\n\n\ndef test_predict():\n pass\n",
"step-4": "import os\nfrom src.model_manager import ModelManager\ndir_path = os.path.dirname(os.path.realpath(__file__))\nconfig_file = '{}/data/config/config_1.json'.format(dir_path)\nmodel_dir = '{}/data/models'.format(dir_path)\n\n\ndef test_init():\n mm = ModelManager(config_file, model_dir)\n\n\ndef test_predict():\n pass\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |