"""AOC Day 13"""
import pathlib
import time
TEST_INPUT = """6,10
0,14
9,10
0,3
10,4
4,11
6,0
6,12
4,1
0,13
10,12
3,4
3,0
8,4
1,10
2,14
8,10
9,0

fold along y=7
fold along x=5"""
def read_input(input_path: str) -> str:
"""take input file path and return a str with the file's content"""
with open(input_path, 'r') as input_file:
input_data = input_file.read().strip()
return input_data
def extract(input_data: str) -> tuple:
"""take input data and return the appropriate data structure"""
sheet = set()
folds = list()
s_instr, f_instr = input_data.split('\n\n')
for line in s_instr.split('\n'):
sheet.add(tuple(map(int, line.split(','))))
for line in f_instr.split('\n'):
equal_pos = line.index('=')
folds.append((line[equal_pos-1], int(line[equal_pos+1:])))
return (sheet, folds)
def fold(sheet: set, direction: str, axis: int) -> set:
    """reflect every point past the fold line onto the kept half:
    a coordinate c beyond the axis maps to 2 * axis - c"""
    folded = set()
for x, y in sheet:
if direction == 'x' and x > axis:
x = 2 * axis - x
elif direction == 'y' and y > axis:
y = 2 * axis - y
folded.add((x, y))
return folded
def part1(entries: tuple) -> int:
"""part1 solver take the entries and return the part1 solution"""
direction, axis = entries[1][0]
sheet = fold(entries[0], direction, axis)
return len(sheet)
def part2(entries: tuple) -> str:
"""part2 solver take the entries and return the part2 solution"""
sheet = entries[0]
fold_instructions = entries[1]
for direction, axis in fold_instructions:
sheet = fold(sheet, direction, axis)
max_x = max(p[0] for p in sheet)
max_y = max(p[1] for p in sheet)
out = ''
for y in range(max_y + 1):
for x in range(max_x + 1):
out += '#' if (x, y) in sheet else ' '
out += '\n'
return out
def test_input_day_13():
"""pytest testing function"""
entries = extract(TEST_INPUT)
assert part1(entries) == 17
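# Added check: the puzzle's example input is documented to fold into a 5x5
# square outline after both folds; rendered with ' ' for empty cells, as
# part2 produces.
def test_input_day_13_part2():
    """pytest testing function for part 2"""
    entries = extract(TEST_INPUT)
    expected = ('#####\n'
                '#   #\n'
                '#   #\n'
                '#   #\n'
                '#####\n')
    assert part2(entries) == expected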
def test_bench_day_13(benchmark):
"""pytest-benchmark function"""
benchmark(main)
def main():
"""main function"""
input_path = str(pathlib.Path(__file__).resolve().parent.parent) + "/inputs/" + str(pathlib.Path(__file__).stem)
start_time = time.time()
input_data = read_input(input_path)
entries = extract(input_data)
print("Part 1: %d" % part1(entries))
print("Part 2:\n%s" % part2(entries))
end_time = time.time()
print("Execution time: %f" % (end_time-start_time))
if __name__ == "__main__":
main()
# Generated by Django 3.2 on 2021-05-22 06:54
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Recuerdos',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('titulo_evento', models.CharField(blank=True, max_length=100, null=True)),
('foto1', models.ImageField(blank=True, null=True, upload_to='recuerdos')),
('foto2', models.ImageField(blank=True, null=True, upload_to='recuerdos')),
('foto3', models.ImageField(blank=True, null=True, upload_to='recuerdos')),
('created', models.DateTimeField(auto_now_add=True)),
],
options={
'verbose_name': 'Recuerdo',
                'verbose_name_plural': 'recuerdos',
},
),
]
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
import pylab as pl
import pymc as mc
import book_graphics
reload(book_graphics)
# <markdowncell>
# Uniform points in an $n$-dimensional ball
# =========================================
#
# This notebook implements and compares samplers in PyMC
# to sample uniformly from an $n$-dimensional ball,
# i.e. to sample from the set
# $$
# \mathbf{B}_n = \\{x \in \mathbf{R}^n: \|x\|\leq 1\\}
# $$
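# <markdowncell>

# For reference, a minimal numpy rejection-sampling sketch of the target
# distribution (not part of the sampler comparison): draw from the bounding
# cube and keep points whose norm is at most 1.

# <codecell>

import numpy as np

def rejection_sample_ball(n_dim, n_samples, rng=np.random):
    """Uniform samples from B_n by rejection from the cube [-1, 1]^n."""
    samples = []
    while len(samples) < n_samples:
        x = rng.uniform(-1., 1., size=n_dim)  # uniform on the cube
        if x.dot(x) <= 1.:  # accept only points inside the ball
            samples.append(x)
    return np.array(samples)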
# <codecell>
mc.np.random.seed(1234567)
# simple model
n = 2
X = [mc.Uninformative('X_%d'%i, value=0) for i in range(n)]
@mc.potential
def in_ball(X=X):
if X[0]**2 + X[1]**2 <= 1.:
return 0
else:
return -pl.inf
# <codecell>
class UniformBall(mc.Gibbs):
    """Gibbs step method: conditional on the other coordinates, X_i is
    uniform on [-sqrt(1 - sum_j x_j^2), +sqrt(1 - sum_j x_j^2)], so each
    proposal below is an exact draw from the full conditional."""
    def __init__(self, stochastic, others, verbose=None):
        self.others = others
        self.conjugate = True # pymc will include a Metropolis rejection step on top of the proposal if this is false
        mc.Gibbs.__init__(self, stochastic, verbose)
def propose(self):
x_other = [X_i.value for X_i in self.others]
max_val = pl.sqrt(1. - pl.dot(x_other, x_other))
self.stochastic.value = mc.runiform(-max_val, max_val)
# <codecell>
m = mc.MCMC([X, in_ball])
for i in range(n):
m.use_step_method(UniformBall, X[i], [X[j] for j in range(n) if j != i])
# <codecell>
m.sample(100, progress_bar=False)
# <codecell>
def plot_trace(X, scale=1., angle=0.):
fig = pl.figure(figsize=(12,4.75))
ax1 = fig.add_subplot(1, 2, 1)
# plot boundary
t = pl.arange(0,2*pl.pi,.01)
ax1.plot(pl.cos(angle)*pl.cos(t) - pl.sin(angle)*pl.sin(t)/scale, pl.cos(angle)*pl.sin(t)/scale + pl.sin(angle)*pl.cos(t), 'k:')
# plot samples
if isinstance(X, mc.Stochastic):
tr = [X.trace()[:,0], X.trace()[:,1]]
else:
tr = [X[0].trace(), X[1].trace()]
ax1.plot(tr[0], tr[1], 'ko-')
# decorate plot
book_graphics.set_font()
pl.xlabel('$X_1$')
pl.ylabel('$X_2$', rotation=0)
pl.axis([-1.1,1.1,-1.1,1.1])
pl.text(-1,1,'(a)', fontsize=16, va='top', ha='left')
for i in range(2):
if i == 0:
ax2 = fig.add_subplot(2, 4, 3+4*i)
ax2.plot(tr[i], 'k', drawstyle='steps-mid')
else:
ax2a = fig.add_subplot(2, 4, 3+4*i, sharex=ax2)
ax2a.plot(tr[i], 'k', drawstyle='steps-mid')
pl.xlabel('Sample')
pl.xticks([25,50,75])
pl.yticks([-.5,0,.5])
pl.ylabel('$X_%d$'%(i+1), rotation=0)
pl.axis([-5,105,-1.5,1.5])
pl.text(-1,1.25,'(%s)'%'bc'[i], fontsize=16, va='top', ha='left')
if i == 0:
ax3 = fig.add_subplot(2, 4, 4+4*i)
ax3.acorr(tr[i].reshape(100), color='k')
else:
ax3a = fig.add_subplot(2, 4, 4+4*i, sharex=ax3)
ax3a.acorr(tr[i].reshape(100), color='k')
pl.xlabel('Autocorrelation')
pl.xticks([-5,0,5])
pl.yticks([0., .5, 1])
pl.axis([-12,12,-.1,1.1])
pl.text(-10,1,'(%s)'%'de'[i], fontsize=16, va='top', ha='left')
pl.setp(ax2.get_xticklabels(), visible=False)
pl.setp(ax3.get_xticklabels(), visible=False)
pl.subplots_adjust(wspace=.55, hspace=.1, bottom=.14,left=.13)
# <codecell>
plot_trace(X, 1, 0.)
pl.savefig('book/graphics/gibbs-ball.pdf')
# <markdowncell>
# Now with the Metropolis sampler
# ---------------------------------
# <codecell>
mc.np.random.seed(123456789)
# <codecell>
# simple model
n = 2
X = mc.Uninformative('X', value=[0,0])
@mc.potential
def in_ball(X=X, s=3., t=pl.pi/4.):
if (pl.cos(t)*X[0] + pl.sin(t)*X[1])**2 + s**2*(pl.cos(t)*X[1] -pl.sin(t)*X[0])**2 <= 1.:
return 0
else:
return -pl.inf
m = mc.MCMC([X, in_ball])
m.sample(100, progress_bar=False)
# <codecell>
plot_trace(X, 3, pl.pi/4)
pl.savefig('book/graphics/metropolis-ball.pdf')
# <markdowncell>
# Now with Adaptive Metropolis
# <codecell>
mc.np.random.seed(1234567)
# simple model
n = 2
X = mc.Uninformative('X', value=[0,0])
@mc.potential
def in_ball(X=X, s=3., t=pl.pi/4):
if (pl.cos(t)*X[0] + pl.sin(t)*X[1])**2 + s**2*(pl.cos(t)*X[1] -pl.sin(t)*X[0])**2 <= 1.:
return 0
else:
return -pl.inf
m = mc.MCMC([X, in_ball])
m.use_step_method(mc.AdaptiveMetropolis, X)
# <codecell>
m.sample(100, progress_bar=False)
plot_trace(X, 3, pl.pi/4)
pl.savefig('book/graphics/am-ball-1.pdf')
# <codecell>
m.sample(iter=20100, burn=20000, progress_bar=False)
plot_trace(X, 3, pl.pi/4)
pl.savefig('book/graphics/am-ball-2.pdf')
pl.show()
# p.85 (character frequency counting)
message = \
'It was a bright cold day in April, and the clocks were striking thirteen.'
print(message, type(message))
msg_dict = dict()  # create an empty dict
for msg in message:
print(msg, message.count(msg))
msg_dict[msg] = message.count(msg)
print(msg_dict)
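# Note: message.count(msg) rescans the whole string for every character, so
# the loop above is O(n^2) and re-counts repeated letters. As a sketch,
# collections.Counter builds the same mapping in a single pass:
from collections import Counter

msg_counter = Counter(message)
print(dict(msg_counter))  # same counts as msg_dict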
n = int(input())
a = sorted([int(input()) for _ in range(n)])

# Split the sorted values into a lower half x and an upper half y;
# the middle element (odd n) is handled separately below.
x = a[:n//2]
y = a[(n + 1)//2:]

# Pair each small value with its large neighbours and accumulate the
# absolute gaps.
ans = 0
for i in range(len(x)):
    ans += abs(x[i] - y[i])
for i in range(1, len(y)):
    ans += abs(x[i - 1] - y[i])
if n % 2 == 1:
    # Place the median at whichever end yields the larger gap.
    ans += max(
        abs(a[n // 2] - x[-1]),
        abs(a[n // 2] - y[0]),
    )
print(ans)
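# The greedy above matches the classic pattern for maximizing the sum of
# absolute differences between adjacent elements after reordering; assuming
# that reading, a brute force over all orderings can verify small cases:
from itertools import permutations

def brute_force(values):
    """Max over all orderings of sum(abs(b[i] - b[i+1]))."""
    return max(sum(abs(p[i] - p[i + 1]) for i in range(len(p) - 1))
               for p in permutations(values))
# e.g. brute_force([1, 6, 3, 7, 4]) should equal the greedy answer when the
# same five values are fed to the code above.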
from pyNastran.bdf.fieldWriter import print_card, set_blank_if_default
from pyNastran.bdf.bdfInterface.assign_type import (integer, integer_or_blank,
    double_or_blank, string_or_blank)
class NLPARM(object):
"""
Defines a set of parameters for nonlinear static analysis iteration
strategy.
+--------+--------+------+------+---------+-------+---------+---------+--------+
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
+========+========+======+======+=========+=======+=========+=========+========+
| NLPARM | ID | NINC | DT | KMETHOD | KSTEP | MAXITER | CONV | INTOUT |
+--------+--------+------+------+---------+-------+---------+---------+--------+
| | ESPU | EPSP | EPSW | MAXDIV | MAXQN | MAXLS | FSTRESS | LSTOL |
+--------+--------+------+------+---------+-------+---------+---------+--------+
| | MAXBIS | | | | MAXR | | RTOLB | CONV |
+--------+--------+------+------+---------+-------+---------+---------+--------+
"""
type = 'NLPARM'
def __init__(self):
pass
def add(self, card=None, comment=''):
if comment:
self._comment = comment
self.nlparm_id = integer(card, 1, 'nlparm_id')
self.ninc = integer_or_blank(card, 2, 'ninc', 10)
self.dt = double_or_blank(card, 3, 'dt', 0.0)
self.kMethod = string_or_blank(card, 4, 'kMethod', 'AUTO')
self.kStep = integer_or_blank(card, 5, 'kStep', 5)
self.maxIter = integer_or_blank(card, 6, 'maxIter', 25)
self.conv = string_or_blank(card, 7, 'conv', 'PW')
self.intOut = string_or_blank(card, 8, 'intOut', 'NO')
# line 2
self.epsU = double_or_blank(card, 9, 'epsU', 0.01)
self.epsP = double_or_blank(card, 10, 'epsP', 0.01)
self.epsW = double_or_blank(card, 11, 'epsW', 0.01)
self.maxDiv = integer_or_blank(card, 12, 'maxDiv', 3)
if self.kMethod == 'PFNT':
self.maxQn = integer_or_blank(card, 13, 'maxQn', 0)
else:
self.maxQn = integer_or_blank(card, 13, 'maxQn', self.maxIter)
self.maxLs = integer_or_blank(card, 14, 'maxLs', 4)
self.fStress = double_or_blank(card, 15, 'fStress', 0.2)
self.lsTol = double_or_blank(card, 16, 'lsTol', 0.5)
# line 3
        self.maxBisect = integer_or_blank(card, 17, 'maxBisect', 5)
self.maxR = double_or_blank(card, 21, 'maxR', 20.)
self.rTolB = double_or_blank(card, 23, 'rTolB', 20.)
assert len(card) <= 24, 'len(NLPARM card) = %i' % len(card)
def raw_fields(self):
list_fields = ['NLPARM', self.nlparm_id, self.ninc, self.dt, self.kMethod,
self.kStep, self.maxIter, self.conv, self.intOut, self.epsU,
self.epsP, self.epsW, self.maxDiv, self.maxQn, self.maxLs,
self.fStress, self.lsTol, self.maxBisect, None, None, None,
self.maxR, None, self.rTolB]
return list_fields
def repr_fields(self):
ninc = set_blank_if_default(self.ninc, 10)
dt = set_blank_if_default(self.dt, 0.0)
kMethod = set_blank_if_default(self.kMethod, 'AUTO')
kStep = set_blank_if_default(self.kStep, 5)
maxIter = set_blank_if_default(self.maxIter, 25)
conv = set_blank_if_default(self.conv, 'PW')
intOut = set_blank_if_default(self.intOut, 'NO')
epsU = set_blank_if_default(self.epsU, 0.01)
epsP = set_blank_if_default(self.epsP, 0.01)
epsW = set_blank_if_default(self.epsW, 0.01)
maxDiv = set_blank_if_default(self.maxDiv, 3)
maxQn = set_blank_if_default(self.maxQn, self.maxIter)
maxLs = set_blank_if_default(self.maxLs, 4)
fStress = set_blank_if_default(self.fStress, 0.2)
lsTol = set_blank_if_default(self.lsTol, 0.5)
maxBisect = set_blank_if_default(self.maxBisect, 5)
maxR = set_blank_if_default(self.maxR, 20.)
rTolB = set_blank_if_default(self.rTolB, 20.)
list_fields = ['NLPARM', self.nlparm_id, ninc, dt, kMethod, kStep, maxIter,
conv, intOut, epsU, epsP, epsW, maxDiv, maxQn, maxLs,
fStress, lsTol, maxBisect, None, None, None, maxR, None,
rTolB]
return list_fields
def write_bdf(self, f, size=8):
card = self.raw_fields()
f.write(print_card(card, size=size))
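# A minimal usage sketch, assuming the parser helpers accept any
# BDFCard-like object exposing .field(n) and __len__; the stub below is an
# illustration only, not pyNastran's real BDFCard.
if __name__ == '__main__':  # pragma: no cover
    import sys

    class _FakeCard(object):
        """Stand-in for pyNastran's BDFCard (hypothetical, for this demo)."""
        def __init__(self, fields):
            self.fields = fields

        def field(self, n, default=None):
            return self.fields[n] if n < len(self.fields) else default

        def __len__(self):
            return len(self.fields)

    nlparm = NLPARM()
    nlparm.add(_FakeCard(['NLPARM', 10, 25]))  # nlparm_id=10, ninc=25
    nlparm.write_bdf(sys.stdout)  # echoes the card with defaults filled in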
import pandas as pd
import numpy as np
df1 = pd.DataFrame(np.ones((3, 4))*0, columns=['a', 'b', 'c', 'd'])
df2 = pd.DataFrame(np.ones((3, 4))*1, columns=['a', 'b', 'c', 'd'])
df3 = pd.DataFrame(np.ones((3, 4))*2, columns=['a', 'b', 'c', 'd'])
# vertical (row-wise) concatenation
# ignore_index renumbers the row index
res1 = pd.concat([df1, df2, df3], axis=0, ignore_index=True)
print(res1)
df4 = pd.DataFrame(np.ones((3, 4))*0, columns=['a', 'b', 'c', 'd'], index=[1, 2, 3])
df5 = pd.DataFrame(np.ones((3, 4))*1, columns=['b', 'c', 'd', 'e'], index=[2, 3, 4])
# join='inner' keeps only the columns shared by both frames
res2 = pd.concat([df4, df5], join='inner', ignore_index=True)
print(res2)
# default is join='outer', which fills missing columns with NaN
res3 = pd.concat([df4, df5], join='outer', ignore_index=True)
print(res3)
# column-wise concatenation (join_axes was removed in pandas 1.0; see the sketch below)
res4 = pd.concat([df1, df2], axis=1, join_axes=[df1.index])
print(res4)
# append rows with DataFrame.append (deprecated since pandas 1.4, removed in 2.0)
res5 = df1.append([df2, df3], ignore_index=True)
print(res5)
# append a single Series as one extra row
s1 = pd.Series([1, 2, 3, 4], index=['a', 'b', 'c', 'd'])
res6 = df1.append(s1, ignore_index=True)
print(res6)
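
# Editor's sketch (not part of the original example): join_axes was removed in
# pandas 1.0 and DataFrame.append in pandas 2.0, so on current pandas the calls
# above translate to:
res4_modern = pd.concat([df1, df2], axis=1).reindex(df1.index)       # replaces join_axes=[df1.index]
res5_modern = pd.concat([df1, df2, df3], ignore_index=True)          # replaces df1.append([df2, df3])
res6_modern = pd.concat([df1, s1.to_frame().T], ignore_index=True)   # replaces df1.append(s1)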
|
normal
|
{
"blob_id": "0a38cf6e0518a08895ed7155069aa2257c7b352e",
"index": 4662,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(res1)\n<mask token>\nprint(res2)\n<mask token>\nprint(res3)\n<mask token>\nprint(res4)\n<mask token>\nprint(res5)\n<mask token>\nprint(res6)\n",
"step-3": "<mask token>\ndf1 = pd.DataFrame(np.ones((3, 4)) * 0, columns=['a', 'b', 'c', 'd'])\ndf2 = pd.DataFrame(np.ones((3, 4)) * 1, columns=['a', 'b', 'c', 'd'])\ndf3 = pd.DataFrame(np.ones((3, 4)) * 2, columns=['a', 'b', 'c', 'd'])\nres1 = pd.concat([df1, df2, df3], axis=0, ignore_index=True)\nprint(res1)\ndf4 = pd.DataFrame(np.ones((3, 4)) * 0, columns=['a', 'b', 'c', 'd'], index\n =[1, 2, 3])\ndf5 = pd.DataFrame(np.ones((3, 4)) * 1, columns=['b', 'c', 'd', 'e'], index\n =[2, 3, 4])\nres2 = pd.concat([df4, df5], join='inner', ignore_index=True)\nprint(res2)\nres3 = pd.concat([df4, df5], join='outer', ignore_index=True)\nprint(res3)\nres4 = pd.concat([df1, df2], axis=1, join_axes=[df1.index])\nprint(res4)\nres5 = df1.append([df2, df3], ignore_index=True)\nprint(res5)\ns1 = pd.Series([1, 2, 3, 4], index=['a', 'b', 'c', 'd'])\nres6 = df1.append(s1, ignore_index=True)\nprint(res6)\n",
"step-4": "import pandas as pd\nimport numpy as np\ndf1 = pd.DataFrame(np.ones((3, 4)) * 0, columns=['a', 'b', 'c', 'd'])\ndf2 = pd.DataFrame(np.ones((3, 4)) * 1, columns=['a', 'b', 'c', 'd'])\ndf3 = pd.DataFrame(np.ones((3, 4)) * 2, columns=['a', 'b', 'c', 'd'])\nres1 = pd.concat([df1, df2, df3], axis=0, ignore_index=True)\nprint(res1)\ndf4 = pd.DataFrame(np.ones((3, 4)) * 0, columns=['a', 'b', 'c', 'd'], index\n =[1, 2, 3])\ndf5 = pd.DataFrame(np.ones((3, 4)) * 1, columns=['b', 'c', 'd', 'e'], index\n =[2, 3, 4])\nres2 = pd.concat([df4, df5], join='inner', ignore_index=True)\nprint(res2)\nres3 = pd.concat([df4, df5], join='outer', ignore_index=True)\nprint(res3)\nres4 = pd.concat([df1, df2], axis=1, join_axes=[df1.index])\nprint(res4)\nres5 = df1.append([df2, df3], ignore_index=True)\nprint(res5)\ns1 = pd.Series([1, 2, 3, 4], index=['a', 'b', 'c', 'd'])\nres6 = df1.append(s1, ignore_index=True)\nprint(res6)\n",
"step-5": "import pandas as pd\nimport numpy as np\n\n\ndf1 = pd.DataFrame(np.ones((3, 4))*0, columns=['a', 'b', 'c', 'd'])\ndf2 = pd.DataFrame(np.ones((3, 4))*1, columns=['a', 'b', 'c', 'd'])\ndf3 = pd.DataFrame(np.ones((3, 4))*2, columns=['a', 'b', 'c', 'd'])\n\n# 竖向合并\n# ignore_index对行索引重新排序\nres1 = pd.concat([df1, df2, df3], axis=0, ignore_index=True)\nprint(res1)\n\ndf4 = pd.DataFrame(np.ones((3, 4))*0, columns=['a', 'b', 'c', 'd'], index=[1, 2, 3])\ndf5 = pd.DataFrame(np.ones((3, 4))*1, columns=['b', 'c', 'd', 'e'], index=[2, 3, 4])\n# inner选择相同部分拼接\nres2 = pd.concat([df4, df5], join='inner', ignore_index=True)\nprint(res2)\n# 默认为outer,补充NaN\nres3 = pd.concat([df4, df5], join='outer', ignore_index=True)\nprint(res3)\n\n# 按列合并\nres4 = pd.concat([df1, df2], axis=1, join_axes=[df1.index])\nprint(res4)\n\n# append增加数据\nres5 = df1.append([df2, df3], ignore_index=True)\nprint(res5)\n\n# 单独加一组数据\ns1 = pd.Series([1, 2, 3, 4], index=['a', 'b', 'c', 'd'])\nres6 = df1.append(s1, ignore_index=True)\nprint(res6)\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import logging
from utils import Utils
from block import Block
from message import Message
from transaction import Transaction
class Response:
def __init__(self, node, data):
self.node = node
self.data = data
self.selector()
def selector(self):
if self.data['flag'] == 1:
self.chain_size()
elif self.data['flag'] == 2:
self.chain_sync()
elif self.data['flag'] == 3:
if isinstance(self.data['content'], bool):
                self.append_new_block()  # note: not defined in this class; presumably provided elsewhere
else:
self.new_block()
else:
self.new_transaction()
def chain_size(self):
server_chain_size = self.node.get_ledger_size()
self.return_response(1, server_chain_size)
def chain_sync(self):
u = Utils()
blocks = [u.dict_to_json(block) for block in self.node.get_server_ledger()]
self.return_response(2, blocks)
def new_block(self):
b = Block()
block = self.data['content'][0]
if not self.node.get_server_ledger():
# Server has no chain, cannot validate previous hash
logging.error('{0}Peer #{1}: cannot validate blocks! Authorizing!'.format(self.node.type,self.node.index))
self.return_response(3, block)
else:
if b.validate(block):
self.node.server.write_message('announce', 1, block['index'])
self.node.add_block(block)
self.return_response(3, block)
else:
self.node.server.write_message('announce', 2, block['index'])
self.return_response(3)
def new_transaction(self):
t = Transaction()
tx = self.data['content'][0][0]
if t.validate(tx):
self.node.server.shared_tx.append(tx)
self.return_response(4, tx)
else:
self.return_response(4)
def return_response(self, flag, content=None):
m = Message()
response = m.create('response', flag, [content])
self.node.send(response)
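
# Editor's usage sketch (hypothetical stub node, not part of the original project;
# assumes the message module imported above is available). It exercises the flag-1
# path, which reports this peer's chain size back to the caller.
if __name__ == '__main__':
    class _StubNode:
        def get_ledger_size(self):
            return 0

        def send(self, message):
            print('would send:', message)

    Response(_StubNode(), {'flag': 1, 'content': None})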
|
normal
|
{
"blob_id": "55b8590410bfe8f12ce3b52710238a79d27189a7",
"index": 5125,
"step-1": "<mask token>\n\n\nclass Response:\n <mask token>\n <mask token>\n\n def chain_size(self):\n server_chain_size = self.node.get_ledger_size()\n self.return_response(1, server_chain_size)\n\n def chain_sync(self):\n u = Utils()\n blocks = [u.dict_to_json(block) for block in self.node.\n get_server_ledger()]\n self.return_response(2, blocks)\n <mask token>\n\n def new_transaction(self):\n t = Transaction()\n tx = self.data['content'][0][0]\n if t.validate(tx):\n self.node.server.shared_tx.append(tx)\n self.return_response(4, tx)\n else:\n self.return_response(4)\n\n def return_response(self, flag, content=None):\n m = Message()\n response = m.create('response', flag, [content])\n self.node.send(response)\n",
"step-2": "<mask token>\n\n\nclass Response:\n <mask token>\n <mask token>\n\n def chain_size(self):\n server_chain_size = self.node.get_ledger_size()\n self.return_response(1, server_chain_size)\n\n def chain_sync(self):\n u = Utils()\n blocks = [u.dict_to_json(block) for block in self.node.\n get_server_ledger()]\n self.return_response(2, blocks)\n\n def new_block(self):\n b = Block()\n block = self.data['content'][0]\n if not self.node.get_server_ledger():\n logging.error('{0}Peer #{1}: cannot validate blocks! Authorizing!'\n .format(self.node.type, self.node.index))\n self.return_response(3, block)\n elif b.validate(block):\n self.node.server.write_message('announce', 1, block['index'])\n self.node.add_block(block)\n self.return_response(3, block)\n else:\n self.node.server.write_message('announce', 2, block['index'])\n self.return_response(3)\n\n def new_transaction(self):\n t = Transaction()\n tx = self.data['content'][0][0]\n if t.validate(tx):\n self.node.server.shared_tx.append(tx)\n self.return_response(4, tx)\n else:\n self.return_response(4)\n\n def return_response(self, flag, content=None):\n m = Message()\n response = m.create('response', flag, [content])\n self.node.send(response)\n",
"step-3": "<mask token>\n\n\nclass Response:\n\n def __init__(self, node, data):\n self.node = node\n self.data = data\n self.selector()\n\n def selector(self):\n if self.data['flag'] == 1:\n self.chain_size()\n elif self.data['flag'] == 2:\n self.chain_sync()\n elif self.data['flag'] == 3:\n if isinstance(self.data['content'], bool):\n self.append_new_block()\n else:\n self.new_block()\n else:\n self.new_transaction()\n\n def chain_size(self):\n server_chain_size = self.node.get_ledger_size()\n self.return_response(1, server_chain_size)\n\n def chain_sync(self):\n u = Utils()\n blocks = [u.dict_to_json(block) for block in self.node.\n get_server_ledger()]\n self.return_response(2, blocks)\n\n def new_block(self):\n b = Block()\n block = self.data['content'][0]\n if not self.node.get_server_ledger():\n logging.error('{0}Peer #{1}: cannot validate blocks! Authorizing!'\n .format(self.node.type, self.node.index))\n self.return_response(3, block)\n elif b.validate(block):\n self.node.server.write_message('announce', 1, block['index'])\n self.node.add_block(block)\n self.return_response(3, block)\n else:\n self.node.server.write_message('announce', 2, block['index'])\n self.return_response(3)\n\n def new_transaction(self):\n t = Transaction()\n tx = self.data['content'][0][0]\n if t.validate(tx):\n self.node.server.shared_tx.append(tx)\n self.return_response(4, tx)\n else:\n self.return_response(4)\n\n def return_response(self, flag, content=None):\n m = Message()\n response = m.create('response', flag, [content])\n self.node.send(response)\n",
"step-4": "import logging\nfrom utils import Utils\nfrom block import Block\nfrom message import Message\nfrom transaction import Transaction\n\n\nclass Response:\n\n def __init__(self, node, data):\n self.node = node\n self.data = data\n self.selector()\n\n def selector(self):\n if self.data['flag'] == 1:\n self.chain_size()\n elif self.data['flag'] == 2:\n self.chain_sync()\n elif self.data['flag'] == 3:\n if isinstance(self.data['content'], bool):\n self.append_new_block()\n else:\n self.new_block()\n else:\n self.new_transaction()\n\n def chain_size(self):\n server_chain_size = self.node.get_ledger_size()\n self.return_response(1, server_chain_size)\n\n def chain_sync(self):\n u = Utils()\n blocks = [u.dict_to_json(block) for block in self.node.\n get_server_ledger()]\n self.return_response(2, blocks)\n\n def new_block(self):\n b = Block()\n block = self.data['content'][0]\n if not self.node.get_server_ledger():\n logging.error('{0}Peer #{1}: cannot validate blocks! Authorizing!'\n .format(self.node.type, self.node.index))\n self.return_response(3, block)\n elif b.validate(block):\n self.node.server.write_message('announce', 1, block['index'])\n self.node.add_block(block)\n self.return_response(3, block)\n else:\n self.node.server.write_message('announce', 2, block['index'])\n self.return_response(3)\n\n def new_transaction(self):\n t = Transaction()\n tx = self.data['content'][0][0]\n if t.validate(tx):\n self.node.server.shared_tx.append(tx)\n self.return_response(4, tx)\n else:\n self.return_response(4)\n\n def return_response(self, flag, content=None):\n m = Message()\n response = m.create('response', flag, [content])\n self.node.send(response)\n",
"step-5": "import logging\n\nfrom utils import Utils\nfrom block import Block\nfrom message import Message\nfrom transaction import Transaction\n\nclass Response:\n def __init__(self, node, data):\n self.node = node\n self.data = data\n self.selector()\n\n def selector(self):\n if self.data['flag'] == 1:\n self.chain_size()\n elif self.data['flag'] == 2:\n self.chain_sync()\n elif self.data['flag'] == 3:\n if isinstance(self.data['content'], bool):\n self.append_new_block()\n else:\n self.new_block()\n else:\n self.new_transaction()\n\n def chain_size(self):\n server_chain_size = self.node.get_ledger_size()\n self.return_response(1, server_chain_size)\n\n def chain_sync(self):\n u = Utils()\n blocks = [u.dict_to_json(block) for block in self.node.get_server_ledger()]\n self.return_response(2, blocks)\n\n def new_block(self):\n b = Block()\n block = self.data['content'][0]\n if not self.node.get_server_ledger():\n # Server has no chain, cannot validate previous hash\n logging.error('{0}Peer #{1}: cannot validate blocks! Authorizing!'.format(self.node.type,self.node.index))\n self.return_response(3, block)\n else:\n if b.validate(block):\n self.node.server.write_message('announce', 1, block['index'])\n self.node.add_block(block)\n self.return_response(3, block)\n else:\n self.node.server.write_message('announce', 2, block['index'])\n self.return_response(3)\n\n def new_transaction(self):\n t = Transaction()\n tx = self.data['content'][0][0]\n if t.validate(tx):\n self.node.server.shared_tx.append(tx)\n self.return_response(4, tx)\n else:\n self.return_response(4)\n\n def return_response(self, flag, content=None):\n m = Message()\n response = m.create('response', flag, [content])\n self.node.send(response)\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
import time
import pigpio
class Car:
def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig=0, sensors=[]):
self.pi = pigpio.pi()
if not self.pi.connected:
print("Pi not connected to pigpio.")
return
# GPIO Drive Pin locations
self.STBY = STBY
# drive motor
self.drivePWM = PWMA
self.driveIN1 = AIN1
self.driveIN2 = AIN2
# steering motor
self.steerPWM = PWMB
self.steerIN1 = BIN1
self.steerIN2 = BIN2
# initialize GPIO
self.pi.set_mode(STBY, pigpio.OUTPUT)
self.pi.set_mode(PWMA, pigpio.OUTPUT)
self.pi.set_mode(AIN1, pigpio.OUTPUT)
self.pi.set_mode(AIN2, pigpio.OUTPUT)
self.pi.set_mode(PWMB, pigpio.OUTPUT)
self.pi.set_mode(BIN1, pigpio.OUTPUT)
self.pi.set_mode(BIN2, pigpio.OUTPUT)
self.pi.set_PWM_frequency(PWMA, 50)
self.pi.set_PWM_frequency(PWMB, 50)
# Sensor GPIO Pin locations
self.sensorTrig = sensorTrig
self.sensors = sensors
self.distances = []
for i in range(len(sensors)):
self.distances.append(0)
# initialize sensor GPIO
if sensorTrig > 0:
self.pi.set_mode(sensorTrig, pigpio.OUTPUT)
for sensor in range(len(sensors)):
if sensors[sensor] > 0:
self.pi.set_mode(sensors[sensor], pigpio.INPUT)
# activate car
self.activate()
# activate motors
def activate(self):
self.deactivate()
self.pi.write(self.STBY, 1)
# shut off motors
def deactivate(self):
self.pi.write(self.STBY, 0)
# shut off drive motor
self.pi.write(self.driveIN1, 0)
self.pi.write(self.driveIN2, 0)
self.pi.set_PWM_dutycycle(self.drivePWM, 0)
# shut off steering motor
self.pi.write(self.steerIN1, 0)
self.pi.write(self.steerIN2, 0)
self.pi.set_PWM_dutycycle(self.steerPWM, 0)
if self.sensorTrig > 0:
# make sure sensors aren't triggered
self.pi.write(self.sensorTrig, False)
# set drive motor
def setDrive(self, direction, dutycycle=100):
dc = int((255.0 / 100.0) * dutycycle)
if direction == 1:
self.pi.write(self.driveIN1, 1)
self.pi.write(self.driveIN2, 0)
self.pi.set_PWM_dutycycle(self.drivePWM, dc)
elif direction == -1:
self.pi.write(self.driveIN1, 0)
self.pi.write(self.driveIN2, 1)
self.pi.set_PWM_dutycycle(self.drivePWM, dc)
else:
self.pi.write(self.driveIN1, 0)
self.pi.write(self.driveIN2, 0)
self.pi.set_PWM_dutycycle(self.drivePWM, 0)
# set steering motor
def setSteering(self, direction, dutycycle=100):
dc = int((255.0 / 100.0) * dutycycle)
if direction == 1:
self.pi.write(self.steerIN1, 0)
self.pi.write(self.steerIN2, 1)
self.pi.set_PWM_dutycycle(self.steerPWM, dc)
elif direction == -1:
self.pi.write(self.steerIN1, 1)
self.pi.write(self.steerIN2, 0)
self.pi.set_PWM_dutycycle(self.steerPWM, dc)
else:
self.pi.write(self.steerIN1, 0)
self.pi.write(self.steerIN2, 0)
self.pi.set_PWM_dutycycle(self.steerPWM, 0)
# update sensors distance
def updateDistances(self):
if self.sensorTrig > 0:
for sensor in range(len(self.sensors)):
while self.pi.read(self.sensors[sensor]):
continue
# trigger the sensors so they start reading
self.pi.write(self.sensorTrig, True)
time.sleep(0.000001)
self.pi.write(self.sensorTrig, False)
# wait until the sensor starts reading, if it takes longer than .001 seconds then something went wrong
startT = time.time()
while not self.pi.read(self.sensors[sensor]) and time.time() - startT < .001:
continue
startT = time.time()
# wait for the sensor to become inactive which gives us the ending time
while self.pi.read(self.sensors[sensor]):
continue
endT = time.time()
# convert the sensor readings to distance in centimeters
self.distances[sensor] = round((endT - startT) * 17150, 2)
'''
# trial to read multiple sensors at once but was having issues
# definitely can be optimized better and needs code hang detection
startT = {}
endT = {}
self.pi.write(self.sensorTrig, True)
time.sleep(0.0000001)
self.pi.write(self.sensorTrig, False)
sensorCount = len(self.sensors)
while len(endT) < sensorCount:
for sensor in range(sensorCount):
if sensor not in startT.keys():
if self.pi.read(self.sensors[sensor]):
startT[sensor] = time.time()
elif not sensor in endT.keys():
if not self.pi.read(self.sensors[sensor]):
endT[sensor] = time.time()
for sensor in range(len(self.sensors)):
self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)
'''
# shut everything off and disconnect from pi
def stop(self):
self.deactivate()
self.pi.stop()
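
# Editor's usage sketch: the BCM pin numbers below are placeholders for a
# TB6612FNG-style driver wiring, not values from the original project; adjust
# them to your own setup before running.
if __name__ == '__main__':
    car = Car(STBY=17, PWMA=18, AIN2=27, AIN1=22, BIN1=23, BIN2=24, PWMB=25)
    car.setDrive(1, 50)   # forward at 50% duty cycle
    time.sleep(1.0)
    car.setDrive(0)       # release the drive motor
    car.stop()            # deactivate and disconnect from pigpio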
|
normal
|
{
"blob_id": "5b9f1b3ca4b50a4e9e8bd6715e73c62b4f778929",
"index": 1594,
"step-1": "<mask token>\n\n\nclass Car:\n <mask token>\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n <mask token>\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n <mask token>\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Car:\n\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig\n =0, sensors=[]):\n self.pi = pigpio.pi()\n if not self.pi.connected:\n print('Pi not connected to pigpio.')\n return\n self.STBY = STBY\n self.drivePWM = PWMA\n self.driveIN1 = AIN1\n self.driveIN2 = AIN2\n self.steerPWM = PWMB\n self.steerIN1 = BIN1\n self.steerIN2 = BIN2\n self.pi.set_mode(STBY, pigpio.OUTPUT)\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\n self.pi.set_PWM_frequency(PWMA, 50)\n self.pi.set_PWM_frequency(PWMB, 50)\n self.sensorTrig = sensorTrig\n self.sensors = sensors\n self.distances = []\n for i in range(len(sensors)):\n self.distances.append(0)\n if sensorTrig > 0:\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\n for sensor in range(len(sensors)):\n if sensors[sensor] > 0:\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\n self.activate()\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n <mask token>\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n <mask token>\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n\n def stop(self):\n self.deactivate()\n self.pi.stop()\n",
"step-3": "<mask token>\n\n\nclass Car:\n\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig\n =0, sensors=[]):\n self.pi = pigpio.pi()\n if not self.pi.connected:\n print('Pi not connected to pigpio.')\n return\n self.STBY = STBY\n self.drivePWM = PWMA\n self.driveIN1 = AIN1\n self.driveIN2 = AIN2\n self.steerPWM = PWMB\n self.steerIN1 = BIN1\n self.steerIN2 = BIN2\n self.pi.set_mode(STBY, pigpio.OUTPUT)\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\n self.pi.set_PWM_frequency(PWMA, 50)\n self.pi.set_PWM_frequency(PWMB, 50)\n self.sensorTrig = sensorTrig\n self.sensors = sensors\n self.distances = []\n for i in range(len(sensors)):\n self.distances.append(0)\n if sensorTrig > 0:\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\n for sensor in range(len(sensors)):\n if sensors[sensor] > 0:\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\n self.activate()\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n <mask token>\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n\n def setSteering(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 1)\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n elif direction == -1:\n self.pi.write(self.steerIN1, 1)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n else:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n\n def stop(self):\n self.deactivate()\n self.pi.stop()\n",
"step-4": "<mask token>\n\n\nclass Car:\n\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig\n =0, sensors=[]):\n self.pi = pigpio.pi()\n if not self.pi.connected:\n print('Pi not connected to pigpio.')\n return\n self.STBY = STBY\n self.drivePWM = PWMA\n self.driveIN1 = AIN1\n self.driveIN2 = AIN2\n self.steerPWM = PWMB\n self.steerIN1 = BIN1\n self.steerIN2 = BIN2\n self.pi.set_mode(STBY, pigpio.OUTPUT)\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\n self.pi.set_PWM_frequency(PWMA, 50)\n self.pi.set_PWM_frequency(PWMB, 50)\n self.sensorTrig = sensorTrig\n self.sensors = sensors\n self.distances = []\n for i in range(len(sensors)):\n self.distances.append(0)\n if sensorTrig > 0:\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\n for sensor in range(len(sensors)):\n if sensors[sensor] > 0:\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\n self.activate()\n\n def activate(self):\n self.deactivate()\n self.pi.write(self.STBY, 1)\n\n def deactivate(self):\n self.pi.write(self.STBY, 0)\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\n if self.sensorTrig > 0:\n self.pi.write(self.sensorTrig, False)\n\n def setDrive(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.driveIN1, 1)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n elif direction == -1:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 1)\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\n else:\n self.pi.write(self.driveIN1, 0)\n self.pi.write(self.driveIN2, 0)\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\n\n def setSteering(self, direction, dutycycle=100):\n dc = int(255.0 / 100.0 * dutycycle)\n if direction == 1:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 1)\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n elif direction == -1:\n self.pi.write(self.steerIN1, 1)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\n else:\n self.pi.write(self.steerIN1, 0)\n self.pi.write(self.steerIN2, 0)\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\n\n def updateDistances(self):\n if self.sensorTrig > 0:\n for sensor in range(len(self.sensors)):\n while self.pi.read(self.sensors[sensor]):\n continue\n self.pi.write(self.sensorTrig, True)\n time.sleep(1e-06)\n self.pi.write(self.sensorTrig, False)\n startT = time.time()\n while not self.pi.read(self.sensors[sensor]) and time.time(\n ) - startT < 0.001:\n continue\n startT = time.time()\n while self.pi.read(self.sensors[sensor]):\n continue\n endT = time.time()\n self.distances[sensor] = round((endT - startT) * 17150, 2)\n \"\"\"\n # trial to read multiple sensors at once but was having issues\n # definitely can be optimized better and needs code hang detection\n startT = {}\n endT = {}\n self.pi.write(self.sensorTrig, True)\n time.sleep(0.0000001)\n self.pi.write(self.sensorTrig, False)\n sensorCount = len(self.sensors)\n while len(endT) < sensorCount:\n for sensor in range(sensorCount):\n if sensor not in startT.keys():\n if self.pi.read(self.sensors[sensor]):\n startT[sensor] = time.time()\n elif not sensor in endT.keys():\n if not 
self.pi.read(self.sensors[sensor]):\n endT[sensor] = time.time()\n for sensor in range(len(self.sensors)):\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\n \"\"\"\n\n def stop(self):\n self.deactivate()\n self.pi.stop()\n",
"step-5": "import time\r\nimport pigpio\r\n\r\nclass Car:\r\n def __init__(self, STBY, PWMA, AIN2, AIN1, BIN1, BIN2, PWMB, sensorTrig=0, sensors=[]):\r\n self.pi = pigpio.pi()\r\n if not self.pi.connected:\r\n print(\"Pi not connected to pigpio.\")\r\n return\r\n \r\n # GPIO Drive Pin locations\r\n self.STBY = STBY\r\n # drive motor\r\n self.drivePWM = PWMA\r\n self.driveIN1 = AIN1\r\n self.driveIN2 = AIN2\r\n # steering motor\r\n self.steerPWM = PWMB\r\n self.steerIN1 = BIN1\r\n self.steerIN2 = BIN2\r\n \r\n # initialize GPIO\r\n self.pi.set_mode(STBY, pigpio.OUTPUT)\r\n self.pi.set_mode(PWMA, pigpio.OUTPUT)\r\n self.pi.set_mode(AIN1, pigpio.OUTPUT)\r\n self.pi.set_mode(AIN2, pigpio.OUTPUT)\r\n self.pi.set_mode(PWMB, pigpio.OUTPUT)\r\n self.pi.set_mode(BIN1, pigpio.OUTPUT)\r\n self.pi.set_mode(BIN2, pigpio.OUTPUT)\r\n \r\n self.pi.set_PWM_frequency(PWMA, 50)\r\n self.pi.set_PWM_frequency(PWMB, 50)\r\n \r\n \r\n # Sensor GPIO Pin locations\r\n self.sensorTrig = sensorTrig\r\n self.sensors = sensors\r\n self.distances = []\r\n for i in range(len(sensors)):\r\n self.distances.append(0)\r\n \r\n # initialize sensor GPIO\r\n if sensorTrig > 0:\r\n self.pi.set_mode(sensorTrig, pigpio.OUTPUT)\r\n for sensor in range(len(sensors)):\r\n if sensors[sensor] > 0:\r\n self.pi.set_mode(sensors[sensor], pigpio.INPUT)\r\n \r\n # activate car\r\n self.activate()\r\n \r\n # activate motors\r\n def activate(self): \r\n self.deactivate()\r\n self.pi.write(self.STBY, 1)\r\n \r\n # shut off motors\r\n def deactivate(self):\r\n self.pi.write(self.STBY, 0)\r\n # shut off drive motor\r\n self.pi.write(self.driveIN1, 0)\r\n self.pi.write(self.driveIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\r\n # shut off steering motor\r\n self.pi.write(self.steerIN1, 0)\r\n self.pi.write(self.steerIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\r\n if self.sensorTrig > 0:\r\n # make sure sensors aren't triggered\r\n self.pi.write(self.sensorTrig, False)\r\n \r\n # set drive motor\r\n def setDrive(self, direction, dutycycle=100):\r\n dc = int((255.0 / 100.0) * dutycycle)\r\n if direction == 1:\r\n self.pi.write(self.driveIN1, 1)\r\n self.pi.write(self.driveIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\r\n elif direction == -1:\r\n self.pi.write(self.driveIN1, 0)\r\n self.pi.write(self.driveIN2, 1)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, dc)\r\n else:\r\n self.pi.write(self.driveIN1, 0)\r\n self.pi.write(self.driveIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.drivePWM, 0)\r\n \r\n # set steering motor\r\n def setSteering(self, direction, dutycycle=100):\r\n dc = int((255.0 / 100.0) * dutycycle)\r\n if direction == 1:\r\n self.pi.write(self.steerIN1, 0)\r\n self.pi.write(self.steerIN2, 1)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\r\n elif direction == -1:\r\n self.pi.write(self.steerIN1, 1)\r\n self.pi.write(self.steerIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, dc)\r\n else:\r\n self.pi.write(self.steerIN1, 0)\r\n self.pi.write(self.steerIN2, 0)\r\n self.pi.set_PWM_dutycycle(self.steerPWM, 0)\r\n \r\n # update sensors distance\r\n def updateDistances(self):\r\n if self.sensorTrig > 0:\r\n for sensor in range(len(self.sensors)):\r\n while self.pi.read(self.sensors[sensor]):\r\n continue\r\n # trigger the sensors so they start reading\r\n self.pi.write(self.sensorTrig, True)\r\n time.sleep(0.000001)\r\n self.pi.write(self.sensorTrig, False)\r\n # wait until the sensor starts reading, if it takes longer than .001 seconds then something went wrong\r\n startT = time.time()\r\n while not 
self.pi.read(self.sensors[sensor]) and time.time() - startT < .001:\r\n continue\r\n startT = time.time()\r\n # wait for the sensor to become inactive which gives us the ending time\r\n while self.pi.read(self.sensors[sensor]):\r\n continue\r\n endT = time.time()\r\n # convert the sensor readings to distance in centimeters\r\n self.distances[sensor] = round((endT - startT) * 17150, 2)\r\n \r\n '''\r\n # trial to read multiple sensors at once but was having issues\r\n # definitely can be optimized better and needs code hang detection\r\n startT = {}\r\n endT = {}\r\n self.pi.write(self.sensorTrig, True)\r\n time.sleep(0.0000001)\r\n self.pi.write(self.sensorTrig, False)\r\n sensorCount = len(self.sensors)\r\n while len(endT) < sensorCount:\r\n for sensor in range(sensorCount):\r\n if sensor not in startT.keys():\r\n if self.pi.read(self.sensors[sensor]):\r\n startT[sensor] = time.time()\r\n elif not sensor in endT.keys():\r\n if not self.pi.read(self.sensors[sensor]):\r\n endT[sensor] = time.time()\r\n for sensor in range(len(self.sensors)):\r\n self.distances[sensor] = round((endT[sensor] - startT[sensor]) * 17150, 2)\r\n '''\r\n \r\n # shut everything off and disconnect from pi\r\n def stop(self):\r\n self.deactivate()\r\n self.pi.stop()\r\n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.contrib.auth.models import User
from ..models import Todo
class MyTestCase(TestCase):
def test_mark_done(self):
user = User.objects.create_user(email='user@…', username='user', password='somepasswd')
todo = Todo(title='SomeTitle', description='SomeDescr', owner=user)
res = todo.mark_done(user)
self.assertTrue(res)
self.assertEqual(Todo.objects.count(), 1)
def test_mark_done_already_done(self):
user = User.objects.create_user(email='user@…', username='user', password='somepasswd')
todo = Todo(title='SomeTitle', description='SomeDescr', is_done=True, done_by=user, owner=user)
res = todo.mark_done(user)
self.assertIsNone(res)
        # todo not saved because mark_done doesn't save already-done todos
self.assertEqual(Todo.objects.count(), 0)
|
normal
|
{
"blob_id": "5c81ddbc8f5a162949a100dbef1c69551d9e267a",
"index": 37,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyTestCase(TestCase):\n\n def test_mark_done(self):\n user = User.objects.create_user(email='user@…', username='user',\n password='somepasswd')\n todo = Todo(title='SomeTitle', description='SomeDescr', owner=user)\n res = todo.mark_done(user)\n self.assertTrue(res)\n self.assertEqual(Todo.objects.count(), 1)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass MyTestCase(TestCase):\n\n def test_mark_done(self):\n user = User.objects.create_user(email='user@…', username='user',\n password='somepasswd')\n todo = Todo(title='SomeTitle', description='SomeDescr', owner=user)\n res = todo.mark_done(user)\n self.assertTrue(res)\n self.assertEqual(Todo.objects.count(), 1)\n\n def test_mark_done_already_done(self):\n user = User.objects.create_user(email='user@…', username='user',\n password='somepasswd')\n todo = Todo(title='SomeTitle', description='SomeDescr', is_done=\n True, done_by=user, owner=user)\n res = todo.mark_done(user)\n self.assertIsNone(res)\n self.assertEqual(Todo.objects.count(), 0)\n",
"step-4": "from django.test import TestCase\nfrom django.contrib.auth.models import User\nfrom ..models import Todo\n\n\nclass MyTestCase(TestCase):\n\n def test_mark_done(self):\n user = User.objects.create_user(email='user@…', username='user',\n password='somepasswd')\n todo = Todo(title='SomeTitle', description='SomeDescr', owner=user)\n res = todo.mark_done(user)\n self.assertTrue(res)\n self.assertEqual(Todo.objects.count(), 1)\n\n def test_mark_done_already_done(self):\n user = User.objects.create_user(email='user@…', username='user',\n password='somepasswd')\n todo = Todo(title='SomeTitle', description='SomeDescr', is_done=\n True, done_by=user, owner=user)\n res = todo.mark_done(user)\n self.assertIsNone(res)\n self.assertEqual(Todo.objects.count(), 0)\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom django.test import TestCase\nfrom django.contrib.auth.models import User\nfrom ..models import Todo\n\n\nclass MyTestCase(TestCase):\n\n def test_mark_done(self):\n user = User.objects.create_user(email='user@…', username='user', password='somepasswd')\n todo = Todo(title='SomeTitle', description='SomeDescr', owner=user)\n res = todo.mark_done(user)\n self.assertTrue(res)\n self.assertEqual(Todo.objects.count(), 1)\n\n def test_mark_done_already_done(self):\n user = User.objects.create_user(email='user@…', username='user', password='somepasswd')\n todo = Todo(title='SomeTitle', description='SomeDescr', is_done=True, done_by=user, owner=user)\n res = todo.mark_done(user)\n self.assertIsNone(res)\n # todo not saved because mark_done don't save already done todos\n self.assertEqual(Todo.objects.count(), 0)\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
from pymongo import MongoClient
from modules.linkedinSearch import SearchClass
from config import Config
class LinkedinSearch:
def __init__(self):
self.client = MongoClient(Config.MONGO_URI)
db = self.client.linkedin_db
self.collection = db.search
self.dict = {}
self.obj = SearchClass()
def db_check(self, query):
r = self.obj.search(query)
print(r)
t = 0
for i in r['results']:
if self.collection.find_one({'userid': i['userid']}):
pass
else:
# print(i)
t += 1
self.collection.insert_one(i)
self.client.close()
print('no. of stored pages', t)
# self.loop.close()
results = self.db_fetch(query)
#
# # return {'results': m}
return {'data': results}
# ---------------------fetching total number of query pages from database----------------------------------------
def db_fetch(self, query):
self.collection.create_index([("name", "text")])
lst = []
cursor = self.collection.find(
{"$text": {"$search": query}},
{'score': {'$meta': "textScore"}}).sort([('score', {'$meta': "textScore"})])
        total = cursor.count()  # unused; note Cursor.count() was removed in PyMongo 4.x
n = 0
for i in cursor:
# print(i)
i.pop('_id')
lst.append(i)
n += 1
print('fetched pages from db', len(lst))
# return {'results': lst,
# 'total': n}
return lst
if __name__ == '__main__':
obj = LinkedinSearch()
print(obj.db_check("mark"))
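
# Editor's note (sketch): Cursor.count() used in db_fetch is gone in PyMongo 4.x;
# a current equivalent would be
#   total = self.collection.count_documents({'$text': {'$search': query}})
# create_index() is effectively idempotent (a no-op when the index already exists),
# so calling it on every fetch is safe, just an extra round-trip per query.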
|
normal
|
{
"blob_id": "3e8860c22ff3092304df57aa7f5dbcb6ccda7dd8",
"index": 5249,
"step-1": "<mask token>\n\n\nclass LinkedinSearch:\n <mask token>\n <mask token>\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': i['userid']}):\n pass\n else:\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. of stored pages', t)\n results = self.db_fetch(query)\n return {'data': results}\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': i['userid']}):\n pass\n else:\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. of stored pages', t)\n results = self.db_fetch(query)\n return {'data': results}\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\nif __name__ == '__main__':\n obj = LinkedinSearch()\n print(obj.db_check('mark'))\n",
"step-4": "from pymongo import MongoClient\nfrom modules.linkedinSearch import SearchClass\nfrom config import Config\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': i['userid']}):\n pass\n else:\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. of stored pages', t)\n results = self.db_fetch(query)\n return {'data': results}\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\nif __name__ == '__main__':\n obj = LinkedinSearch()\n print(obj.db_check('mark'))\n",
"step-5": "from pymongo import MongoClient\nfrom modules.linkedinSearch import SearchClass\nfrom config import Config\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': i['userid']}):\n pass\n else:\n # print(i)\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. of stored pages', t)\n # self.loop.close()\n\n results = self.db_fetch(query)\n #\n # # return {'results': m}\n return {'data': results}\n\n # ---------------------fetching total number of query pages from database----------------------------------------\n def db_fetch(self, query):\n self.collection.create_index([(\"name\", \"text\")])\n\n lst = []\n cursor = self.collection.find(\n {\"$text\": {\"$search\": query}},\n {'score': {'$meta': \"textScore\"}}).sort([('score', {'$meta': \"textScore\"})])\n total = cursor.count()\n n = 0\n for i in cursor:\n # print(i)\n i.pop('_id')\n lst.append(i)\n n += 1\n\n print('fetched pages from db', len(lst))\n # return {'results': lst,\n # 'total': n}\n return lst\n\n\nif __name__ == '__main__':\n obj = LinkedinSearch()\n print(obj.db_check(\"mark\"))\n\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
import pickle
from absl import flags
from absl import app
from absl import logging
import time
import numpy as np
FLAGS = flags.FLAGS
flags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')
flags.DEFINE_integer('num_chunks', 36, 'how many files')
flags.DEFINE_string('out_dir', '2020-04-10/', 'out path')
logging.set_verbosity(logging.INFO)
def load_all_vectors(num_chunks):
all_vectors = []
meta_data = [] # (doc_id, section_id, sentence_id, sentence)
for chunk_id in range(num_chunks):
logging.info('Processing file %s', chunk_id)
t = time.time()
vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' % chunk_id).astype(np.float32)
with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id, 'rb') as fin:
meta = pickle.load(fin)
vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)
vector_norms[vector_norms == 0] = 1.0
vectors /= vector_norms
all_vectors.append(vectors)
meta_data.extend(meta)
e = time.time()
logging.info('Finished processing chunk %s in %s seconds', chunk_id, str(e-t))
all_vec = np.concatenate(all_vectors)
logging.info('Concatenated shape %s' % str(all_vec.shape))
return all_vec, meta_data
def main(argv):
logging.info('Running reduce vecs with args %s', str(argv))
logging.info('Running on %s files', str(FLAGS.num_chunks))
all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)
np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)
with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:
pickle.dump(all_meta, fout)
if __name__ == "__main__":
app.run(main)
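
# Editor's note: a typical invocation would look like the line below (the script
# name is illustrative; the flag names come from the DEFINE_* calls above):
#   python reduce_vecs.py --sent2vec_dir=2020-04-10/sent2vec/ --num_chunks=36 --out_dir=2020-04-10/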
|
normal
|
{
"blob_id": "8aa35bcaa4e564306125b37c70a8a92f26da736d",
"index": 7418,
"step-1": "<mask token>\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\n<mask token>\n",
"step-2": "<mask token>\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\nlogging.set_verbosity(logging.INFO)\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == '__main__':\n app.run(main)\n",
"step-3": "<mask token>\nFLAGS = flags.FLAGS\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\nlogging.set_verbosity(logging.INFO)\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == '__main__':\n app.run(main)\n",
"step-4": "import pickle\nfrom absl import flags\nfrom absl import app\nfrom absl import logging\nimport time\nimport numpy as np\nFLAGS = flags.FLAGS\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\nlogging.set_verbosity(logging.INFO)\n\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = []\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' %\n chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id,\n 'rb') as fin:\n meta = pickle.load(fin)\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n logging.info('Finished processing chunk %s in %s seconds', chunk_id,\n str(e - t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == '__main__':\n app.run(main)\n",
"step-5": "\nimport pickle\n\nfrom absl import flags\nfrom absl import app\nfrom absl import logging\nimport time\nimport numpy as np\n\nFLAGS = flags.FLAGS\nflags.DEFINE_string('sent2vec_dir', '2020-04-10/sent2vec/', 'out path')\nflags.DEFINE_integer('num_chunks', 36, 'how many files')\nflags.DEFINE_string('out_dir', '2020-04-10/', 'out path')\n\nlogging.set_verbosity(logging.INFO)\n\ndef load_all_vectors(num_chunks):\n all_vectors = []\n meta_data = [] # (doc_id, section_id, sentence_id, sentence)\n for chunk_id in range(num_chunks):\n logging.info('Processing file %s', chunk_id)\n t = time.time()\n vectors = np.load(FLAGS.sent2vec_dir + '/chunk_%s.vectors.npy' % chunk_id).astype(np.float32)\n with open(FLAGS.sent2vec_dir + '/chunk_%s.sentences.pkl' % chunk_id, 'rb') as fin:\n meta = pickle.load(fin)\n\n vector_norms = np.linalg.norm(vectors, axis=1, keepdims=True)\n vector_norms[vector_norms == 0] = 1.0\n vectors /= vector_norms\n all_vectors.append(vectors)\n meta_data.extend(meta)\n e = time.time()\n\n logging.info('Finished processing chunk %s in %s seconds', chunk_id, str(e-t))\n all_vec = np.concatenate(all_vectors)\n logging.info('Concatenated shape %s' % str(all_vec.shape))\n return all_vec, meta_data\n\n\ndef main(argv):\n logging.info('Running reduce vecs with args %s', str(argv))\n logging.info('Running on %s files', str(FLAGS.num_chunks))\n all_vecs, all_meta = load_all_vectors(FLAGS.num_chunks)\n np.save('%s/all.npy' % FLAGS.out_dir, all_vecs)\n with open('%s/all.pkl' % FLAGS.out_dir, 'wb') as fout:\n pickle.dump(all_meta, fout)\n\n\nif __name__ == \"__main__\":\n app.run(main)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
lista = [x for x in range(11)] ## todo: important
kwadraty = [i**2 for i in lista]
kwadraty = [(i, i**2, i**3) for i in range(-10, 11)]
zbior_wyr = {'aa', '1233', '111111'}
slownik = {i: len(i) for i in zbior_wyr}
print(kwadraty, slownik, sep='\n')
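# For reference: slownik maps each string to its length, e.g. {'aa': 2, '1233': 4, '111111': 6}
# (set iteration order is arbitrary), and kwadraty holds 21 (i, i**2, i**3) tuples for i in -10..10.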
|
normal
|
{
"blob_id": "248b9b9d613f71e0130353f0792083b7d3f6ccd6",
"index": 7000,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(kwadraty, slownik, sep='\\n')\n",
"step-3": "lista = [x for x in range(11)]\nkwadraty = [(i ** 2) for i in lista]\nkwadraty = [(i, i ** 2, i ** 3) for i in range(-10, 11)]\nzbior_wyr = {'aa', '1233', '111111'}\nslownik = {i: len(i) for i in zbior_wyr}\nprint(kwadraty, slownik, sep='\\n')\n",
"step-4": "lista = [x for x in range(11)] ##todo: wazne\n\nkwadraty = [i**2 for i in lista]\nkwadraty = [(i, i**2, i**3) for i in range(-10, 11)]\nzbior_wyr = {'aa', '1233', '111111'}\nslownik = {i : len(i)for i in zbior_wyr}\n\nprint(kwadraty, slownik, sep='\\n')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import http.server
import socketserver
from http.server import BaseHTTPRequestHandler, HTTPServer
import time
import json
import io
import urllib
import requests
from lib.Emby_ws import xnoppo_ws
from lib.Emby_http import *
from lib.Xnoppo import *
from lib.Xnoppo_TV import *
import lib.Xnoppo_AVR
import shutil
import asyncio
import threading
import logging
import logging.handlers
import psutil
import sys
def get_version():
return("2.01")
def thread_function(ws_object):
print("Thread: starting")
ws_object.start()
print("Thread: finishing")
def restart():
print('restart')
try:
emby_wsocket.stop()
except:
sys.exit()
sys.exit()
    print('restart finished')  # unreachable: sys.exit() above already terminates the process
def save_config(config_file, config):
with open(config_file, 'w') as fw:
json.dump(config, fw, indent=4)
    fw.close()  # redundant: the with-block already closes the file
try:
emby_wsocket.ws_config=config
emby_wsocket.EmbySession.config=config
except:
emby_wsocket.ws_config=config
def get_state():
status={}
status["Version"]=get_version()
try:
status["Playstate"]=emby_wsocket.EmbySession.playstate
status["playedtitle"]=emby_wsocket.EmbySession.playedtitle
status["server"]=emby_wsocket.EmbySession.server
status["folder"]=emby_wsocket.EmbySession.folder
status["filename"]=emby_wsocket.EmbySession.filename
status["CurrentData"]=emby_wsocket.EmbySession.currentdata
except:
status["Playstate"]="Not_Connected"
status["playedtitle"]=""
status["server"]=""
status["folder"]=""
status["filename"]=""
status["CurrentData"]=""
status["cpu_perc"]=psutil.cpu_percent()
status["mem_perc"]=psutil.virtual_memory().percent
# you can have the percentage of used RAM
print(psutil.virtual_memory().percent)
print(status)
return(status)
def cargar_config(config_file,tv_path,av_path,lang_path):
with open(config_file, 'r') as f:
config = json.load(f)
#ver_configuracion(config)
    f.close()  # redundant: the with-block already closes the file
## new options default config values
config["Version"]=get_version()
default = config.get("Autoscript", False)
config["Autoscript"]=default
default = config.get("enable_all_libraries", False)
config["enable_all_libraries"]=default
default = config.get("TV_model", "")
config["TV_model"]=default
default = config.get("TV_SOURCES", [])
config["TV_SOURCES"] = default
default = config.get("AV_model", "")
config["AV_model"]=default
default = config.get("AV_SOURCES", [])
config["AV_SOURCES"] = default
default = config.get("TV_script_init", "")
config["TV_script_init"]=default
default = config.get("TV_script_end", "")
config["TV_script_end"]=default
default = config.get("av_delay_hdmi", 0)
config["av_delay_hdmi"]=default
default = config.get("AV_Port", 23)
config["AV_Port"]=default
default = config.get("timeout_oppo_mount", 60)
config["timeout_oppo_mount"]=default
default = config.get("language","es-ES")
config["language"]=default
default = config.get("default_nfs",False)
config["default_nfs"]=default
default = config.get("wait_nfs",False)
config["wait_nfs"]=default
default = config.get("refresh_time",5)
config["refresh_time"]=default
default = config.get("check_beta",False)
config["check_beta"]=default
default = config.get("smbtrick",False)
config["smbtrick"]=default
default = config.get("BRDisc",False)
config["BRDisc"]=default
    ## test status of the saved paths
edit_server=0
server_list = config["servers"]
for server in server_list:
default = server.get("Test_OK", False)
server_list[edit_server]["Test_OK"]=default
edit_server=edit_server+1
    ## Convert legacy text booleans to real booleans.
    if config["TV"]=='True':
        config["TV"]=True
    if config["TV"]=='False':
        config["TV"]=False
    if config["AV"]=='True':
        config["AV"]=True
    if config["AV"]=='False':
        config["AV"]=False
config["servers"]=server_list
config["tv_dirs"]=get_dir_folders(tv_path);
config["av_dirs"]=get_dir_folders(av_path);
config["langs"]=get_dir_folders(lang_path);
return(config)
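
# Compare the local version with the manifest published on GitHub,
# using the beta or stable channel depending on config["check_beta"].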
def check_version(config):
url = "https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js"
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
print(config["check_beta"])
if config["check_beta"]==True:
last_version=version["beta_version"]
last_version_file=version["beta_version_file"]
else:
last_version=version["curr_version"]
last_version_file=version["curr_version_file"]
xno_version=get_version()
resp = {}
resp["version"]=last_version
resp["file"]=last_version_file
print(xno_version)
print(last_version)
if xno_version<last_version:
resp["new_version"]=True
else:
resp["new_version"]=False
print(resp)
return(resp)
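
# Download and unpack the release archive, then re-copy the active TV/AV
# helper files into lib_path (a module-level global set in __main__).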
def update_version(config,vers_path,cwd):
url = "https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js"
headers = {}
response = requests.get(url, headers=headers)
version = json.loads(response.text)
print(version)
if config["check_beta"]==True:
last_version=version["beta_version"]
last_version_file=version["beta_version_file"]
else:
last_version=version["curr_version"]
last_version_file=version["curr_version_file"]
url2 = "https://github.com/siberian-git/Xnoppo/raw/main/versions/" + last_version_file
headers = {}
response2 = requests.get(url2, headers=headers)
filename=vers_path + last_version_file
with open(filename, 'wb') as f:
f.write(response2.content)
f.close()
shutil.unpack_archive(filename, cwd)
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
if config["TV"]==True and config["TV_model"]!="":
move_files(tv_path + config["TV_model"],lib_path)
if config["AV"]==True and config["AV_model"]!="":
move_files(av_path + config["AV_model"],lib_path)
resp = {}
resp["version"]=last_version
resp["file"]=last_version_file
resp["new_version"]=False
return(resp)
def cargar_lang(config_file):
with open(config_file.encode(sys.getfilesystemencoding()), 'r',encoding='latin-1') as f:
config = json.load(f)
return(config)
def leer_file(web_file):
with open(web_file, 'r',encoding='utf8') as f:
num=f.read()
return(num)
def leer_img(web_file):
with open(web_file, 'rb') as f:
num=f.read()
return(num)
def test_path(config,server):
rutas = get_mount_path(server["Emby_Path"] + "/test.mkv",server)
result2 = test_mount_path(config,rutas["Servidor"],rutas["Carpeta"])
return(result2)
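
# Map an Emby media path onto the OPPO share layout and split it into
# server/folder/file parts, e.g. '/NAS/Movies/film.mkv' ->
# {'Servidor': 'NAS', 'Carpeta': 'Movies', 'Fichero': 'film.mkv'}.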
def get_mount_path(movie,server_data):
movie = movie.replace(server_data["Emby_Path"],server_data["Oppo_Path"])
movie = movie.replace('\\\\','\\')
movie = movie.replace('\\','/')
word = '/'
inicio = movie.find(word)
inicio = inicio +1
final = movie.find(word,inicio,len(movie))
servidor = movie[inicio:final]
ultimo=final+1
result=final+1
while result > 0:
ultimo=result+1
result=movie.find(word,ultimo,len(movie))
fichero=movie[ultimo:len(movie)]
final=final+1
ultimo=ultimo-1
carpeta=movie[final:ultimo]
resultado={}
resultado["Servidor"]=servidor
resultado["Carpeta"]=carpeta
resultado["Fichero"]=fichero
return(resultado)
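
# Wake the OPPO, wait for its device list, log in over NFS or SMB as
# appropriate and try to mount the shared folder; returns 'OK' or 'FAILURE'.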
def test_mount_path(config,servidor,carpeta):
sendnotifyremote(config["Oppo_IP"])
#print("Conectando con el OPPO")
result=check_socket(config)
if result==0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey("EJT",config)
time.sleep(1)
#print("Solicitando montar ruta al OPPO")
response_data6b = getsetupmenu(config)
        while response_data6f.find('devicelist":[]') > 0:
            time.sleep(1)
            response_data6f = getdevicelist(config)
response_data_on = sendremotekey("QPW",config)
device_list=json.loads(response_data6f)
if config["DebugLevel"]>0: print(device_list)
nfs=config["default_nfs"]
for device in device_list["devicelist"]:
if device["name"].upper()==servidor.upper():
if device["sub_type"]=="nfs":
nfs=True
break
else:
nfs=False
break
if nfs:
response_login = LoginNFS(config,servidor)
else:
response_login = LoginSambaWithOutID(config,servidor)
if config["Always_ON"]==False:
time.sleep(5)
response_data6b = getsetupmenu(config)
if nfs:
response_mount = mountSharedNFSFolder(servidor,carpeta,'','',config)
else:
response_mount = mountSharedFolder(servidor,carpeta,'','',config)
response=json.loads(response_mount)
#print(response)
if config["Autoscript"]==True:
result=umountSharedFolder(config)
if response["success"]==True:
a = "OK"
else:
a = "FAILURE"
return(a)
else:
print("No se puede conectar, revisa las configuraciones o que el OPPO este encendido o en reposo")
def test_emby(config):
try:
EmbySession=EmbyHttp(config)
user_info = EmbySession.user_info
if user_info["SessionInfo"]["Id"]!="":
return("OK")
else:
return("FAILED")
except:
return("FAILED")
def test_oppo(config):
result=check_socket(config)
if result==0:
return("OK")
else:
return("FAILED")
def carga_libraries(config):
try:
EmbySession=EmbyHttp(config)
views_list=EmbySession.get_user_views(EmbySession.user_info["User"]["Id"])
libraries = []
for view in views_list:
library= {}
library["Name"]=view["Name"]
library["Id"]=view["Id"]
library["Active"]=False
try:
lib_list=config["Libraries"]
except:
lib_list={}
for lib in lib_list:
if lib["Id"]==view["Id"]:
library["Active"]=lib["Active"]
libraries.append(library)
config["Libraries"]=libraries
return(0)
except:
return(1)
def is_library_active(config,libraryname):
for library in config["Libraries"]:
if library["Name"]==libraryname:
return(library["Active"])
return(False)
def get_selectableFolders(config):
EmbySession=EmbyHttp(config)
MediaFolders = EmbySession.get_emby_selectablefolders()
servers=[]
for Folder in MediaFolders:
index=1
active=is_library_active(config,Folder["Name"])
if config["enable_all_libraries"]==True:
active=True;
if active==True:
for SubFolder in Folder["SubFolders"]:
server={}
server["Id"]=SubFolder["Id"]
if index>1:
server["name"]=Folder["Name"]+"("+str(index)+")"
else:
server["name"]=Folder["Name"]
server["Emby_Path"]=SubFolder["Path"]
server["Oppo_Path"]="/"
try:
serv_list=config["servers"]
except:
serv_list={}
for serv in serv_list:
if server["Emby_Path"]==serv["Emby_Path"]:
server["name"]=serv["name"];
server["Oppo_Path"]=serv["Oppo_Path"];
server["Test_OK"]=serv["Test_OK"];
servers.append(server)
index=index+1
config["servers"]=servers
def get_dir_folders(directory):
    os.chdir(directory)
    dirs = os.listdir(".")
    list_dir=[]
    for x in dirs:
        if os.path.isdir(x):
            list_dir.append(x)
    return(list_dir)
def move_files(src, dest):
os.chdir(src)
src_files = os.listdir('.')
for file_name in src_files:
full_file_name = os.path.join(src, file_name)
if os.path.isfile(full_file_name):
shutil.copy(full_file_name, dest)
return(0)
def get_devices(config):
try:
EmbySession=EmbyHttp(config)
devices = EmbySession.get_emby_devices()
index=0
dev_temp = []
for device in devices["Items"]:
try:
if device["Id"]!='Xnoppo':
device["Name"]=device["Name"] + " / " + device["AppName"]
device["Id"]=device["ReportedDeviceId"]
dev_temp.append(device)
except:
pass
config["devices"]=dev_temp
return('OK')
except:
return('FAILURE')
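
# HTTP handler for the embedded web UI: GET serves pages, resources and JSON
# config/status endpoints; POST validates and saves settings.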
class MyServer(BaseHTTPRequestHandler):
def do_GET(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
resource_path=cwd + separador + 'web' + separador + 'resources' + separador
html_path = cwd + separador + 'web' + separador
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/emby_conf.html':
i = leer_file(html_path + 'emby_conf.html')
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/oppo_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'oppo_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/lib_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'lib_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/path_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'path_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/tv_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'tv_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/av_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'av_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/other_conf.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'other_conf.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/status.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'status.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/help.html':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
i = leer_file(html_path + 'help.html')
self.wfile.write(bytes(i,"utf-8"))
return(0)
if self.path == '/remote.html':
i = leer_file(html_path + 'remote.html')
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(bytes(i,"utf-8"))
return(0)
        if self.path == '/android-chrome-36x36.png':
            self.send_response(200)
            self.send_header("Content-type", "image/png")
            self.end_headers()
            i = leer_img(resource_path + 'android-chrome-36x36.png')
            self.wfile.write(bytes(i))
            return(0)
        if self.path == '/av-receiver-icon-2.jpg':
            self.send_response(200)
            self.send_header("Content-type", "image/jpeg")
            self.end_headers()
            i = leer_img(resource_path + 'av-receiver-icon-2.jpg')
            self.wfile.write(bytes(i))
            return(0)
        if self.path == '/dragon.png':
            self.send_response(200)
            self.send_header("Content-type", "image/png")
            self.end_headers()
            i = leer_img(resource_path + 'dragon.png')
            self.wfile.write(bytes(i))
            return(0)
if self.path == '/xnoppo_config':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/xnoppo_config_lib':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
carga_libraries(a)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/xnoppo_config_dev':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
get_devices(a)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/check_version':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = check_version(config)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/update_version':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = update_version(config,vers_path,cwd)
restart()
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/get_state':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = get_state()
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/restart':
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
a = "Restarting"
self.wfile.write(bytes(a,"utf-8"))
restart()
if self.path == '/refresh_paths':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
get_selectableFolders(a)
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/lang':
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = cargar_lang(lang_path + config["language"] + separador +'lang.js')
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
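        # /send_key?sendkey=<KEY>: forward a remote-control key to the OPPO;
        # 'PON' first runs the full wake-up handshake (EJT is sent twice when BRDisc is enabled).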
if self.path.find("/send_key?")>=0:
get_data = self.path
print(get_data)
a = len('/send_key?sendkey=')
b=get_data[a:len(get_data)]
print(b)
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
sendnotifyremote(config["Oppo_IP"])
result=check_socket(config)
if b=='PON':
if result==0:
response_data6a = getmainfirmwareversion(config)
response_data6c = getdevicelist(config)
response_data6b = getsetupmenu(config)
response_data6c = OppoSignin(config)
response_data6d = getdevicelist(config)
response_data6e = getglobalinfo(config)
response_data6f = getdevicelist(config)
response_data_on = sendremotekey("EJT",config)
if config["BRDisc"]==True:
time.sleep(1)
response_data_on = sendremotekey("EJT",config)
time.sleep(1)
response_data6b = getsetupmenu(config)
else:
response_data_on = sendremotekey(b,config)
self.send_response(200)
self.send_header("Content-type", "text")
self.end_headers()
a = "ok"
self.wfile.write(bytes(a,"utf-8"))
return(0)
if self.path == '/log.txt':
self.send_response(200)
self.send_header("Content-type", "text")
self.end_headers()
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')
self.wfile.write(bytes(a))
return(0)
else:
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(bytes("<html><head><title>https://pythonbasics.org</title></head>", "utf-8"))
self.wfile.write(bytes("<p>Request: %s</p>" % self.path, "utf-8"))
self.wfile.write(bytes("<body>", "utf-8"))
self.wfile.write(bytes("<p>This is an example web server.</p>", "utf-8"))
self.wfile.write(bytes("</body></html>", "utf-8"))
def do_POST(self):
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
resource_path=cwd + separador + 'web' + separador + 'resources' + separador
html_path = cwd + separador + 'web' + separador
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
print(self.path)
if self.path == '/save_config':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
if self.path == '/check_emby':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = test_emby(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
status = get_state()
if status["Playstate"]=="Not_Connected":
save_config(cwd + separador + 'config.json',config)
emby_wsocket.ws_config=config
restart()
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/check_oppo':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = test_oppo(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/test_path':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
server = json.loads(post_data.decode('utf-8'))
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = test_path(config,server)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(server))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(server),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/navigate_path':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
path_obj = json.loads(post_data.decode('utf-8'))
path = path_obj["path"]
config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)
a = navigate_folder(path,config)
a_json=json.dumps(a)
print(len(a_json))
self.send_response(200)
self.send_header("Content-Length", len(a_json))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(a),"utf-8"))
return(0)
if self.path == '/move_tv':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json',config)
move_files(tv_path + config["TV_model"],lib_path)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
restart()
return(0)
if self.path == '/move_av':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
save_config(cwd + separador + 'config.json',config)
move_files(av_path + config["AV_model"],lib_path)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
restart()
return(0)
if self.path == '/get_tv_key':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = get_tv_key(config)
if a == 'OK':
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/tv_test_conn':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = tv_test_conn(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/get_tv_sources':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = get_tv_sources(config)
if a == 'OK':
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/get_av_sources':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = get_hdmi_list(config)
if a != None:
config["AV_SOURCES"]=a
save_config(cwd + separador + 'config.json',config)
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/tv_test_init':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = tv_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/tv_test_end':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = tv_set_prev(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/av_test_on':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = av_check_power(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/av_test_off':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = av_power_off(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if self.path == '/av_test_hdmi':
content_length = int(self.headers['Content-Length']) # <--- Gets the size of data
post_data = self.rfile.read(content_length) # <--- Gets the data itself
config = json.loads(post_data.decode('utf-8'))
a = av_change_hdmi(config)
if a == 'OK':
self.send_response(200)
self.send_header("Content-Length", len(config))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes(json.dumps(config),"utf-8"))
else:
self.send_response(300)
self.send_header("Content-Length", len("ERROR"))
self.send_header("Content-Type", "text/html")
self.send_header('Access-Control-Allow-Credentials', 'true')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
self.wfile.write(bytes("ERROR","utf-8"))
return(0)
if __name__ == "__main__":
cwd = os.path.dirname(os.path.abspath(__file__))
if sys.platform.startswith('win'):
separador="\\"
else:
separador="/"
config_file = cwd + separador + "config.json"
resource_path=cwd + separador + 'web' + separador + 'resources' + separador
html_path = cwd + separador + 'web' + separador
tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador
av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador
lib_path = cwd + separador + 'lib' + separador
lang_path = cwd + separador + 'web' + separador + 'lang' + separador
vers_path = cwd + separador + 'versions' + separador
config = cargar_config(config_file,tv_path,av_path,lang_path)
logfile=cwd + separador + "emby_xnoppo_client_logging.log"
lang = cargar_lang(lang_path + config["language"] + separador +'lang.js')
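    # DebugLevel 0: console only at CRITICAL; 1: rotating 50 MB file log at INFO;
    # 2: rotating 5 MB file log at DEBUG.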
if config["DebugLevel"]==0:
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.CRITICAL)
elif config["DebugLevel"]==1:
rfh = logging.handlers.RotatingFileHandler(
filename=logfile,
mode='a',
maxBytes=50*1024*1024,
backupCount=2,
encoding=None,
delay=0
)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.INFO,handlers=[rfh])
elif config["DebugLevel"]==2:
rfh = logging.handlers.RotatingFileHandler(
filename=logfile,
mode='a',
maxBytes=5*1024*1024,
backupCount=2,
encoding=None,
delay=0
)
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.DEBUG,handlers=[rfh])
emby_wsocket = xnoppo_ws()
emby_wsocket.ws_config=config
emby_wsocket.config_file=config_file
emby_wsocket.ws_lang=lang
x = threading.Thread(target=thread_function, args=(emby_wsocket,))
x.start()
    logging.debug('Starting the web server\n')
serverPort = 8090
webServer = HTTPServer(("", serverPort), MyServer)
print("Server started http://%s:%s" % ("", serverPort))
try:
webServer.serve_forever()
except KeyboardInterrupt:
pass
webServer.server_close()
    logging.info('Process finished')
logging.info('Finished')
print("Server stopped.")
|
normal
|
{
"blob_id": "2ff85ac059f160fcc6b39b4298e8216cbad77ab3",
"index": 504,
"step-1": "<mask token>\n\n\ndef get_version():\n return '2.01'\n\n\n<mask token>\n\n\ndef restart():\n print('restart')\n try:\n emby_wsocket.stop()\n except:\n sys.exit()\n sys.exit()\n print('fin restart')\n\n\ndef save_config(config_file, config):\n with open(config_file, 'w') as fw:\n json.dump(config, fw, indent=4)\n fw.close\n try:\n emby_wsocket.ws_config = config\n emby_wsocket.EmbySession.config = config\n except:\n emby_wsocket.ws_config = config\n\n\n<mask token>\n\n\ndef cargar_config(config_file, tv_path, av_path, lang_path):\n with open(config_file, 'r') as f:\n config = json.load(f)\n f.close\n config['Version'] = get_version()\n default = config.get('Autoscript', False)\n config['Autoscript'] = default\n default = config.get('enable_all_libraries', False)\n config['enable_all_libraries'] = default\n default = config.get('TV_model', '')\n config['TV_model'] = default\n default = config.get('TV_SOURCES', [])\n config['TV_SOURCES'] = default\n default = config.get('AV_model', '')\n config['AV_model'] = default\n default = config.get('AV_SOURCES', [])\n config['AV_SOURCES'] = default\n default = config.get('TV_script_init', '')\n config['TV_script_init'] = default\n default = config.get('TV_script_end', '')\n config['TV_script_end'] = default\n default = config.get('av_delay_hdmi', 0)\n config['av_delay_hdmi'] = default\n default = config.get('AV_Port', 23)\n config['AV_Port'] = default\n default = config.get('timeout_oppo_mount', 60)\n config['timeout_oppo_mount'] = default\n default = config.get('language', 'es-ES')\n config['language'] = default\n default = config.get('default_nfs', False)\n config['default_nfs'] = default\n default = config.get('wait_nfs', False)\n config['wait_nfs'] = default\n default = config.get('refresh_time', 5)\n config['refresh_time'] = default\n default = config.get('check_beta', False)\n config['check_beta'] = default\n default = config.get('smbtrick', False)\n config['smbtrick'] = default\n default = config.get('BRDisc', False)\n config['BRDisc'] = default\n edit_server = 0\n server_list = config['servers']\n for server in server_list:\n default = server.get('Test_OK', False)\n server_list[edit_server]['Test_OK'] = default\n edit_server = edit_server + 1\n if config['TV'] == 'True':\n config['TV'] = True\n if config['TV'] == 'False':\n config['TV'] = False\n if config['AV'] == 'True':\n config['AV'] = True\n if config['AV'] == 'False':\n config['AV'] = False\n config['servers'] = server_list\n config['tv_dirs'] = get_dir_folders(tv_path)\n config['av_dirs'] = get_dir_folders(av_path)\n config['langs'] = get_dir_folders(lang_path)\n return config\n\n\ndef check_version(config):\n url = (\n 'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n )\n headers = {}\n response = requests.get(url, headers=headers)\n version = json.loads(response.text)\n print(version)\n print(config['check_beta'])\n if config['check_beta'] == True:\n last_version = version['beta_version']\n last_version_file = version['beta_version_file']\n else:\n last_version = version['curr_version']\n last_version_file = version['curr_version_file']\n xno_version = get_version()\n resp = {}\n resp['version'] = last_version\n resp['file'] = last_version_file\n print(xno_version)\n print(last_version)\n if xno_version < last_version:\n resp['new_version'] = True\n else:\n resp['new_version'] = False\n print(resp)\n return resp\n\n\ndef update_version(config, vers_path, cwd):\n url = (\n 
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n )\n headers = {}\n response = requests.get(url, headers=headers)\n version = json.loads(response.text)\n print(version)\n if config['check_beta'] == True:\n last_version = version['beta_version']\n last_version_file = version['beta_version_file']\n else:\n last_version = version['curr_version']\n last_version_file = version['curr_version_file']\n url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +\n last_version_file)\n headers = {}\n response2 = requests.get(url2, headers=headers)\n filename = vers_path + last_version_file\n with open(filename, 'wb') as f:\n f.write(response2.content)\n f.close()\n shutil.unpack_archive(filename, cwd)\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n if config['TV'] == True and config['TV_model'] != '':\n move_files(tv_path + config['TV_model'], lib_path)\n if config['AV'] == True and config['AV_model'] != '':\n move_files(av_path + config['AV_model'], lib_path)\n resp = {}\n resp['version'] = last_version\n resp['file'] = last_version_file\n resp['new_version'] = False\n return resp\n\n\ndef cargar_lang(config_file):\n with open(config_file.encode(sys.getfilesystemencoding()), 'r',\n encoding='latin-1') as f:\n config = json.load(f)\n f.close\n return config\n\n\ndef leer_file(web_file):\n with open(web_file, 'r', encoding='utf8') as f:\n num = f.read()\n f.close\n return num\n\n\ndef leer_img(web_file):\n with open(web_file, 'rb') as f:\n num = f.read()\n f.close\n return num\n\n\ndef test_path(config, server):\n rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)\n result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])\n return result2\n\n\ndef get_mount_path(movie, server_data):\n movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])\n movie = movie.replace('\\\\\\\\', '\\\\')\n movie = movie.replace('\\\\', '/')\n word = '/'\n inicio = movie.find(word)\n inicio = inicio + 1\n final = movie.find(word, inicio, len(movie))\n servidor = movie[inicio:final]\n ultimo = final + 1\n result = final + 1\n while result > 0:\n ultimo = result + 1\n result = movie.find(word, ultimo, len(movie))\n fichero = movie[ultimo:len(movie)]\n final = final + 1\n ultimo = ultimo - 1\n carpeta = movie[final:ultimo]\n resultado = {}\n resultado['Servidor'] = servidor\n resultado['Carpeta'] = carpeta\n resultado['Fichero'] = fichero\n return resultado\n\n\ndef test_mount_path(config, servidor, carpeta):\n sendnotifyremote(config['Oppo_IP'])\n result = check_socket(config)\n if result == 0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('EJT', config)\n time.sleep(1)\n response_data6b = getsetupmenu(config)\n while response_data6f.find('devicelist\":[]') > 0:\n time.sleep(1)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('QPW', config)\n device_list = json.loads(response_data6f)\n if config['DebugLevel'] > 0:\n print(device_list)\n nfs = config['default_nfs']\n for device in 
device_list['devicelist']:\n if device['name'].upper() == servidor.upper():\n if device['sub_type'] == 'nfs':\n nfs = True\n break\n else:\n nfs = False\n break\n if nfs:\n response_login = LoginNFS(config, servidor)\n else:\n response_login = LoginSambaWithOutID(config, servidor)\n if config['Always_ON'] == False:\n time.sleep(5)\n response_data6b = getsetupmenu(config)\n if nfs:\n response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',\n config)\n else:\n response_mount = mountSharedFolder(servidor, carpeta, '', '',\n config)\n response = json.loads(response_mount)\n if config['Autoscript'] == True:\n result = umountSharedFolder(config)\n if response['success'] == True:\n a = 'OK'\n else:\n a = 'FAILURE'\n return a\n else:\n print(\n 'No se puede conectar, revisa las configuraciones o que el OPPO este encendido o en reposo'\n )\n\n\ndef test_emby(config):\n try:\n EmbySession = EmbyHttp(config)\n user_info = EmbySession.user_info\n if user_info['SessionInfo']['Id'] != '':\n return 'OK'\n else:\n return 'FAILED'\n except:\n return 'FAILED'\n\n\n<mask token>\n\n\ndef carga_libraries(config):\n try:\n EmbySession = EmbyHttp(config)\n views_list = EmbySession.get_user_views(EmbySession.user_info[\n 'User']['Id'])\n libraries = []\n for view in views_list:\n library = {}\n library['Name'] = view['Name']\n library['Id'] = view['Id']\n library['Active'] = False\n try:\n lib_list = config['Libraries']\n except:\n lib_list = {}\n for lib in lib_list:\n if lib['Id'] == view['Id']:\n library['Active'] = lib['Active']\n libraries.append(library)\n config['Libraries'] = libraries\n return 0\n except:\n return 1\n\n\ndef is_library_active(config, libraryname):\n for library in config['Libraries']:\n if library['Name'] == libraryname:\n return library['Active']\n return False\n\n\ndef get_selectableFolders(config):\n EmbySession = EmbyHttp(config)\n MediaFolders = EmbySession.get_emby_selectablefolders()\n servers = []\n for Folder in MediaFolders:\n index = 1\n active = is_library_active(config, Folder['Name'])\n if config['enable_all_libraries'] == True:\n active = True\n if active == True:\n for SubFolder in Folder['SubFolders']:\n server = {}\n server['Id'] = SubFolder['Id']\n if index > 1:\n server['name'] = Folder['Name'] + '(' + str(index) + ')'\n else:\n server['name'] = Folder['Name']\n server['Emby_Path'] = SubFolder['Path']\n server['Oppo_Path'] = '/'\n try:\n serv_list = config['servers']\n except:\n serv_list = {}\n for serv in serv_list:\n if server['Emby_Path'] == serv['Emby_Path']:\n server['name'] = serv['name']\n server['Oppo_Path'] = serv['Oppo_Path']\n server['Test_OK'] = serv['Test_OK']\n servers.append(server)\n index = index + 1\n config['servers'] = servers\n\n\ndef get_dir_folders(directory):\n os.chdir(directory)\n dirs = os.listdir('.')\n encontrado = False\n list_dir = []\n for x in dirs:\n if os.path.isdir(x):\n list_dir.append(x)\n return list_dir\n\n\n<mask token>\n\n\ndef get_devices(config):\n try:\n EmbySession = EmbyHttp(config)\n devices = EmbySession.get_emby_devices()\n index = 0\n dev_temp = []\n for device in devices['Items']:\n try:\n if device['Id'] != 'Xnoppo':\n device['Name'] = device['Name'] + ' / ' + device['AppName']\n device['Id'] = device['ReportedDeviceId']\n dev_temp.append(device)\n except:\n pass\n config['devices'] = dev_temp\n return 'OK'\n except:\n return 'FAILURE'\n\n\nclass MyServer(BaseHTTPRequestHandler):\n\n def do_GET(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = 
'\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/emby_conf.html':\n i = leer_file(html_path + 'emby_conf.html')\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/oppo_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'oppo_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/lib_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'lib_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/path_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'path_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/tv_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'tv_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/av_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'av_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/other_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'other_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/status.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'status.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/help.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'help.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/remote.html':\n i = leer_file(html_path + 'remote.html')\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/android-chrome-36x36.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'android-chrome-36x36.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/av-receiver-icon-2.jpg':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'av-receiver-icon-2.jpg')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/dragon.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'dragon.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/xnoppo_config':\n self.send_response(200)\n self.send_header('Content-type', 
'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_lib':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n carga_libraries(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_dev':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n get_devices(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/check_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = check_version(config)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/update_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = update_version(config, vers_path, cwd)\n restart()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/get_state':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = get_state()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/restart':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n a = 'Restarting'\n self.wfile.write(bytes(a, 'utf-8'))\n restart()\n if self.path == '/refresh_paths':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n get_selectableFolders(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/lang':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = cargar_lang(lang_path + config['language'] + separador +\n 'lang.js')\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path.find('/send_key?') >= 0:\n get_data = self.path\n print(get_data)\n a = len('/send_key?sendkey=')\n b = get_data[a:len(get_data)]\n print(b)\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n sendnotifyremote(config['Oppo_IP'])\n result = check_socket(config)\n if b == 'PON':\n if result == 0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('EJT', config)\n if config['BRDisc'] == True:\n time.sleep(1)\n response_data_on = sendremotekey('EJT', config)\n time.sleep(1)\n response_data6b = getsetupmenu(config)\n else:\n response_data_on = sendremotekey(b, config)\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n a = 'ok'\n 
self.wfile.write(bytes(a, 'utf-8'))\n return 0\n if self.path == '/log.txt':\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')\n self.wfile.write(bytes(a))\n return 0\n else:\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(\n '<html><head><title>https://pythonbasics.org</title></head>',\n 'utf-8'))\n self.wfile.write(bytes('<p>Request: %s</p>' % self.path, 'utf-8'))\n self.wfile.write(bytes('<body>', 'utf-8'))\n self.wfile.write(bytes('<p>This is an example web server.</p>',\n 'utf-8'))\n self.wfile.write(bytes('</body></html>', 'utf-8'))\n\n def do_POST(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lib_path = cwd + separador + 'lib' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/save_config':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n if self.path == '/check_emby':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_emby(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n status = get_state()\n if status['Playstate'] == 'Not_Connected':\n save_config(cwd + separador + 'config.json', config)\n emby_wsocket.ws_config = config\n restart()\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/check_oppo':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_oppo(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n 
self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/test_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n server = json.loads(post_data.decode('utf-8'))\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = test_path(config, server)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(server))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(server), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/navigate_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n path_obj = json.loads(post_data.decode('utf-8'))\n path = path_obj['path']\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = navigate_folder(path, config)\n a_json = json.dumps(a)\n print(len(a_json))\n self.send_response(200)\n self.send_header('Content-Length', len(a_json))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/move_tv':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(tv_path + config['TV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/move_av':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(av_path + config['AV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/get_tv_key':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_key(config)\n if a == 
'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_conn':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_test_conn(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_tv_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_sources(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_av_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_hdmi_list(config)\n if a != None:\n config['AV_SOURCES'] = a\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_init':\n content_length = 
int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_end':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_set_prev(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_on':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_check_power(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_off':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_power_off(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_hdmi':\n content_length = int(self.headers['Content-Length'])\n post_data = 
self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_version():\n return '2.01'\n\n\ndef thread_function(ws_object):\n print('Thread: starting')\n ws_object.start()\n print('Thread: finishing')\n\n\ndef restart():\n print('restart')\n try:\n emby_wsocket.stop()\n except:\n sys.exit()\n sys.exit()\n print('fin restart')\n\n\ndef save_config(config_file, config):\n with open(config_file, 'w') as fw:\n json.dump(config, fw, indent=4)\n fw.close\n try:\n emby_wsocket.ws_config = config\n emby_wsocket.EmbySession.config = config\n except:\n emby_wsocket.ws_config = config\n\n\ndef get_state():\n status = {}\n status['Version'] = get_version()\n try:\n status['Playstate'] = emby_wsocket.EmbySession.playstate\n status['playedtitle'] = emby_wsocket.EmbySession.playedtitle\n status['server'] = emby_wsocket.EmbySession.server\n status['folder'] = emby_wsocket.EmbySession.folder\n status['filename'] = emby_wsocket.EmbySession.filename\n status['CurrentData'] = emby_wsocket.EmbySession.currentdata\n except:\n status['Playstate'] = 'Not_Connected'\n status['playedtitle'] = ''\n status['server'] = ''\n status['folder'] = ''\n status['filename'] = ''\n status['CurrentData'] = ''\n status['cpu_perc'] = psutil.cpu_percent()\n status['mem_perc'] = psutil.virtual_memory().percent\n print(psutil.virtual_memory().percent)\n print(status)\n return status\n\n\ndef cargar_config(config_file, tv_path, av_path, lang_path):\n with open(config_file, 'r') as f:\n config = json.load(f)\n f.close\n config['Version'] = get_version()\n default = config.get('Autoscript', False)\n config['Autoscript'] = default\n default = config.get('enable_all_libraries', False)\n config['enable_all_libraries'] = default\n default = config.get('TV_model', '')\n config['TV_model'] = default\n default = config.get('TV_SOURCES', [])\n config['TV_SOURCES'] = default\n default = config.get('AV_model', '')\n config['AV_model'] = default\n default = config.get('AV_SOURCES', [])\n config['AV_SOURCES'] = default\n default = config.get('TV_script_init', '')\n config['TV_script_init'] = default\n default = config.get('TV_script_end', '')\n config['TV_script_end'] = default\n default = config.get('av_delay_hdmi', 0)\n config['av_delay_hdmi'] = default\n default = config.get('AV_Port', 23)\n config['AV_Port'] = default\n default = config.get('timeout_oppo_mount', 60)\n config['timeout_oppo_mount'] = default\n default = config.get('language', 'es-ES')\n config['language'] = default\n default = config.get('default_nfs', False)\n config['default_nfs'] = default\n default = config.get('wait_nfs', False)\n config['wait_nfs'] = default\n default = config.get('refresh_time', 5)\n config['refresh_time'] = default\n default = config.get('check_beta', False)\n config['check_beta'] = default\n default = config.get('smbtrick', False)\n config['smbtrick'] = default\n default = config.get('BRDisc', False)\n config['BRDisc'] = default\n edit_server = 0\n server_list = config['servers']\n for server in server_list:\n default = server.get('Test_OK', False)\n server_list[edit_server]['Test_OK'] = default\n edit_server = edit_server + 1\n if config['TV'] == 'True':\n config['TV'] = True\n if config['TV'] == 'False':\n config['TV'] = False\n if config['AV'] == 'True':\n config['AV'] = True\n if config['AV'] == 'False':\n config['AV'] = False\n config['servers'] = server_list\n config['tv_dirs'] = get_dir_folders(tv_path)\n config['av_dirs'] = get_dir_folders(av_path)\n config['langs'] = get_dir_folders(lang_path)\n return config\n\n\ndef check_version(config):\n url = (\n 
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n        )\n    headers = {}\n    response = requests.get(url, headers=headers)\n    version = json.loads(response.text)\n    print(version)\n    print(config['check_beta'])\n    if config['check_beta'] == True:\n        last_version = version['beta_version']\n        last_version_file = version['beta_version_file']\n    else:\n        last_version = version['curr_version']\n        last_version_file = version['curr_version_file']\n    xno_version = get_version()\n    resp = {}\n    resp['version'] = last_version\n    resp['file'] = last_version_file\n    print(xno_version)\n    print(last_version)\n    if xno_version < last_version:\n        resp['new_version'] = True\n    else:\n        resp['new_version'] = False\n    print(resp)\n    return resp\n\n\ndef update_version(config, vers_path, cwd):\n    url = (\n        'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n        )\n    headers = {}\n    response = requests.get(url, headers=headers)\n    version = json.loads(response.text)\n    print(version)\n    if config['check_beta'] == True:\n        last_version = version['beta_version']\n        last_version_file = version['beta_version_file']\n    else:\n        last_version = version['curr_version']\n        last_version_file = version['curr_version_file']\n    url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +\n        last_version_file)\n    headers = {}\n    response2 = requests.get(url2, headers=headers)\n    filename = vers_path + last_version_file\n    with open(filename, 'wb') as f:\n        f.write(response2.content)\n    shutil.unpack_archive(filename, cwd)\n    if sys.platform.startswith('win'):\n        separador = '\\\\'\n    else:\n        separador = '/'\n    tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n        separador + 'TV' + separador)\n    av_path = (cwd + separador + 'web' + separador + 'libraries' +\n        separador + 'AV' + separador)\n    lib_path = cwd + separador + 'lib' + separador\n    if config['TV'] == True and config['TV_model'] != '':\n        move_files(tv_path + config['TV_model'], lib_path)\n    if config['AV'] == True and config['AV_model'] != '':\n        move_files(av_path + config['AV_model'], lib_path)\n    resp = {}\n    resp['version'] = last_version\n    resp['file'] = last_version_file\n    resp['new_version'] = False\n    return resp\n\n\ndef cargar_lang(config_file):\n    with open(config_file.encode(sys.getfilesystemencoding()), 'r',\n        encoding='latin-1') as f:\n        config = json.load(f)\n    return config\n\n\ndef leer_file(web_file):\n    with open(web_file, 'r', encoding='utf8') as f:\n        num = f.read()\n    return num\n\n\ndef leer_img(web_file):\n    with open(web_file, 'rb') as f:\n        num = f.read()\n    return num\n\n\ndef test_path(config, server):\n    rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)\n    result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])\n    return result2\n\n\ndef get_mount_path(movie, server_data):\n    movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])\n    movie = movie.replace('\\\\\\\\', '\\\\')\n    movie = movie.replace('\\\\', '/')\n    word = '/'\n    inicio = movie.find(word)\n    inicio = inicio + 1\n    final = movie.find(word, inicio, len(movie))\n    servidor = movie[inicio:final]\n    ultimo = final + 1\n    result = final + 1\n    while result > 0:\n        ultimo = result + 1\n        result = movie.find(word, ultimo, len(movie))\n    fichero = movie[ultimo:len(movie)]\n    final = final + 1\n    ultimo = ultimo - 1\n    carpeta = movie[final:ultimo]\n    resultado = {}\n    resultado['Servidor'] = servidor\n    resultado['Carpeta'] = carpeta\n    resultado['Fichero'] = fichero\n    return resultado\n\n\ndef test_mount_path(config, servidor, carpeta):\n    
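# probe the Oppo: sign in, poll getdevicelist until the share list is populated,\n    # choose NFS vs SMB for this server, then test-mount the folder and report OK/FAILURE\n    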
sendnotifyremote(config['Oppo_IP'])\n    result = check_socket(config)\n    if result == 0:\n        response_data6a = getmainfirmwareversion(config)\n        response_data6c = getdevicelist(config)\n        response_data6b = getsetupmenu(config)\n        response_data6c = OppoSignin(config)\n        response_data6d = getdevicelist(config)\n        response_data6e = getglobalinfo(config)\n        response_data6f = getdevicelist(config)\n        response_data_on = sendremotekey('EJT', config)\n        time.sleep(1)\n        response_data6b = getsetupmenu(config)\n        while response_data6f.find('devicelist\":[]') > 0:\n            time.sleep(1)\n            response_data6f = getdevicelist(config)\n        response_data_on = sendremotekey('QPW', config)\n        device_list = json.loads(response_data6f)\n        if config['DebugLevel'] > 0:\n            print(device_list)\n        nfs = config['default_nfs']\n        for device in device_list['devicelist']:\n            if device['name'].upper() == servidor.upper():\n                if device['sub_type'] == 'nfs':\n                    nfs = True\n                    break\n                else:\n                    nfs = False\n                    break\n        if nfs:\n            response_login = LoginNFS(config, servidor)\n        else:\n            response_login = LoginSambaWithOutID(config, servidor)\n        if config['Always_ON'] == False:\n            time.sleep(5)\n            response_data6b = getsetupmenu(config)\n        if nfs:\n            response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',\n                config)\n        else:\n            response_mount = mountSharedFolder(servidor, carpeta, '', '',\n                config)\n        response = json.loads(response_mount)\n        if config['Autoscript'] == True:\n            result = umountSharedFolder(config)\n        if response['success'] == True:\n            a = 'OK'\n        else:\n            a = 'FAILURE'\n        return a\n    else:\n        print(\n            'Cannot connect; check the settings and make sure the OPPO is powered on or in standby'\n            )\n\n\ndef test_emby(config):\n    try:\n        EmbySession = EmbyHttp(config)\n        user_info = EmbySession.user_info\n        if user_info['SessionInfo']['Id'] != '':\n            return 'OK'\n        else:\n            return 'FAILED'\n    except:\n        return 'FAILED'\n\n\ndef test_oppo(config):\n    result = check_socket(config)\n    if result == 0:\n        return 'OK'\n    else:\n        return 'FAILED'\n\n\ndef carga_libraries(config):\n    try:\n        EmbySession = EmbyHttp(config)\n        views_list = EmbySession.get_user_views(EmbySession.user_info[\n            'User']['Id'])\n        libraries = []\n        for view in views_list:\n            library = {}\n            library['Name'] = view['Name']\n            library['Id'] = view['Id']\n            library['Active'] = False\n            try:\n                lib_list = config['Libraries']\n            except:\n                lib_list = {}\n            for lib in lib_list:\n                if lib['Id'] == view['Id']:\n                    library['Active'] = lib['Active']\n            libraries.append(library)\n        config['Libraries'] = libraries\n        return 0\n    except:\n        return 1\n\n\ndef is_library_active(config, libraryname):\n    for library in config['Libraries']:\n        if library['Name'] == libraryname:\n            return library['Active']\n    return False\n\n\ndef get_selectableFolders(config):\n    EmbySession = EmbyHttp(config)\n    MediaFolders = EmbySession.get_emby_selectablefolders()\n    servers = []\n    for Folder in MediaFolders:\n        index = 1\n        active = is_library_active(config, Folder['Name'])\n        if config['enable_all_libraries'] == True:\n            active = True\n        if active == True:\n            for SubFolder in Folder['SubFolders']:\n                server = {}\n                server['Id'] = SubFolder['Id']\n                if index > 1:\n                    server['name'] = Folder['Name'] + '(' + str(index) + ')'\n                else:\n                    server['name'] = Folder['Name']\n                server['Emby_Path'] = SubFolder['Path']\n                server['Oppo_Path'] = '/'\n                try:\n                    serv_list = config['servers']\n                except:\n                    serv_list = {}\n                for serv in serv_list:\n                    if server['Emby_Path'] == serv['Emby_Path']:\n                        server['name'] = serv['name']\n                        server['Oppo_Path'] = serv['Oppo_Path']\n                        server['Test_OK'] = serv['Test_OK']\n                
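# register the share, keeping any previously saved name, Oppo path and test status\n                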
servers.append(server)\n index = index + 1\n config['servers'] = servers\n\n\ndef get_dir_folders(directory):\n os.chdir(directory)\n dirs = os.listdir('.')\n encontrado = False\n list_dir = []\n for x in dirs:\n if os.path.isdir(x):\n list_dir.append(x)\n return list_dir\n\n\n<mask token>\n\n\ndef get_devices(config):\n try:\n EmbySession = EmbyHttp(config)\n devices = EmbySession.get_emby_devices()\n index = 0\n dev_temp = []\n for device in devices['Items']:\n try:\n if device['Id'] != 'Xnoppo':\n device['Name'] = device['Name'] + ' / ' + device['AppName']\n device['Id'] = device['ReportedDeviceId']\n dev_temp.append(device)\n except:\n pass\n config['devices'] = dev_temp\n return 'OK'\n except:\n return 'FAILURE'\n\n\nclass MyServer(BaseHTTPRequestHandler):\n\n def do_GET(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/emby_conf.html':\n i = leer_file(html_path + 'emby_conf.html')\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/oppo_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'oppo_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/lib_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'lib_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/path_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'path_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/tv_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'tv_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/av_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'av_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/other_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'other_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/status.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'status.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/help.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'help.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/remote.html':\n i = leer_file(html_path + 'remote.html')\n self.send_response(200)\n 
self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/android-chrome-36x36.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'android-chrome-36x36.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/av-receiver-icon-2.jpg':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'av-receiver-icon-2.jpg')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/dragon.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'dragon.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/xnoppo_config':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_lib':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n carga_libraries(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_dev':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n get_devices(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/check_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = check_version(config)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/update_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = update_version(config, vers_path, cwd)\n restart()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/get_state':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = get_state()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/restart':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n a = 'Restarting'\n self.wfile.write(bytes(a, 'utf-8'))\n restart()\n if self.path == '/refresh_paths':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n get_selectableFolders(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/lang':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = cargar_lang(lang_path + config['language'] + separador +\n 'lang.js')\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path.find('/send_key?') >= 0:\n get_data = self.path\n print(get_data)\n a = len('/send_key?sendkey=')\n b = 
get_data[a:len(get_data)]\n print(b)\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n sendnotifyremote(config['Oppo_IP'])\n result = check_socket(config)\n if b == 'PON':\n if result == 0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('EJT', config)\n if config['BRDisc'] == True:\n time.sleep(1)\n response_data_on = sendremotekey('EJT', config)\n time.sleep(1)\n response_data6b = getsetupmenu(config)\n else:\n response_data_on = sendremotekey(b, config)\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n a = 'ok'\n self.wfile.write(bytes(a, 'utf-8'))\n return 0\n if self.path == '/log.txt':\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')\n self.wfile.write(bytes(a))\n return 0\n else:\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(\n '<html><head><title>https://pythonbasics.org</title></head>',\n 'utf-8'))\n self.wfile.write(bytes('<p>Request: %s</p>' % self.path, 'utf-8'))\n self.wfile.write(bytes('<body>', 'utf-8'))\n self.wfile.write(bytes('<p>This is an example web server.</p>',\n 'utf-8'))\n self.wfile.write(bytes('</body></html>', 'utf-8'))\n\n def do_POST(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lib_path = cwd + separador + 'lib' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/save_config':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n if self.path == '/check_emby':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_emby(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n status = get_state()\n if status['Playstate'] == 'Not_Connected':\n 
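# only persist the new config and restart when no playback session is active\n                    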
save_config(cwd + separador + 'config.json', config)\n emby_wsocket.ws_config = config\n restart()\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/check_oppo':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_oppo(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/test_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n server = json.loads(post_data.decode('utf-8'))\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = test_path(config, server)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(server))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(server), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/navigate_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n path_obj = json.loads(post_data.decode('utf-8'))\n path = path_obj['path']\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = navigate_folder(path, config)\n a_json = json.dumps(a)\n print(len(a_json))\n self.send_response(200)\n self.send_header('Content-Length', len(a_json))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/move_tv':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(tv_path + config['TV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n 
self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/move_av':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(av_path + config['AV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/get_tv_key':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_key(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_conn':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_test_conn(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_tv_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_sources(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_av_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = 
self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_hdmi_list(config)\n if a != None:\n config['AV_SOURCES'] = a\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_init':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_end':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_set_prev(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_on':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_check_power(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_off':\n content_length = 
int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_power_off(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_hdmi':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_version():\n return '2.01'\n\n\ndef thread_function(ws_object):\n print('Thread: starting')\n ws_object.start()\n print('Thread: finishing')\n\n\ndef restart():\n print('restart')\n try:\n emby_wsocket.stop()\n except:\n sys.exit()\n sys.exit()\n print('fin restart')\n\n\ndef save_config(config_file, config):\n with open(config_file, 'w') as fw:\n json.dump(config, fw, indent=4)\n fw.close\n try:\n emby_wsocket.ws_config = config\n emby_wsocket.EmbySession.config = config\n except:\n emby_wsocket.ws_config = config\n\n\ndef get_state():\n status = {}\n status['Version'] = get_version()\n try:\n status['Playstate'] = emby_wsocket.EmbySession.playstate\n status['playedtitle'] = emby_wsocket.EmbySession.playedtitle\n status['server'] = emby_wsocket.EmbySession.server\n status['folder'] = emby_wsocket.EmbySession.folder\n status['filename'] = emby_wsocket.EmbySession.filename\n status['CurrentData'] = emby_wsocket.EmbySession.currentdata\n except:\n status['Playstate'] = 'Not_Connected'\n status['playedtitle'] = ''\n status['server'] = ''\n status['folder'] = ''\n status['filename'] = ''\n status['CurrentData'] = ''\n status['cpu_perc'] = psutil.cpu_percent()\n status['mem_perc'] = psutil.virtual_memory().percent\n print(psutil.virtual_memory().percent)\n print(status)\n return status\n\n\ndef cargar_config(config_file, tv_path, av_path, lang_path):\n with open(config_file, 'r') as f:\n config = json.load(f)\n f.close\n config['Version'] = get_version()\n default = config.get('Autoscript', False)\n config['Autoscript'] = default\n default = config.get('enable_all_libraries', False)\n config['enable_all_libraries'] = default\n default = config.get('TV_model', '')\n config['TV_model'] = default\n default = config.get('TV_SOURCES', [])\n config['TV_SOURCES'] = default\n default = config.get('AV_model', '')\n config['AV_model'] = default\n default = config.get('AV_SOURCES', [])\n config['AV_SOURCES'] = default\n default = config.get('TV_script_init', '')\n config['TV_script_init'] = default\n default = config.get('TV_script_end', '')\n config['TV_script_end'] = default\n default = config.get('av_delay_hdmi', 0)\n config['av_delay_hdmi'] = default\n default = config.get('AV_Port', 23)\n config['AV_Port'] = default\n default = config.get('timeout_oppo_mount', 60)\n config['timeout_oppo_mount'] = default\n default = config.get('language', 'es-ES')\n config['language'] = default\n default = config.get('default_nfs', False)\n config['default_nfs'] = default\n default = config.get('wait_nfs', False)\n config['wait_nfs'] = default\n default = config.get('refresh_time', 5)\n config['refresh_time'] = default\n default = config.get('check_beta', False)\n config['check_beta'] = default\n default = config.get('smbtrick', False)\n config['smbtrick'] = default\n default = config.get('BRDisc', False)\n config['BRDisc'] = default\n edit_server = 0\n server_list = config['servers']\n for server in server_list:\n default = server.get('Test_OK', False)\n server_list[edit_server]['Test_OK'] = default\n edit_server = edit_server + 1\n if config['TV'] == 'True':\n config['TV'] = True\n if config['TV'] == 'False':\n config['TV'] = False\n if config['AV'] == 'True':\n config['AV'] = True\n if config['AV'] == 'False':\n config['AV'] = False\n config['servers'] = server_list\n config['tv_dirs'] = get_dir_folders(tv_path)\n config['av_dirs'] = get_dir_folders(av_path)\n config['langs'] = get_dir_folders(lang_path)\n return config\n\n\ndef check_version(config):\n url = (\n 
'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n        )\n    headers = {}\n    response = requests.get(url, headers=headers)\n    version = json.loads(response.text)\n    print(version)\n    print(config['check_beta'])\n    if config['check_beta'] == True:\n        last_version = version['beta_version']\n        last_version_file = version['beta_version_file']\n    else:\n        last_version = version['curr_version']\n        last_version_file = version['curr_version_file']\n    xno_version = get_version()\n    resp = {}\n    resp['version'] = last_version\n    resp['file'] = last_version_file\n    print(xno_version)\n    print(last_version)\n    if xno_version < last_version:\n        resp['new_version'] = True\n    else:\n        resp['new_version'] = False\n    print(resp)\n    return resp\n\n\ndef update_version(config, vers_path, cwd):\n    url = (\n        'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n        )\n    headers = {}\n    response = requests.get(url, headers=headers)\n    version = json.loads(response.text)\n    print(version)\n    if config['check_beta'] == True:\n        last_version = version['beta_version']\n        last_version_file = version['beta_version_file']\n    else:\n        last_version = version['curr_version']\n        last_version_file = version['curr_version_file']\n    url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +\n        last_version_file)\n    headers = {}\n    response2 = requests.get(url2, headers=headers)\n    filename = vers_path + last_version_file\n    with open(filename, 'wb') as f:\n        f.write(response2.content)\n    shutil.unpack_archive(filename, cwd)\n    if sys.platform.startswith('win'):\n        separador = '\\\\'\n    else:\n        separador = '/'\n    tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n        separador + 'TV' + separador)\n    av_path = (cwd + separador + 'web' + separador + 'libraries' +\n        separador + 'AV' + separador)\n    lib_path = cwd + separador + 'lib' + separador\n    if config['TV'] == True and config['TV_model'] != '':\n        move_files(tv_path + config['TV_model'], lib_path)\n    if config['AV'] == True and config['AV_model'] != '':\n        move_files(av_path + config['AV_model'], lib_path)\n    resp = {}\n    resp['version'] = last_version\n    resp['file'] = last_version_file\n    resp['new_version'] = False\n    return resp\n\n\ndef cargar_lang(config_file):\n    with open(config_file.encode(sys.getfilesystemencoding()), 'r',\n        encoding='latin-1') as f:\n        config = json.load(f)\n    return config\n\n\ndef leer_file(web_file):\n    with open(web_file, 'r', encoding='utf8') as f:\n        num = f.read()\n    return num\n\n\ndef leer_img(web_file):\n    with open(web_file, 'rb') as f:\n        num = f.read()\n    return num\n\n\ndef test_path(config, server):\n    rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)\n    result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])\n    return result2\n\n\ndef get_mount_path(movie, server_data):\n    movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])\n    movie = movie.replace('\\\\\\\\', '\\\\')\n    movie = movie.replace('\\\\', '/')\n    word = '/'\n    inicio = movie.find(word)\n    inicio = inicio + 1\n    final = movie.find(word, inicio, len(movie))\n    servidor = movie[inicio:final]\n    ultimo = final + 1\n    result = final + 1\n    while result > 0:\n        ultimo = result + 1\n        result = movie.find(word, ultimo, len(movie))\n    fichero = movie[ultimo:len(movie)]\n    final = final + 1\n    ultimo = ultimo - 1\n    carpeta = movie[final:ultimo]\n    resultado = {}\n    resultado['Servidor'] = servidor\n    resultado['Carpeta'] = carpeta\n    resultado['Fichero'] = fichero\n    return resultado\n\n\ndef test_mount_path(config, servidor, carpeta):\n    
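# probe the Oppo: sign in, poll getdevicelist until the share list is populated,\n    # choose NFS vs SMB for this server, then test-mount the folder and report OK/FAILURE\n    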
sendnotifyremote(config['Oppo_IP'])\n    result = check_socket(config)\n    if result == 0:\n        response_data6a = getmainfirmwareversion(config)\n        response_data6c = getdevicelist(config)\n        response_data6b = getsetupmenu(config)\n        response_data6c = OppoSignin(config)\n        response_data6d = getdevicelist(config)\n        response_data6e = getglobalinfo(config)\n        response_data6f = getdevicelist(config)\n        response_data_on = sendremotekey('EJT', config)\n        time.sleep(1)\n        response_data6b = getsetupmenu(config)\n        while response_data6f.find('devicelist\":[]') > 0:\n            time.sleep(1)\n            response_data6f = getdevicelist(config)\n        response_data_on = sendremotekey('QPW', config)\n        device_list = json.loads(response_data6f)\n        if config['DebugLevel'] > 0:\n            print(device_list)\n        nfs = config['default_nfs']\n        for device in device_list['devicelist']:\n            if device['name'].upper() == servidor.upper():\n                if device['sub_type'] == 'nfs':\n                    nfs = True\n                    break\n                else:\n                    nfs = False\n                    break\n        if nfs:\n            response_login = LoginNFS(config, servidor)\n        else:\n            response_login = LoginSambaWithOutID(config, servidor)\n        if config['Always_ON'] == False:\n            time.sleep(5)\n            response_data6b = getsetupmenu(config)\n        if nfs:\n            response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',\n                config)\n        else:\n            response_mount = mountSharedFolder(servidor, carpeta, '', '',\n                config)\n        response = json.loads(response_mount)\n        if config['Autoscript'] == True:\n            result = umountSharedFolder(config)\n        if response['success'] == True:\n            a = 'OK'\n        else:\n            a = 'FAILURE'\n        return a\n    else:\n        print(\n            'Cannot connect; check the settings and make sure the OPPO is powered on or in standby'\n            )\n\n\ndef test_emby(config):\n    try:\n        EmbySession = EmbyHttp(config)\n        user_info = EmbySession.user_info\n        if user_info['SessionInfo']['Id'] != '':\n            return 'OK'\n        else:\n            return 'FAILED'\n    except:\n        return 'FAILED'\n\n\ndef test_oppo(config):\n    result = check_socket(config)\n    if result == 0:\n        return 'OK'\n    else:\n        return 'FAILED'\n\n\ndef carga_libraries(config):\n    try:\n        EmbySession = EmbyHttp(config)\n        views_list = EmbySession.get_user_views(EmbySession.user_info[\n            'User']['Id'])\n        libraries = []\n        for view in views_list:\n            library = {}\n            library['Name'] = view['Name']\n            library['Id'] = view['Id']\n            library['Active'] = False\n            try:\n                lib_list = config['Libraries']\n            except:\n                lib_list = {}\n            for lib in lib_list:\n                if lib['Id'] == view['Id']:\n                    library['Active'] = lib['Active']\n            libraries.append(library)\n        config['Libraries'] = libraries\n        return 0\n    except:\n        return 1\n\n\ndef is_library_active(config, libraryname):\n    for library in config['Libraries']:\n        if library['Name'] == libraryname:\n            return library['Active']\n    return False\n\n\ndef get_selectableFolders(config):\n    EmbySession = EmbyHttp(config)\n    MediaFolders = EmbySession.get_emby_selectablefolders()\n    servers = []\n    for Folder in MediaFolders:\n        index = 1\n        active = is_library_active(config, Folder['Name'])\n        if config['enable_all_libraries'] == True:\n            active = True\n        if active == True:\n            for SubFolder in Folder['SubFolders']:\n                server = {}\n                server['Id'] = SubFolder['Id']\n                if index > 1:\n                    server['name'] = Folder['Name'] + '(' + str(index) + ')'\n                else:\n                    server['name'] = Folder['Name']\n                server['Emby_Path'] = SubFolder['Path']\n                server['Oppo_Path'] = '/'\n                try:\n                    serv_list = config['servers']\n                except:\n                    serv_list = {}\n                for serv in serv_list:\n                    if server['Emby_Path'] == serv['Emby_Path']:\n                        server['name'] = serv['name']\n                        server['Oppo_Path'] = serv['Oppo_Path']\n                        server['Test_OK'] = serv['Test_OK']\n                
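# register the share, keeping any previously saved name, Oppo path and test status\n                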
servers.append(server)\n index = index + 1\n config['servers'] = servers\n\n\ndef get_dir_folders(directory):\n os.chdir(directory)\n dirs = os.listdir('.')\n encontrado = False\n list_dir = []\n for x in dirs:\n if os.path.isdir(x):\n list_dir.append(x)\n return list_dir\n\n\ndef move_files(src, dest):\n os.chdir(src)\n src_files = os.listdir('.')\n for file_name in src_files:\n full_file_name = os.path.join(src, file_name)\n if os.path.isfile(full_file_name):\n shutil.copy(full_file_name, dest)\n return 0\n\n\ndef get_devices(config):\n try:\n EmbySession = EmbyHttp(config)\n devices = EmbySession.get_emby_devices()\n index = 0\n dev_temp = []\n for device in devices['Items']:\n try:\n if device['Id'] != 'Xnoppo':\n device['Name'] = device['Name'] + ' / ' + device['AppName']\n device['Id'] = device['ReportedDeviceId']\n dev_temp.append(device)\n except:\n pass\n config['devices'] = dev_temp\n return 'OK'\n except:\n return 'FAILURE'\n\n\nclass MyServer(BaseHTTPRequestHandler):\n\n def do_GET(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/emby_conf.html':\n i = leer_file(html_path + 'emby_conf.html')\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/oppo_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'oppo_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/lib_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'lib_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/path_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'path_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/tv_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'tv_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/av_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'av_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/other_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'other_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/status.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'status.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/help.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n 
i = leer_file(html_path + 'help.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/remote.html':\n i = leer_file(html_path + 'remote.html')\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/android-chrome-36x36.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'android-chrome-36x36.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/av-receiver-icon-2.jpg':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'av-receiver-icon-2.jpg')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/dragon.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'dragon.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/xnoppo_config':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_lib':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n carga_libraries(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_dev':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n get_devices(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/check_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = check_version(config)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/update_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = update_version(config, vers_path, cwd)\n restart()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/get_state':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = get_state()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/restart':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n a = 'Restarting'\n self.wfile.write(bytes(a, 'utf-8'))\n restart()\n if self.path == '/refresh_paths':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n get_selectableFolders(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/lang':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = cargar_lang(lang_path + config['language'] + separador 
+\n 'lang.js')\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path.find('/send_key?') >= 0:\n get_data = self.path\n print(get_data)\n a = len('/send_key?sendkey=')\n b = get_data[a:len(get_data)]\n print(b)\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n sendnotifyremote(config['Oppo_IP'])\n result = check_socket(config)\n if b == 'PON':\n if result == 0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('EJT', config)\n if config['BRDisc'] == True:\n time.sleep(1)\n response_data_on = sendremotekey('EJT', config)\n time.sleep(1)\n response_data6b = getsetupmenu(config)\n else:\n response_data_on = sendremotekey(b, config)\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n a = 'ok'\n self.wfile.write(bytes(a, 'utf-8'))\n return 0\n if self.path == '/log.txt':\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')\n self.wfile.write(bytes(a))\n return 0\n else:\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(\n '<html><head><title>https://pythonbasics.org</title></head>',\n 'utf-8'))\n self.wfile.write(bytes('<p>Request: %s</p>' % self.path, 'utf-8'))\n self.wfile.write(bytes('<body>', 'utf-8'))\n self.wfile.write(bytes('<p>This is an example web server.</p>',\n 'utf-8'))\n self.wfile.write(bytes('</body></html>', 'utf-8'))\n\n def do_POST(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lib_path = cwd + separador + 'lib' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/save_config':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n if self.path == '/check_emby':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_emby(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n 
self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n status = get_state()\n if status['Playstate'] == 'Not_Connected':\n save_config(cwd + separador + 'config.json', config)\n emby_wsocket.ws_config = config\n restart()\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/check_oppo':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_oppo(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/test_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n server = json.loads(post_data.decode('utf-8'))\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = test_path(config, server)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(server))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(server), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/navigate_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n path_obj = json.loads(post_data.decode('utf-8'))\n path = path_obj['path']\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = navigate_folder(path, config)\n a_json = json.dumps(a)\n print(len(a_json))\n self.send_response(200)\n self.send_header('Content-Length', len(a_json))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/move_tv':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(tv_path + config['TV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n 
self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/move_av':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(av_path + config['AV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/get_tv_key':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_key(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_conn':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_test_conn(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_tv_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_sources(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n 
self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_av_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_hdmi_list(config)\n if a != None:\n config['AV_SOURCES'] = a\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_init':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_end':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_set_prev(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_on':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_check_power(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n 
self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_off':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_power_off(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_hdmi':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n\n\n<mask token>\n",
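
Editor's note: the `do_GET`/`do_POST` handlers in the steps above inline the same response boilerplate at every endpoint, and they set `Content-Length` to `len(config)` (the dict's key count, not the byte length of the serialized body) while labeling JSON payloads as `text/html`. Below is a minimal, hedged sketch of a helper those branches could be factored into; `JSONHandlerMixin` and `send_json` are illustrative names of mine, not part of the dataset row or the Xnoppo API.

import json
from http.server import BaseHTTPRequestHandler

class JSONHandlerMixin(BaseHTTPRequestHandler):
    """Hypothetical helper; the dataset's handlers inline this logic per endpoint."""

    def send_json(self, payload, status=200):
        # Serialize once so Content-Length is the byte length of the body,
        # not len(payload) as in the original branches.
        body = json.dumps(payload).encode('utf-8')
        self.send_response(status)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Content-Length', str(len(body)))
        # CORS headers copied from the original do_POST branches.
        self.send_header('Access-Control-Allow-Credentials', 'true')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.end_headers()
        self.wfile.write(body)

With such a helper, each endpoint branch reduces to parsing the request body and calling `self.send_json(config)` or `self.send_json('ERROR', status=300)`.
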
"step-4": "import http.server\nimport socketserver\nfrom http.server import BaseHTTPRequestHandler, HTTPServer\nimport time\nimport json\nimport io\nimport urllib\nimport requests\nfrom lib.Emby_ws import xnoppo_ws\nfrom lib.Emby_http import *\nfrom lib.Xnoppo import *\nfrom lib.Xnoppo_TV import *\nimport lib.Xnoppo_AVR\nimport shutil\nimport asyncio\nimport threading\nimport logging\nimport logging.handlers\nimport psutil\n\n\ndef get_version():\n return '2.01'\n\n\ndef thread_function(ws_object):\n print('Thread: starting')\n ws_object.start()\n print('Thread: finishing')\n\n\ndef restart():\n print('restart')\n try:\n emby_wsocket.stop()\n except:\n sys.exit()\n sys.exit()\n print('fin restart')\n\n\ndef save_config(config_file, config):\n with open(config_file, 'w') as fw:\n json.dump(config, fw, indent=4)\n fw.close\n try:\n emby_wsocket.ws_config = config\n emby_wsocket.EmbySession.config = config\n except:\n emby_wsocket.ws_config = config\n\n\ndef get_state():\n status = {}\n status['Version'] = get_version()\n try:\n status['Playstate'] = emby_wsocket.EmbySession.playstate\n status['playedtitle'] = emby_wsocket.EmbySession.playedtitle\n status['server'] = emby_wsocket.EmbySession.server\n status['folder'] = emby_wsocket.EmbySession.folder\n status['filename'] = emby_wsocket.EmbySession.filename\n status['CurrentData'] = emby_wsocket.EmbySession.currentdata\n except:\n status['Playstate'] = 'Not_Connected'\n status['playedtitle'] = ''\n status['server'] = ''\n status['folder'] = ''\n status['filename'] = ''\n status['CurrentData'] = ''\n status['cpu_perc'] = psutil.cpu_percent()\n status['mem_perc'] = psutil.virtual_memory().percent\n print(psutil.virtual_memory().percent)\n print(status)\n return status\n\n\ndef cargar_config(config_file, tv_path, av_path, lang_path):\n with open(config_file, 'r') as f:\n config = json.load(f)\n f.close\n config['Version'] = get_version()\n default = config.get('Autoscript', False)\n config['Autoscript'] = default\n default = config.get('enable_all_libraries', False)\n config['enable_all_libraries'] = default\n default = config.get('TV_model', '')\n config['TV_model'] = default\n default = config.get('TV_SOURCES', [])\n config['TV_SOURCES'] = default\n default = config.get('AV_model', '')\n config['AV_model'] = default\n default = config.get('AV_SOURCES', [])\n config['AV_SOURCES'] = default\n default = config.get('TV_script_init', '')\n config['TV_script_init'] = default\n default = config.get('TV_script_end', '')\n config['TV_script_end'] = default\n default = config.get('av_delay_hdmi', 0)\n config['av_delay_hdmi'] = default\n default = config.get('AV_Port', 23)\n config['AV_Port'] = default\n default = config.get('timeout_oppo_mount', 60)\n config['timeout_oppo_mount'] = default\n default = config.get('language', 'es-ES')\n config['language'] = default\n default = config.get('default_nfs', False)\n config['default_nfs'] = default\n default = config.get('wait_nfs', False)\n config['wait_nfs'] = default\n default = config.get('refresh_time', 5)\n config['refresh_time'] = default\n default = config.get('check_beta', False)\n config['check_beta'] = default\n default = config.get('smbtrick', False)\n config['smbtrick'] = default\n default = config.get('BRDisc', False)\n config['BRDisc'] = default\n edit_server = 0\n server_list = config['servers']\n for server in server_list:\n default = server.get('Test_OK', False)\n server_list[edit_server]['Test_OK'] = default\n edit_server = edit_server + 1\n if config['TV'] == 'True':\n config['TV'] = True\n 
if config['TV'] == 'False':\n config['TV'] = False\n if config['AV'] == 'True':\n config['AV'] = True\n if config['AV'] == 'False':\n config['AV'] = False\n config['servers'] = server_list\n config['tv_dirs'] = get_dir_folders(tv_path)\n config['av_dirs'] = get_dir_folders(av_path)\n config['langs'] = get_dir_folders(lang_path)\n return config\n\n\ndef check_version(config):\n url = (\n 'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n )\n headers = {}\n response = requests.get(url, headers=headers)\n version = json.loads(response.text)\n print(version)\n print(config['check_beta'])\n if config['check_beta'] == True:\n last_version = version['beta_version']\n last_version_file = version['beta_version_file']\n else:\n last_version = version['curr_version']\n last_version_file = version['curr_version_file']\n xno_version = get_version()\n resp = {}\n resp['version'] = last_version\n resp['file'] = last_version_file\n print(xno_version)\n print(last_version)\n if xno_version < last_version:\n resp['new_version'] = True\n else:\n resp['new_version'] = False\n print(resp)\n return resp\n\n\ndef update_version(config, vers_path, cwd):\n url = (\n 'https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js'\n )\n headers = {}\n response = requests.get(url, headers=headers)\n version = json.loads(response.text)\n print(version)\n if config['check_beta'] == True:\n last_version = version['beta_version']\n last_version_file = version['beta_version_file']\n else:\n last_version = version['curr_version']\n last_version_file = version['curr_version_file']\n url2 = ('https://github.com/siberian-git/Xnoppo/raw/main/versions/' +\n last_version_file)\n headers = {}\n response2 = requests.get(url2, headers=headers)\n filename = vers_path + last_version_file\n with open(filename, 'wb') as f:\n f.write(response2.content)\n f.close()\n shutil.unpack_archive(filename, cwd)\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n if config['TV'] == True and config['TV_model'] != '':\n move_files(tv_path + config['TV_model'], lib_path)\n if config['AV'] == True and config['AV_model'] != '':\n move_files(av_path + config['AV_model'], lib_path)\n resp = {}\n resp['version'] = last_version\n resp['file'] = last_version_file\n resp['new_version'] = False\n return resp\n\n\ndef cargar_lang(config_file):\n with open(config_file.encode(sys.getfilesystemencoding()), 'r',\n encoding='latin-1') as f:\n config = json.load(f)\n f.close\n return config\n\n\ndef leer_file(web_file):\n with open(web_file, 'r', encoding='utf8') as f:\n num = f.read()\n f.close\n return num\n\n\ndef leer_img(web_file):\n with open(web_file, 'rb') as f:\n num = f.read()\n f.close\n return num\n\n\ndef test_path(config, server):\n rutas = get_mount_path(server['Emby_Path'] + '/test.mkv', server)\n result2 = test_mount_path(config, rutas['Servidor'], rutas['Carpeta'])\n return result2\n\n\ndef get_mount_path(movie, server_data):\n movie = movie.replace(server_data['Emby_Path'], server_data['Oppo_Path'])\n movie = movie.replace('\\\\\\\\', '\\\\')\n movie = movie.replace('\\\\', '/')\n word = '/'\n inicio = movie.find(word)\n inicio = inicio + 1\n final = movie.find(word, inicio, len(movie))\n servidor = movie[inicio:final]\n ultimo = final + 1\n result = final + 1\n while result 
> 0:\n ultimo = result + 1\n result = movie.find(word, ultimo, len(movie))\n fichero = movie[ultimo:len(movie)]\n final = final + 1\n ultimo = ultimo - 1\n carpeta = movie[final:ultimo]\n resultado = {}\n resultado['Servidor'] = servidor\n resultado['Carpeta'] = carpeta\n resultado['Fichero'] = fichero\n return resultado\n\n\ndef test_mount_path(config, servidor, carpeta):\n sendnotifyremote(config['Oppo_IP'])\n result = check_socket(config)\n if result == 0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('EJT', config)\n time.sleep(1)\n response_data6b = getsetupmenu(config)\n while response_data6f.find('devicelist\":[]') > 0:\n time.sleep(1)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('QPW', config)\n device_list = json.loads(response_data6f)\n if config['DebugLevel'] > 0:\n print(device_list)\n nfs = config['default_nfs']\n for device in device_list['devicelist']:\n if device['name'].upper() == servidor.upper():\n if device['sub_type'] == 'nfs':\n nfs = True\n break\n else:\n nfs = False\n break\n if nfs:\n response_login = LoginNFS(config, servidor)\n else:\n response_login = LoginSambaWithOutID(config, servidor)\n if config['Always_ON'] == False:\n time.sleep(5)\n response_data6b = getsetupmenu(config)\n if nfs:\n response_mount = mountSharedNFSFolder(servidor, carpeta, '', '',\n config)\n else:\n response_mount = mountSharedFolder(servidor, carpeta, '', '',\n config)\n response = json.loads(response_mount)\n if config['Autoscript'] == True:\n result = umountSharedFolder(config)\n if response['success'] == True:\n a = 'OK'\n else:\n a = 'FAILURE'\n return a\n else:\n print(\n 'No se puede conectar, revisa las configuraciones o que el OPPO este encendido o en reposo'\n )\n\n\ndef test_emby(config):\n try:\n EmbySession = EmbyHttp(config)\n user_info = EmbySession.user_info\n if user_info['SessionInfo']['Id'] != '':\n return 'OK'\n else:\n return 'FAILED'\n except:\n return 'FAILED'\n\n\ndef test_oppo(config):\n result = check_socket(config)\n if result == 0:\n return 'OK'\n else:\n return 'FAILED'\n\n\ndef carga_libraries(config):\n try:\n EmbySession = EmbyHttp(config)\n views_list = EmbySession.get_user_views(EmbySession.user_info[\n 'User']['Id'])\n libraries = []\n for view in views_list:\n library = {}\n library['Name'] = view['Name']\n library['Id'] = view['Id']\n library['Active'] = False\n try:\n lib_list = config['Libraries']\n except:\n lib_list = {}\n for lib in lib_list:\n if lib['Id'] == view['Id']:\n library['Active'] = lib['Active']\n libraries.append(library)\n config['Libraries'] = libraries\n return 0\n except:\n return 1\n\n\ndef is_library_active(config, libraryname):\n for library in config['Libraries']:\n if library['Name'] == libraryname:\n return library['Active']\n return False\n\n\ndef get_selectableFolders(config):\n EmbySession = EmbyHttp(config)\n MediaFolders = EmbySession.get_emby_selectablefolders()\n servers = []\n for Folder in MediaFolders:\n index = 1\n active = is_library_active(config, Folder['Name'])\n if config['enable_all_libraries'] == True:\n active = True\n if active == True:\n for SubFolder in Folder['SubFolders']:\n server = {}\n server['Id'] = SubFolder['Id']\n if index > 1:\n server['name'] = Folder['Name'] + '(' + 
str(index) + ')'\n else:\n server['name'] = Folder['Name']\n server['Emby_Path'] = SubFolder['Path']\n server['Oppo_Path'] = '/'\n try:\n serv_list = config['servers']\n except:\n serv_list = {}\n for serv in serv_list:\n if server['Emby_Path'] == serv['Emby_Path']:\n server['name'] = serv['name']\n server['Oppo_Path'] = serv['Oppo_Path']\n server['Test_OK'] = serv['Test_OK']\n servers.append(server)\n index = index + 1\n config['servers'] = servers\n\n\ndef get_dir_folders(directory):\n os.chdir(directory)\n dirs = os.listdir('.')\n encontrado = False\n list_dir = []\n for x in dirs:\n if os.path.isdir(x):\n list_dir.append(x)\n return list_dir\n\n\ndef move_files(src, dest):\n os.chdir(src)\n src_files = os.listdir('.')\n for file_name in src_files:\n full_file_name = os.path.join(src, file_name)\n if os.path.isfile(full_file_name):\n shutil.copy(full_file_name, dest)\n return 0\n\n\ndef get_devices(config):\n try:\n EmbySession = EmbyHttp(config)\n devices = EmbySession.get_emby_devices()\n index = 0\n dev_temp = []\n for device in devices['Items']:\n try:\n if device['Id'] != 'Xnoppo':\n device['Name'] = device['Name'] + ' / ' + device['AppName']\n device['Id'] = device['ReportedDeviceId']\n dev_temp.append(device)\n except:\n pass\n config['devices'] = dev_temp\n return 'OK'\n except:\n return 'FAILURE'\n\n\nclass MyServer(BaseHTTPRequestHandler):\n\n def do_GET(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/emby_conf.html':\n i = leer_file(html_path + 'emby_conf.html')\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/oppo_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'oppo_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/lib_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'lib_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/path_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'path_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/tv_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'tv_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/av_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'av_conf.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/other_conf.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'other_conf.html')\n 
self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/status.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'status.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/help.html':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_file(html_path + 'help.html')\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/remote.html':\n i = leer_file(html_path + 'remote.html')\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(i, 'utf-8'))\n return 0\n if self.path == '/android-chrome-36x36.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'android-chrome-36x36.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/av-receiver-icon-2.jpg':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'av-receiver-icon-2.jpg')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/dragon.png':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n i = leer_img(resource_path + 'dragon.png')\n self.wfile.write(bytes(i))\n return 0\n if self.path == '/xnoppo_config':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_lib':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n carga_libraries(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/xnoppo_config_dev':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n get_devices(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/check_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = check_version(config)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/update_version':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = update_version(config, vers_path, cwd)\n restart()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/get_state':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = get_state()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/restart':\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n a = 'Restarting'\n self.wfile.write(bytes(a, 'utf-8'))\n restart()\n if self.path == '/refresh_paths':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json', 
tv_path,\n av_path, lang_path)\n get_selectableFolders(a)\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path == '/lang':\n self.send_response(200)\n self.send_header('Content-type', 'application/json')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = cargar_lang(lang_path + config['language'] + separador +\n 'lang.js')\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n return 0\n if self.path.find('/send_key?') >= 0:\n get_data = self.path\n print(get_data)\n a = len('/send_key?sendkey=')\n b = get_data[a:len(get_data)]\n print(b)\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n sendnotifyremote(config['Oppo_IP'])\n result = check_socket(config)\n if b == 'PON':\n if result == 0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey('EJT', config)\n if config['BRDisc'] == True:\n time.sleep(1)\n response_data_on = sendremotekey('EJT', config)\n time.sleep(1)\n response_data6b = getsetupmenu(config)\n else:\n response_data_on = sendremotekey(b, config)\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n a = 'ok'\n self.wfile.write(bytes(a, 'utf-8'))\n return 0\n if self.path == '/log.txt':\n self.send_response(200)\n self.send_header('Content-type', 'text')\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')\n self.wfile.write(bytes(a))\n return 0\n else:\n self.send_response(200)\n self.send_header('Content-type', 'text/html')\n self.end_headers()\n self.wfile.write(bytes(\n '<html><head><title>https://pythonbasics.org</title></head>',\n 'utf-8'))\n self.wfile.write(bytes('<p>Request: %s</p>' % self.path, 'utf-8'))\n self.wfile.write(bytes('<body>', 'utf-8'))\n self.wfile.write(bytes('<p>This is an example web server.</p>',\n 'utf-8'))\n self.wfile.write(bytes('</body></html>', 'utf-8'))\n\n def do_POST(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lib_path = cwd + separador + 'lib' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n print(self.path)\n if self.path == '/save_config':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n if self.path == 
'/check_emby':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_emby(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n status = get_state()\n if status['Playstate'] == 'Not_Connected':\n save_config(cwd + separador + 'config.json', config)\n emby_wsocket.ws_config = config\n restart()\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/check_oppo':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = test_oppo(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/test_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n server = json.loads(post_data.decode('utf-8'))\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = test_path(config, server)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(server))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(server), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/navigate_path':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n path_obj = json.loads(post_data.decode('utf-8'))\n path = path_obj['path']\n config = cargar_config(cwd + separador + 'config.json', tv_path,\n av_path, lang_path)\n a = navigate_folder(path, config)\n a_json = json.dumps(a)\n print(len(a_json))\n self.send_response(200)\n self.send_header('Content-Length', len(a_json))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(a), 'utf-8'))\n 
return 0\n if self.path == '/move_tv':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(tv_path + config['TV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/move_av':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json', config)\n move_files(av_path + config['AV_model'], lib_path)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n restart()\n return 0\n if self.path == '/get_tv_key':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_key(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_conn':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_test_conn(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_tv_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_tv_sources(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n 
self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/get_av_sources':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = get_hdmi_list(config)\n if a != None:\n config['AV_SOURCES'] = a\n save_config(cwd + separador + 'config.json', config)\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_init':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/tv_test_end':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = tv_set_prev(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_on':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_check_power(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n 
self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_off':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_power_off(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n if self.path == '/av_test_hdmi':\n content_length = int(self.headers['Content-Length'])\n post_data = self.rfile.read(content_length)\n config = json.loads(post_data.decode('utf-8'))\n a = av_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header('Content-Length', len(config))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config), 'utf-8'))\n else:\n self.send_response(300)\n self.send_header('Content-Length', len('ERROR'))\n self.send_header('Content-Type', 'text/html')\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes('ERROR', 'utf-8'))\n return 0\n\n\nif __name__ == '__main__':\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador = '\\\\'\n else:\n separador = '/'\n config_file = cwd + separador + 'config.json'\n resource_path = (cwd + separador + 'web' + separador + 'resources' +\n separador)\n html_path = cwd + separador + 'web' + separador\n tv_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'TV' + separador)\n av_path = (cwd + separador + 'web' + separador + 'libraries' +\n separador + 'AV' + separador)\n lib_path = cwd + separador + 'lib' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n config = cargar_config(config_file, tv_path, av_path, lang_path)\n logfile = cwd + separador + 'emby_xnoppo_client_logging.log'\n lang = cargar_lang(lang_path + config['language'] + separador + 'lang.js')\n if config['DebugLevel'] == 0:\n logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',\n datefmt='%d/%m/%Y %I:%M:%S %p', level=logging.CRITICAL)\n elif config['DebugLevel'] == 1:\n rfh = logging.handlers.RotatingFileHandler(filename=logfile, mode=\n 'a', maxBytes=50 * 1024 * 1024, backupCount=2, encoding=None,\n delay=0)\n 
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',\n datefmt='%d/%m/%Y %I:%M:%S %p', level=logging.INFO, handlers=[rfh])\n elif config['DebugLevel'] == 2:\n rfh = logging.handlers.RotatingFileHandler(filename=logfile, mode=\n 'a', maxBytes=5 * 1024 * 1024, backupCount=2, encoding=None,\n delay=0)\n logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',\n datefmt='%d/%m/%Y %I:%M:%S %p', level=logging.DEBUG, handlers=[rfh]\n )\n emby_wsocket = xnoppo_ws()\n emby_wsocket.ws_config = config\n emby_wsocket.config_file = config_file\n emby_wsocket.ws_lang = lang\n x = threading.Thread(target=thread_function, args=(emby_wsocket,))\n x.start()\n espera = 0\n estado_anterior = ''\n logging.debug('Arrancamos el Servidor Web\\n')\n serverPort = 8090\n webServer = HTTPServer(('', serverPort), MyServer)\n print('Server started http://%s:%s' % ('', serverPort))\n try:\n webServer.serve_forever()\n except KeyboardInterrupt:\n pass\n webServer.server_close()\n logging.info('Fin proceso')\n logging.info('Finished')\n print('Server stopped.')\n",
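
Editor's note: the `__main__` block that closes step-4 pairs a long-running websocket client thread (`threading.Thread(target=thread_function, args=(emby_wsocket,))`) with a blocking `HTTPServer.serve_forever()` loop on port 8090. A standalone sketch of that pattern follows; `PingHandler` and `background_worker` are illustrative stand-ins (the original thread is non-daemon, `daemon=True` here is my simplification so Ctrl+C exits cleanly).

import threading
import time
from http.server import BaseHTTPRequestHandler, HTTPServer

class PingHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        body = b'ok'
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain')
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)

def background_worker():
    # Stands in for xnoppo_ws().start(), which blocks for the life of the process.
    while True:
        time.sleep(5)

if __name__ == '__main__':
    worker = threading.Thread(target=background_worker, daemon=True)
    worker.start()
    server = HTTPServer(('', 8090), PingHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        pass
    server.server_close()
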
"step-5": "import http.server\nimport socketserver\nfrom http.server import BaseHTTPRequestHandler, HTTPServer\nimport time\nimport json\nimport io\nimport urllib\nimport requests\nfrom lib.Emby_ws import xnoppo_ws\nfrom lib.Emby_http import *\nfrom lib.Xnoppo import *\nfrom lib.Xnoppo_TV import *\nimport lib.Xnoppo_AVR\nimport shutil\nimport asyncio\nimport threading\nimport logging\nimport logging.handlers\nimport psutil\n\ndef get_version():\n return(\"2.01\")\n\ndef thread_function(ws_object):\n print(\"Thread: starting\")\n ws_object.start()\n print(\"Thread: finishing\")\n\ndef restart():\n print('restart')\n try:\n emby_wsocket.stop()\n except:\n sys.exit()\n sys.exit()\n print('fin restart')\n \ndef save_config(config_file, config):\n with open(config_file, 'w') as fw:\n json.dump(config, fw, indent=4)\n fw.close\n try:\n emby_wsocket.ws_config=config\n emby_wsocket.EmbySession.config=config\n except:\n emby_wsocket.ws_config=config\ndef get_state():\n status={}\n status[\"Version\"]=get_version()\n try:\n status[\"Playstate\"]=emby_wsocket.EmbySession.playstate\n status[\"playedtitle\"]=emby_wsocket.EmbySession.playedtitle\n status[\"server\"]=emby_wsocket.EmbySession.server\n status[\"folder\"]=emby_wsocket.EmbySession.folder\n status[\"filename\"]=emby_wsocket.EmbySession.filename\n status[\"CurrentData\"]=emby_wsocket.EmbySession.currentdata\n # gives a single float value\n except:\n status[\"Playstate\"]=\"Not_Connected\"\n status[\"playedtitle\"]=\"\"\n status[\"server\"]=\"\"\n status[\"folder\"]=\"\"\n status[\"filename\"]=\"\"\n status[\"CurrentData\"]=\"\"\n status[\"cpu_perc\"]=psutil.cpu_percent()\n status[\"mem_perc\"]=psutil.virtual_memory().percent\n \n # you can have the percentage of used RAM\n print(psutil.virtual_memory().percent)\n\n\n print(status)\n return(status)\n\ndef cargar_config(config_file,tv_path,av_path,lang_path):\n\n with open(config_file, 'r') as f: \n config = json.load(f)\n #ver_configuracion(config)\n f.close\n ## new options default config values\n config[\"Version\"]=get_version()\n default = config.get(\"Autoscript\", False)\n config[\"Autoscript\"]=default\n default = config.get(\"enable_all_libraries\", False)\n config[\"enable_all_libraries\"]=default\n default = config.get(\"TV_model\", \"\")\n config[\"TV_model\"]=default\n default = config.get(\"TV_SOURCES\", [])\n config[\"TV_SOURCES\"] = default\n default = config.get(\"AV_model\", \"\")\n config[\"AV_model\"]=default\n default = config.get(\"AV_SOURCES\", [])\n config[\"AV_SOURCES\"] = default\n default = config.get(\"TV_script_init\", \"\")\n config[\"TV_script_init\"]=default\n default = config.get(\"TV_script_end\", \"\")\n config[\"TV_script_end\"]=default\n default = config.get(\"av_delay_hdmi\", 0)\n config[\"av_delay_hdmi\"]=default\n default = config.get(\"AV_Port\", 23)\n config[\"AV_Port\"]=default\n default = config.get(\"timeout_oppo_mount\", 60)\n config[\"timeout_oppo_mount\"]=default\n default = config.get(\"language\",\"es-ES\")\n config[\"language\"]=default\n default = config.get(\"default_nfs\",False)\n config[\"default_nfs\"]=default\n default = config.get(\"wait_nfs\",False)\n config[\"wait_nfs\"]=default\n default = config.get(\"refresh_time\",5)\n config[\"refresh_time\"]=default\n default = config.get(\"check_beta\",False)\n config[\"check_beta\"]=default\n default = config.get(\"smbtrick\",False)\n config[\"smbtrick\"]=default\n default = config.get(\"BRDisc\",False)\n config[\"BRDisc\"]=default\n\n ## testeado de rutas\n edit_server=0\n server_list = 
config[\"servers\"]\n for server in server_list:\n default = server.get(\"Test_OK\", False)\n server_list[edit_server][\"Test_OK\"]=default\n edit_server=edit_server+1\n ## Cambio de booleans de texto antiguos a boleans actuales.\n if config[\"TV\"]=='True':\n config[\"TV\"]=True;\n if config[\"TV\"]=='False':\n config[\"TV\"]=False;\n if config[\"AV\"]=='True':\n config[\"AV\"]=True;\n if config[\"AV\"]=='False':\n config[\"AV\"]=False;\n config[\"servers\"]=server_list\n config[\"tv_dirs\"]=get_dir_folders(tv_path);\n config[\"av_dirs\"]=get_dir_folders(av_path);\n config[\"langs\"]=get_dir_folders(lang_path);\n\n return(config)\n\ndef check_version(config):\n\n url = \"https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js\"\n headers = {}\n response = requests.get(url, headers=headers)\n version = json.loads(response.text)\n print(version)\n print(config[\"check_beta\"])\n if config[\"check_beta\"]==True:\n last_version=version[\"beta_version\"]\n last_version_file=version[\"beta_version_file\"]\n else:\n last_version=version[\"curr_version\"]\n last_version_file=version[\"curr_version_file\"]\n xno_version=get_version()\n resp = {}\n resp[\"version\"]=last_version\n resp[\"file\"]=last_version_file\n print(xno_version)\n print(last_version)\n if xno_version<last_version:\n resp[\"new_version\"]=True\n else:\n resp[\"new_version\"]=False\n print(resp)\n return(resp)\n\ndef update_version(config,vers_path,cwd):\n\n url = \"https://raw.githubusercontent.com/siberian-git/Xnoppo/main/versions/version.js\"\n headers = {}\n response = requests.get(url, headers=headers)\n version = json.loads(response.text)\n print(version)\n if config[\"check_beta\"]==True:\n last_version=version[\"beta_version\"]\n last_version_file=version[\"beta_version_file\"]\n else:\n last_version=version[\"curr_version\"]\n last_version_file=version[\"curr_version_file\"]\n url2 = \"https://github.com/siberian-git/Xnoppo/raw/main/versions/\" + last_version_file\n headers = {}\n response2 = requests.get(url2, headers=headers)\n filename=vers_path + last_version_file\n with open(filename, 'wb') as f:\n f.write(response2.content)\n f.close()\n shutil.unpack_archive(filename, cwd)\n if sys.platform.startswith('win'):\n separador=\"\\\\\"\n else:\n separador=\"/\"\n tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador\n av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador\n if config[\"TV\"]==True and config[\"TV_model\"]!=\"\":\n move_files(tv_path + config[\"TV_model\"],lib_path)\n if config[\"AV\"]==True and config[\"AV_model\"]!=\"\":\n move_files(av_path + config[\"AV_model\"],lib_path)\n resp = {}\n resp[\"version\"]=last_version\n resp[\"file\"]=last_version_file\n resp[\"new_version\"]=False\n return(resp)\n\ndef cargar_lang(config_file):\n\n with open(config_file.encode(sys.getfilesystemencoding()), 'r',encoding='latin-1') as f: \n config = json.load(f)\n #ver_configuracion(config)\n f.close\n ## new options default config values\n return(config)\n\ndef leer_file(web_file):\n\n with open(web_file, 'r',encoding='utf8') as f:\n num=f.read()\n f.close\n return(num)\n\ndef leer_img(web_file):\n\n with open(web_file, 'rb') as f:\n num=f.read()\n f.close\n return(num)\n\n\ndef test_path(config,server):\n \n rutas = get_mount_path(server[\"Emby_Path\"] + \"/test.mkv\",server)\n result2 = test_mount_path(config,rutas[\"Servidor\"],rutas[\"Carpeta\"])\n return(result2)\n\ndef get_mount_path(movie,server_data):\n\n movie = 
movie.replace(server_data[\"Emby_Path\"],server_data[\"Oppo_Path\"])\n movie = movie.replace('\\\\\\\\','\\\\')\n movie = movie.replace('\\\\','/')\n word = '/'\n inicio = movie.find(word)\n inicio = inicio +1 \n final = movie.find(word,inicio,len(movie))\n servidor = movie[inicio:final]\n ultimo=final+1\n result=final+1\n while result > 0:\n ultimo=result+1\n result=movie.find(word,ultimo,len(movie))\n fichero=movie[ultimo:len(movie)]\n final=final+1\n ultimo=ultimo-1\n carpeta=movie[final:ultimo]\n resultado={}\n resultado[\"Servidor\"]=servidor\n resultado[\"Carpeta\"]=carpeta\n resultado[\"Fichero\"]=fichero\n return(resultado)\n\ndef test_mount_path(config,servidor,carpeta):\n sendnotifyremote(config[\"Oppo_IP\"])\n #print(\"Conectando con el OPPO\")\n result=check_socket(config)\n if result==0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey(\"EJT\",config)\n time.sleep(1)\n #print(\"Solicitando montar ruta al OPPO\")\n response_data6b = getsetupmenu(config)\n while response_data6f.find('devicelist\":[]') > 0:\n time.sleep(1)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey(\"QPW\",config)\n device_list=json.loads(response_data6f)\n if config[\"DebugLevel\"]>0: print(device_list)\n nfs=config[\"default_nfs\"]\n for device in device_list[\"devicelist\"]:\n if device[\"name\"].upper()==servidor.upper():\n if device[\"sub_type\"]==\"nfs\":\n nfs=True\n break\n else:\n nfs=False\n break\n if nfs:\n response_login = LoginNFS(config,servidor)\n else:\n response_login = LoginSambaWithOutID(config,servidor)\n if config[\"Always_ON\"]==False:\n time.sleep(5)\n response_data6b = getsetupmenu(config)\n if nfs:\n response_mount = mountSharedNFSFolder(servidor,carpeta,'','',config)\n else:\n response_mount = mountSharedFolder(servidor,carpeta,'','',config)\n response=json.loads(response_mount)\n #print(response)\n if config[\"Autoscript\"]==True:\n result=umountSharedFolder(config)\n if response[\"success\"]==True:\n a = \"OK\"\n else:\n a = \"FAILURE\" \n return(a)\n else:\n print(\"No se puede conectar, revisa las configuraciones o que el OPPO este encendido o en reposo\")\n\ndef test_emby(config):\n try:\n EmbySession=EmbyHttp(config)\n user_info = EmbySession.user_info\n if user_info[\"SessionInfo\"][\"Id\"]!=\"\":\n return(\"OK\")\n else:\n return(\"FAILED\")\n except:\n return(\"FAILED\")\n\ndef test_oppo(config):\n result=check_socket(config)\n if result==0:\n return(\"OK\")\n else:\n return(\"FAILED\")\n\ndef carga_libraries(config):\n try:\n EmbySession=EmbyHttp(config)\n views_list=EmbySession.get_user_views(EmbySession.user_info[\"User\"][\"Id\"])\n libraries = []\n for view in views_list:\n library= {}\n library[\"Name\"]=view[\"Name\"]\n library[\"Id\"]=view[\"Id\"]\n library[\"Active\"]=False\n try:\n lib_list=config[\"Libraries\"]\n except:\n lib_list={}\n for lib in lib_list:\n if lib[\"Id\"]==view[\"Id\"]:\n library[\"Active\"]=lib[\"Active\"]\n libraries.append(library)\n config[\"Libraries\"]=libraries\n return(0)\n except:\n return(1)\ndef is_library_active(config,libraryname):\n for library in config[\"Libraries\"]:\n if library[\"Name\"]==libraryname:\n return(library[\"Active\"])\n return(False)\n\ndef get_selectableFolders(config):\n EmbySession=EmbyHttp(config)\n 
MediaFolders = EmbySession.get_emby_selectablefolders()\n servers=[]\n for Folder in MediaFolders:\n index=1\n active=is_library_active(config,Folder[\"Name\"])\n if config[\"enable_all_libraries\"]==True:\n active=True;\n if active==True:\n for SubFolder in Folder[\"SubFolders\"]: \n server={}\n server[\"Id\"]=SubFolder[\"Id\"]\n if index>1:\n server[\"name\"]=Folder[\"Name\"]+\"(\"+str(index)+\")\"\n else:\n server[\"name\"]=Folder[\"Name\"]\n server[\"Emby_Path\"]=SubFolder[\"Path\"]\n server[\"Oppo_Path\"]=\"/\"\n try:\n serv_list=config[\"servers\"]\n except:\n serv_list={}\n for serv in serv_list:\n if server[\"Emby_Path\"]==serv[\"Emby_Path\"]:\n server[\"name\"]=serv[\"name\"];\n server[\"Oppo_Path\"]=serv[\"Oppo_Path\"];\n server[\"Test_OK\"]=serv[\"Test_OK\"];\n servers.append(server)\n index=index+1\n config[\"servers\"]=servers\n\ndef get_dir_folders(directory):\n os.chdir(directory)\n dirs = os.listdir(\".\")\n encontrado=False\n list_dir=[]\n #a =\"\"\n #list_dir.append(a)\n for x in dirs:\n if os.path.isdir(x):\n list_dir.append(x)\n return(list_dir)\n\ndef move_files(src, dest):\n os.chdir(src)\n src_files = os.listdir('.')\n for file_name in src_files:\n full_file_name = os.path.join(src, file_name)\n if os.path.isfile(full_file_name):\n shutil.copy(full_file_name, dest)\n return(0)\n\ndef get_devices(config):\n try:\n EmbySession=EmbyHttp(config)\n devices = EmbySession.get_emby_devices()\n index=0\n dev_temp = []\n for device in devices[\"Items\"]:\n try:\n if device[\"Id\"]!='Xnoppo':\n device[\"Name\"]=device[\"Name\"] + \" / \" + device[\"AppName\"]\n device[\"Id\"]=device[\"ReportedDeviceId\"]\n dev_temp.append(device)\n except:\n pass\n config[\"devices\"]=dev_temp\n return('OK')\n except:\n return('FAILURE')\n\nclass MyServer(BaseHTTPRequestHandler):\n def do_GET(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador=\"\\\\\"\n else:\n separador=\"/\"\n resource_path=cwd + separador + 'web' + separador + 'resources' + separador\n html_path = cwd + separador + 'web' + separador\n tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador\n av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n \n print(self.path)\n if self.path == '/emby_conf.html':\n i = leer_file(html_path + 'emby_conf.html')\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/oppo_conf.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'oppo_conf.html')\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/lib_conf.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'lib_conf.html')\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/path_conf.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'path_conf.html')\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/tv_conf.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'tv_conf.html')\n 
self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/av_conf.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'av_conf.html')\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/other_conf.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'other_conf.html')\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/status.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'status.html')\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/help.html':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_file(html_path + 'help.html')\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/remote.html':\n i = leer_file(html_path + 'remote.html')\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n self.wfile.write(bytes(i,\"utf-8\"))\n return(0)\n if self.path == '/android-chrome-36x36.png':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_img(resource_path + 'android-chrome-36x36.png')\n self.wfile.write(bytes(i))\n return(0)\n if self.path == '/av-receiver-icon-2.jpg':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_img(resource_path + 'av-receiver-icon-2.jpg')\n self.wfile.write(bytes(i))\n return(0)\n if self.path == '/dragon.png':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n i = leer_img(resource_path + 'dragon.png')\n self.wfile.write(bytes(i))\n return(0)\n if self.path == '/xnoppo_config':\n self.send_response(200)\n self.send_header(\"Content-type\", \"application/json\")\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n self.wfile.write(bytes(json.dumps(a),\"utf-8\")) \n return(0)\n if self.path == '/xnoppo_config_lib':\n self.send_response(200)\n self.send_header(\"Content-type\", \"application/json\")\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n carga_libraries(a)\n self.wfile.write(bytes(json.dumps(a),\"utf-8\"))\n return(0)\n if self.path == '/xnoppo_config_dev':\n self.send_response(200)\n self.send_header(\"Content-type\", \"application/json\")\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n get_devices(a)\n self.wfile.write(bytes(json.dumps(a),\"utf-8\"))\n return(0)\n if self.path == '/check_version':\n self.send_response(200)\n self.send_header(\"Content-type\", \"application/json\")\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n a = check_version(config)\n self.wfile.write(bytes(json.dumps(a),\"utf-8\"))\n return(0)\n if self.path == '/update_version':\n self.send_response(200)\n self.send_header(\"Content-type\", \"application/json\")\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n a = update_version(config,vers_path,cwd)\n restart()\n self.wfile.write(bytes(json.dumps(a),\"utf-8\"))\n return(0)\n if self.path == '/get_state':\n self.send_response(200)\n self.send_header(\"Content-type\", 
\"application/json\")\n self.end_headers()\n a = get_state()\n self.wfile.write(bytes(json.dumps(a),\"utf-8\"))\n return(0)\n if self.path == '/restart':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n a = \"Restarting\"\n self.wfile.write(bytes(a,\"utf-8\"))\n restart()\n if self.path == '/refresh_paths':\n self.send_response(200)\n self.send_header(\"Content-type\", \"application/json\")\n self.end_headers()\n a = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n get_selectableFolders(a)\n self.wfile.write(bytes(json.dumps(a),\"utf-8\"))\n return(0)\n if self.path == '/lang':\n self.send_response(200)\n self.send_header(\"Content-type\", \"application/json\")\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n a = cargar_lang(lang_path + config[\"language\"] + separador +'lang.js')\n self.wfile.write(bytes(json.dumps(a),\"utf-8\")) \n return(0)\n if self.path.find(\"/send_key?\")>=0:\n get_data = self.path\n print(get_data)\n a = len('/send_key?sendkey=')\n b=get_data[a:len(get_data)]\n print(b)\n config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n sendnotifyremote(config[\"Oppo_IP\"])\n result=check_socket(config)\n if b=='PON':\n if result==0:\n response_data6a = getmainfirmwareversion(config)\n response_data6c = getdevicelist(config)\n response_data6b = getsetupmenu(config)\n response_data6c = OppoSignin(config)\n response_data6d = getdevicelist(config)\n response_data6e = getglobalinfo(config)\n response_data6f = getdevicelist(config)\n response_data_on = sendremotekey(\"EJT\",config)\n if config[\"BRDisc\"]==True:\n time.sleep(1)\n response_data_on = sendremotekey(\"EJT\",config)\n time.sleep(1)\n response_data6b = getsetupmenu(config)\n else:\n response_data_on = sendremotekey(b,config)\n self.send_response(200)\n self.send_header(\"Content-type\", \"text\")\n self.end_headers()\n a = \"ok\"\n self.wfile.write(bytes(a,\"utf-8\")) \n return(0)\n if self.path == '/log.txt':\n self.send_response(200)\n self.send_header(\"Content-type\", \"text\")\n self.end_headers()\n config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n a = leer_img(cwd + separador + 'emby_xnoppo_client_logging.log')\n self.wfile.write(bytes(a)) \n return(0)\n else:\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/html\")\n self.end_headers()\n self.wfile.write(bytes(\"<html><head><title>https://pythonbasics.org</title></head>\", \"utf-8\"))\n self.wfile.write(bytes(\"<p>Request: %s</p>\" % self.path, \"utf-8\"))\n self.wfile.write(bytes(\"<body>\", \"utf-8\"))\n self.wfile.write(bytes(\"<p>This is an example web server.</p>\", \"utf-8\"))\n self.wfile.write(bytes(\"</body></html>\", \"utf-8\"))\n\n def do_POST(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador=\"\\\\\"\n else:\n separador=\"/\"\n resource_path=cwd + separador + 'web' + separador + 'resources' + separador\n html_path = cwd + separador + 'web' + separador\n tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador\n av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador\n lib_path = cwd + separador + 'lib' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n \n print(self.path)\n if self.path == '/save_config':\n content_length = 
int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json',config)\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n if self.path == '/check_emby':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = test_emby(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n status = get_state()\n if status[\"Playstate\"]==\"Not_Connected\":\n save_config(cwd + separador + 'config.json',config)\n emby_wsocket.ws_config=config\n restart()\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/check_oppo':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = test_oppo(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/test_path':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n server = json.loads(post_data.decode('utf-8'))\n config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n a = test_path(config,server)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(server))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(server),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n 
self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/navigate_path':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n path_obj = json.loads(post_data.decode('utf-8'))\n path = path_obj[\"path\"]\n config = cargar_config(cwd + separador + 'config.json',tv_path,av_path,lang_path)\n a = navigate_folder(path,config)\n a_json=json.dumps(a)\n print(len(a_json))\n self.send_response(200)\n self.send_header(\"Content-Length\", len(a_json))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(a),\"utf-8\"))\n return(0)\n\n if self.path == '/move_tv':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json',config)\n move_files(tv_path + config[\"TV_model\"],lib_path)\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n restart()\n return(0)\n if self.path == '/move_av':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8'))\n save_config(cwd + separador + 'config.json',config)\n move_files(av_path + config[\"AV_model\"],lib_path)\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n restart()\n return(0)\n if self.path == '/get_tv_key':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = get_tv_key(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json',config)\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/tv_test_conn':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = 
tv_test_conn(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/get_tv_sources':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = get_tv_sources(config)\n if a == 'OK':\n save_config(cwd + separador + 'config.json',config)\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/get_av_sources':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = get_hdmi_list(config)\n if a != None:\n config[\"AV_SOURCES\"]=a\n save_config(cwd + separador + 'config.json',config)\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/tv_test_init':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = tv_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 
'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/tv_test_end':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = tv_set_prev(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/av_test_on':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = av_check_power(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/av_test_off':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = av_power_off(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\n if self.path == '/av_test_hdmi':\n content_length = int(self.headers['Content-Length']) # <--- Gets the size of data\n post_data = self.rfile.read(content_length) # <--- Gets the data itself\n config = json.loads(post_data.decode('utf-8')) \n a = av_change_hdmi(config)\n if a == 'OK':\n self.send_response(200)\n self.send_header(\"Content-Length\", len(config))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n 
self.wfile.write(bytes(json.dumps(config),\"utf-8\"))\n else:\n self.send_response(300)\n self.send_header(\"Content-Length\", len(\"ERROR\"))\n self.send_header(\"Content-Type\", \"text/html\")\n self.send_header('Access-Control-Allow-Credentials', 'true')\n self.send_header('Access-Control-Allow-Origin', '*')\n self.end_headers()\n self.wfile.write(bytes(\"ERROR\",\"utf-8\"))\n return(0)\nif __name__ == \"__main__\":\n\n cwd = os.path.dirname(os.path.abspath(__file__))\n if sys.platform.startswith('win'):\n separador=\"\\\\\"\n else:\n separador=\"/\"\n config_file = cwd + separador + \"config.json\"\n resource_path=cwd + separador + 'web' + separador + 'resources' + separador\n html_path = cwd + separador + 'web' + separador\n tv_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'TV' + separador\n av_path = cwd + separador + 'web' + separador + 'libraries' + separador + 'AV' + separador\n lib_path = cwd + separador + 'lib' + separador\n lang_path = cwd + separador + 'web' + separador + 'lang' + separador\n vers_path = cwd + separador + 'versions' + separador\n config = cargar_config(config_file,tv_path,av_path,lang_path)\n logfile=cwd + separador + \"emby_xnoppo_client_logging.log\"\n lang = cargar_lang(lang_path + config[\"language\"] + separador +'lang.js')\n\n if config[\"DebugLevel\"]==0:\n logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.CRITICAL)\n elif config[\"DebugLevel\"]==1:\n rfh = logging.handlers.RotatingFileHandler(\n filename=logfile, \n mode='a',\n maxBytes=50*1024*1024,\n backupCount=2,\n encoding=None,\n delay=0\n )\n logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.INFO,handlers=[rfh])\n elif config[\"DebugLevel\"]==2:\n rfh = logging.handlers.RotatingFileHandler(\n filename=logfile, \n mode='a',\n maxBytes=5*1024*1024,\n backupCount=2,\n encoding=None,\n delay=0\n )\n logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',datefmt='%d/%m/%Y %I:%M:%S %p',level=logging.DEBUG,handlers=[rfh])\n emby_wsocket = xnoppo_ws()\n emby_wsocket.ws_config=config\n emby_wsocket.config_file=config_file\n emby_wsocket.ws_lang=lang\n x = threading.Thread(target=thread_function, args=(emby_wsocket,))\n x.start()\n espera=0\n estado_anterior=''\n\n logging.debug('Arrancamos el Servidor Web\\n')\n serverPort = 8090\n webServer = HTTPServer((\"\", serverPort), MyServer)\n print(\"Server started http://%s:%s\" % (\"\", serverPort))\n try:\n webServer.serve_forever()\n except KeyboardInterrupt:\n pass\n webServer.server_close()\n logging.info('Fin proceso')\n logging.info('Finished')\n print(\"Server stopped.\")\n",
"step-ids": [
21,
24,
25,
27,
28
]
}
|
[
21,
24,
25,
27,
28
] |
# Greedy stock trading: whenever tomorrow's price is higher than today's,
# buy as many shares as the current budget allows today and sell them all tomorrow.
n = int(input())
A = list(map(int, input().split()))
g = 1000  # starting money
for s1, s2 in zip(A[:-1], A[1:]):
    if s1 < s2:
        stockNum = g // s1           # shares we can afford at today's price
        g += stockNum * (s2 - s1)    # profit from selling at tomorrow's price
print(g)
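# Worked example (hypothetical input, not from the source): n=4, A=[3, 5, 4, 6]
#   day 1 -> 2: price rises 3 -> 5, buy 1000 // 3 = 333 shares, g = 1000 + 333*2 = 1666
#   day 3 -> 4: price rises 4 -> 6, buy 1666 // 4 = 416 shares, g = 1666 + 416*2 = 2498
# so the program prints 2498.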
|
normal
|
{
"blob_id": "da903409d75ba2a07443317e30bce568444fbca5",
"index": 9956,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor s1, s2 in zip(A[:-1], A[1:]):\n if s1 < s2:\n stockNum = g // s1\n g += stockNum * (s2 - s1)\nprint(g)\n",
"step-3": "n = int(input())\nA = list(map(int, input().split()))\ng = 1000\nfor s1, s2 in zip(A[:-1], A[1:]):\n if s1 < s2:\n stockNum = g // s1\n g += stockNum * (s2 - s1)\nprint(g)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# -*- coding: UTF-8 -*-
'''
Evaluate trained PredNet on KITTI sequences.
Calculates mean-squared error and plots predictions.
'''
import os
import numpy as np
from six.moves import cPickle
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from keras import backend as K
from keras.models import Model, model_from_json
from keras.layers import Input, Dense, Flatten
from prednet import PredNet
from data_utils import SequenceGenerator
from kitti_settings import *
n_plot = 40
batch_size = 10
nt = 5
# Paths to the trained weights file and the model-architecture JSON file
weights_file = os.path.join(WEIGHTS_DIR, 'prednet_facebook_segmpred_weights.hdf5')
json_file = os.path.join(WEIGHTS_DIR, 'prednet_facebook_segmpred_model.json')
# weights_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_weights.hdf5')
# json_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_model.json')
# weights_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_weights-extrapfinetuned.hdf5') # where weights will be saved
# json_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_model-extrapfinetuned.json')
test_file = os.path.join(DATA_DIR, 'facebook_segmpred_X_test.hkl')
test_sources = os.path.join(DATA_DIR, 'facebook_segmpred_sources_test.hkl')
# Load trained model
# Load the model's JSON file
f = open(json_file, 'r')
# Read the JSON string
json_string = f.read()
f.close()
# Deserialize the model from the stored JSON (including the custom PredNet layer's parameters), then load the trained weights.
# Saving a model just means storing its JSON and weights files; loading deserializes from those same two files.
train_model = model_from_json(json_string, custom_objects = {'PredNet': PredNet})
train_model.load_weights(weights_file)
# Create testing model (to output predictions)
# Build the testing model
# The training model contains an InputLayer, the PredNet layer, etc.; layer index 1 is the PredNet layer
# print(train_model.layers)
layer_config = train_model.layers[1].get_config()
# For evaluation, change output_mode from 'error' to 'prediction'
layer_config['output_mode'] = 'prediction'
data_format = layer_config['data_format'] if 'data_format' in layer_config else layer_config['dim_ordering']
# Rebuild the PredNet from the modified config plus the trained weights (Keras layers expose get_config and get_weights for this)
test_prednet = PredNet(weights=train_model.layers[1].get_weights(), **layer_config)
# The input shape is the layer's batch_input_shape without the leading batch dimension
# input_shape = list(train_model.layers[0].batch_input_shape[1:])
# The input consists of nt frames, used to predict the next frame
# input_shape[0] = nt
# print('input_shape:', input_shape)
test_generator = SequenceGenerator(test_file, test_sources, nt, sequence_start_mode='unique', data_format=data_format)
X_test = test_generator.create_all()
input_shape = X_test.shape[1:]
# print('input_shape:', input_shape)
# Build the input layer
inputs = Input(shape=tuple(input_shape))
# Run the input through the PredNet to get the test predictions
predictions = test_prednet(inputs)
# Build the end-to-end model from inputs to predictions
test_model = Model(inputs=inputs, outputs=predictions)
# Test/evaluation data generator
# test_generator = SequenceGenerator(test_file, test_sources, nt, sequence_start_mode='unique', data_format=data_format)
# X_test = test_generator.create_all()
# Predict in batches of batch_size: load one batch at a time, then predict it
X_hat = test_model.predict(X_test, batch_size)
# By default the model's channel dimension is last
if data_format == 'channels_first':
X_test = np.transpose(X_test, (0, 1, 3, 4, 2))
X_hat = np.transpose(X_hat, (0, 1, 3, 4, 2))
print('X_hat.shape:', X_hat.shape)
print('X_test.shape:', X_test.shape)
# Compare MSE of PredNet predictions vs. using last frame. Write results to prediction_scores.txt
# Compare the test results
mse_model = np.mean( (X_test[:, 1:] - X_hat[:, 1:])**2 ) # look at all timesteps except the first
mse_prev = np.mean( (X_test[:, :-1] - X_test[:, 1:])**2 )
if not os.path.exists(RESULTS_SAVE_DIR): os.mkdir(RESULTS_SAVE_DIR)
f = open(RESULTS_SAVE_DIR + 'prediction_scores.txt', 'w')
f.write("Model MSE: %f\n" % mse_model)
f.write("Previous Frame MSE: %f" % mse_prev)
f.close()
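# (Interpretation note: the PredNet beats the trivial copy-last-frame
#  baseline whenever mse_model < mse_prev in prediction_scores.txt.)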
# Plot some predictions
aspect_ratio = float(X_hat.shape[2]) / X_hat.shape[3]
plt.figure(figsize = (nt, 2*aspect_ratio))
gs = gridspec.GridSpec(2, nt)
gs.update(wspace=0., hspace=0.)
plot_save_dir = os.path.join(RESULTS_SAVE_DIR, 'prediction_plots/')
if not os.path.exists(plot_save_dir): os.mkdir(plot_save_dir)
plot_idx = np.random.permutation(X_test.shape[0])[:n_plot]
for i in plot_idx:
for t in range(nt):
plt.subplot(gs[t])
plt.imshow(X_test[i,t], interpolation='none')
plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off', labelleft='off')
if t==0: plt.ylabel('Actual', fontsize=10)
plt.subplot(gs[t + nt])
plt.imshow(X_hat[i,t], interpolation='none')
plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off', labelleft='off')
if t==0: plt.ylabel('Predicted', fontsize=10)
plt.savefig(plot_save_dir + 'plot_' + str(i) + '.png')
plt.clf()
|
normal
|
{
"blob_id": "a3507019ca3310d7ad7eb2a0168dcdfe558643f6",
"index": 1615,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nmatplotlib.use('Agg')\n<mask token>\nf.close()\n<mask token>\ntrain_model.load_weights(weights_file)\n<mask token>\nif data_format == 'channels_first':\n X_test = np.transpose(X_test, (0, 1, 3, 4, 2))\n X_hat = np.transpose(X_hat, (0, 1, 3, 4, 2))\nprint('X_hat.shape:', X_hat.shape)\nprint('X_test.shape:', X_test.shape)\n<mask token>\nif not os.path.exists(RESULTS_SAVE_DIR):\n os.mkdir(RESULTS_SAVE_DIR)\n<mask token>\nf.write('Model MSE: %f\\n' % mse_model)\nf.write('Previous Frame MSE: %f' % mse_prev)\nf.close()\n<mask token>\nplt.figure(figsize=(nt, 2 * aspect_ratio))\n<mask token>\ngs.update(wspace=0.0, hspace=0.0)\n<mask token>\nif not os.path.exists(plot_save_dir):\n os.mkdir(plot_save_dir)\n<mask token>\nfor i in plot_idx:\n for t in range(nt):\n plt.subplot(gs[t])\n plt.imshow(X_test[i, t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off',\n left='off', right='off', labelbottom='off', labelleft='off')\n if t == 0:\n plt.ylabel('Actual', fontsize=10)\n plt.subplot(gs[t + nt])\n plt.imshow(X_hat[i, t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off',\n left='off', right='off', labelbottom='off', labelleft='off')\n if t == 0:\n plt.ylabel('Predicted', fontsize=10)\n plt.savefig(plot_save_dir + 'plot_' + str(i) + '.png')\n plt.clf()\n",
"step-3": "<mask token>\nmatplotlib.use('Agg')\n<mask token>\nn_plot = 40\nbatch_size = 10\nnt = 5\nweights_file = os.path.join(WEIGHTS_DIR,\n 'prednet_facebook_segmpred_weights.hdf5')\njson_file = os.path.join(WEIGHTS_DIR, 'prednet_facebook_segmpred_model.json')\ntest_file = os.path.join(DATA_DIR, 'facebook_segmpred_X_test.hkl')\ntest_sources = os.path.join(DATA_DIR, 'facebook_segmpred_sources_test.hkl')\nf = open(json_file, 'r')\njson_string = f.read()\nf.close()\ntrain_model = model_from_json(json_string, custom_objects={'PredNet': PredNet})\ntrain_model.load_weights(weights_file)\nlayer_config = train_model.layers[1].get_config()\nlayer_config['output_mode'] = 'prediction'\ndata_format = layer_config['data_format'\n ] if 'data_format' in layer_config else layer_config['dim_ordering']\ntest_prednet = PredNet(weights=train_model.layers[1].get_weights(), **\n layer_config)\ntest_generator = SequenceGenerator(test_file, test_sources, nt,\n sequence_start_mode='unique', data_format=data_format)\nX_test = test_generator.create_all()\ninput_shape = X_test.shape[1:]\ninputs = Input(shape=tuple(input_shape))\npredictions = test_prednet(inputs)\ntest_model = Model(inputs=inputs, outputs=predictions)\nX_hat = test_model.predict(X_test, batch_size)\nif data_format == 'channels_first':\n X_test = np.transpose(X_test, (0, 1, 3, 4, 2))\n X_hat = np.transpose(X_hat, (0, 1, 3, 4, 2))\nprint('X_hat.shape:', X_hat.shape)\nprint('X_test.shape:', X_test.shape)\nmse_model = np.mean((X_test[:, 1:] - X_hat[:, 1:]) ** 2)\nmse_prev = np.mean((X_test[:, :-1] - X_test[:, 1:]) ** 2)\nif not os.path.exists(RESULTS_SAVE_DIR):\n os.mkdir(RESULTS_SAVE_DIR)\nf = open(RESULTS_SAVE_DIR + 'prediction_scores.txt', 'w')\nf.write('Model MSE: %f\\n' % mse_model)\nf.write('Previous Frame MSE: %f' % mse_prev)\nf.close()\naspect_ratio = float(X_hat.shape[2]) / X_hat.shape[3]\nplt.figure(figsize=(nt, 2 * aspect_ratio))\ngs = gridspec.GridSpec(2, nt)\ngs.update(wspace=0.0, hspace=0.0)\nplot_save_dir = os.path.join(RESULTS_SAVE_DIR, 'prediction_plots/')\nif not os.path.exists(plot_save_dir):\n os.mkdir(plot_save_dir)\nplot_idx = np.random.permutation(X_test.shape[0])[:n_plot]\nfor i in plot_idx:\n for t in range(nt):\n plt.subplot(gs[t])\n plt.imshow(X_test[i, t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off',\n left='off', right='off', labelbottom='off', labelleft='off')\n if t == 0:\n plt.ylabel('Actual', fontsize=10)\n plt.subplot(gs[t + nt])\n plt.imshow(X_hat[i, t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off',\n left='off', right='off', labelbottom='off', labelleft='off')\n if t == 0:\n plt.ylabel('Predicted', fontsize=10)\n plt.savefig(plot_save_dir + 'plot_' + str(i) + '.png')\n plt.clf()\n",
"step-4": "<mask token>\nimport os\nimport numpy as np\nfrom six.moves import cPickle\nimport matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.pyplot as plt\nimport matplotlib.gridspec as gridspec\nfrom keras import backend as K\nfrom keras.models import Model, model_from_json\nfrom keras.layers import Input, Dense, Flatten\nfrom prednet import PredNet\nfrom data_utils import SequenceGenerator\nfrom kitti_settings import *\nn_plot = 40\nbatch_size = 10\nnt = 5\nweights_file = os.path.join(WEIGHTS_DIR,\n 'prednet_facebook_segmpred_weights.hdf5')\njson_file = os.path.join(WEIGHTS_DIR, 'prednet_facebook_segmpred_model.json')\ntest_file = os.path.join(DATA_DIR, 'facebook_segmpred_X_test.hkl')\ntest_sources = os.path.join(DATA_DIR, 'facebook_segmpred_sources_test.hkl')\nf = open(json_file, 'r')\njson_string = f.read()\nf.close()\ntrain_model = model_from_json(json_string, custom_objects={'PredNet': PredNet})\ntrain_model.load_weights(weights_file)\nlayer_config = train_model.layers[1].get_config()\nlayer_config['output_mode'] = 'prediction'\ndata_format = layer_config['data_format'\n ] if 'data_format' in layer_config else layer_config['dim_ordering']\ntest_prednet = PredNet(weights=train_model.layers[1].get_weights(), **\n layer_config)\ntest_generator = SequenceGenerator(test_file, test_sources, nt,\n sequence_start_mode='unique', data_format=data_format)\nX_test = test_generator.create_all()\ninput_shape = X_test.shape[1:]\ninputs = Input(shape=tuple(input_shape))\npredictions = test_prednet(inputs)\ntest_model = Model(inputs=inputs, outputs=predictions)\nX_hat = test_model.predict(X_test, batch_size)\nif data_format == 'channels_first':\n X_test = np.transpose(X_test, (0, 1, 3, 4, 2))\n X_hat = np.transpose(X_hat, (0, 1, 3, 4, 2))\nprint('X_hat.shape:', X_hat.shape)\nprint('X_test.shape:', X_test.shape)\nmse_model = np.mean((X_test[:, 1:] - X_hat[:, 1:]) ** 2)\nmse_prev = np.mean((X_test[:, :-1] - X_test[:, 1:]) ** 2)\nif not os.path.exists(RESULTS_SAVE_DIR):\n os.mkdir(RESULTS_SAVE_DIR)\nf = open(RESULTS_SAVE_DIR + 'prediction_scores.txt', 'w')\nf.write('Model MSE: %f\\n' % mse_model)\nf.write('Previous Frame MSE: %f' % mse_prev)\nf.close()\naspect_ratio = float(X_hat.shape[2]) / X_hat.shape[3]\nplt.figure(figsize=(nt, 2 * aspect_ratio))\ngs = gridspec.GridSpec(2, nt)\ngs.update(wspace=0.0, hspace=0.0)\nplot_save_dir = os.path.join(RESULTS_SAVE_DIR, 'prediction_plots/')\nif not os.path.exists(plot_save_dir):\n os.mkdir(plot_save_dir)\nplot_idx = np.random.permutation(X_test.shape[0])[:n_plot]\nfor i in plot_idx:\n for t in range(nt):\n plt.subplot(gs[t])\n plt.imshow(X_test[i, t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off',\n left='off', right='off', labelbottom='off', labelleft='off')\n if t == 0:\n plt.ylabel('Actual', fontsize=10)\n plt.subplot(gs[t + nt])\n plt.imshow(X_hat[i, t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off',\n left='off', right='off', labelbottom='off', labelleft='off')\n if t == 0:\n plt.ylabel('Predicted', fontsize=10)\n plt.savefig(plot_save_dir + 'plot_' + str(i) + '.png')\n plt.clf()\n",
"step-5": "# -*- coding: UTF-8 -*-\n'''\nEvaluate trained PredNet on KITTI sequences.\nCalculates mean-squared error and plots predictions.\n'''\n\nimport os\nimport numpy as np\nfrom six.moves import cPickle\nimport matplotlib\nmatplotlib.use('Agg')\nimport matplotlib.pyplot as plt\nimport matplotlib.gridspec as gridspec\n\nfrom keras import backend as K\nfrom keras.models import Model, model_from_json\nfrom keras.layers import Input, Dense, Flatten\n\nfrom prednet import PredNet\nfrom data_utils import SequenceGenerator\nfrom kitti_settings import *\n\n\nn_plot = 40\nbatch_size = 10\nnt = 5\n\n# 相关的weights,json的文件\nweights_file = os.path.join(WEIGHTS_DIR, 'prednet_facebook_segmpred_weights.hdf5')\njson_file = os.path.join(WEIGHTS_DIR, 'prednet_facebook_segmpred_model.json')\n# weights_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_weights.hdf5')\n# json_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_model.json')\n# weights_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_weights-extrapfinetuned.hdf5') # where weights will be saved\n# json_file = os.path.join(WEIGHTS_DIR, 'prednet_kitti_model-extrapfinetuned.json')\ntest_file = os.path.join(DATA_DIR, 'facebook_segmpred_X_test.hkl')\ntest_sources = os.path.join(DATA_DIR, 'facebook_segmpred_sources_test.hkl')\n\n# Load trained model\n# 加载模型的json文件\nf = open(json_file, 'r')\n# 读取的json文件\njson_string = f.read()\nf.close()\n# 从训练后存储的模型中序列化出模型,同时包含PredNet模型定制的参数,之后加载权重模型\n# 存储模型将相应的json文件和weights文件存储即可,加载模型从对应的json文件和weights文件反序列化即可\ntrain_model = model_from_json(json_string, custom_objects = {'PredNet': PredNet})\ntrain_model.load_weights(weights_file)\n\n# Create testing model (to output predictions)\n# 创建测试模型\n# 训练模型包含了InputLayer,PredNet等等,这里选取第二层即为PredNet\n# print(train_model.layers)\nlayer_config = train_model.layers[1].get_config()\n# 评估版本中将output_mode输出模型从误差error修改为predication预测\nlayer_config['output_mode'] = 'prediction'\ndata_format = layer_config['data_format'] if 'data_format' in layer_config else layer_config['dim_ordering']\n# 将网络中部分修改参数加载重构为PredNet网络,keras中具有get_config和get_weights等方法\ntest_prednet = PredNet(weights=train_model.layers[1].get_weights(), **layer_config)\n# 输入层的shape为不包括batch的batch_input_shape从第一列之后的所有\n# input_shape = list(train_model.layers[0].batch_input_shape[1:])\n# 输入数据为nt,总共有10帧,来预测将来的一帧\n# input_shape[0] = nt\n# print('input_shape:', input_shape)\ntest_generator = SequenceGenerator(test_file, test_sources, nt, sequence_start_mode='unique', data_format=data_format)\nX_test = test_generator.create_all()\ninput_shape = X_test.shape[1:]\n# print('input_shape:', input_shape)\n# 构建输入层\ninputs = Input(shape=tuple(input_shape))\n# 将输入层输入到prednet网络中测试输出\npredictions = test_prednet(inputs)\n# 构建输入和输出模型\ntest_model = Model(inputs=inputs, outputs=predictions)\n\n# 测试评估数据生成器\n# test_generator = SequenceGenerator(test_file, test_sources, nt, sequence_start_mode='unique', data_format=data_format)\n# X_test = test_generator.create_all()\n# 预测模型时参照batch_size,一个批次的进行load然后predict\nX_hat = test_model.predict(X_test, batch_size)\n# 这里模型的默认通道均在最后一位\nif data_format == 'channels_first':\n X_test = np.transpose(X_test, (0, 1, 3, 4, 2))\n X_hat = np.transpose(X_hat, (0, 1, 3, 4, 2))\nprint('X_hat.shape:', X_hat.shape)\nprint('X_test.shape:', X_test.shape)\n# Compare MSE of PredNet predictions vs. using last frame. 
Write results to prediction_scores.txt\n# 比较测试结果\nmse_model = np.mean( (X_test[:, 1:] - X_hat[:, 1:])**2 ) # look at all timesteps except the first\nmse_prev = np.mean( (X_test[:, :-1] - X_test[:, 1:])**2 )\nif not os.path.exists(RESULTS_SAVE_DIR): os.mkdir(RESULTS_SAVE_DIR)\nf = open(RESULTS_SAVE_DIR + 'prediction_scores.txt', 'w')\nf.write(\"Model MSE: %f\\n\" % mse_model)\nf.write(\"Previous Frame MSE: %f\" % mse_prev)\nf.close()\n\n# Plot some predictions\naspect_ratio = float(X_hat.shape[2]) / X_hat.shape[3]\nplt.figure(figsize = (nt, 2*aspect_ratio))\ngs = gridspec.GridSpec(2, nt)\ngs.update(wspace=0., hspace=0.)\nplot_save_dir = os.path.join(RESULTS_SAVE_DIR, 'prediction_plots/')\nif not os.path.exists(plot_save_dir): os.mkdir(plot_save_dir)\nplot_idx = np.random.permutation(X_test.shape[0])[:n_plot]\nfor i in plot_idx:\n for t in range(nt):\n plt.subplot(gs[t])\n plt.imshow(X_test[i,t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off', labelleft='off')\n if t==0: plt.ylabel('Actual', fontsize=10)\n\n plt.subplot(gs[t + nt])\n plt.imshow(X_hat[i,t], interpolation='none')\n plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off', labelleft='off')\n if t==0: plt.ylabel('Predicted', fontsize=10)\n\n plt.savefig(plot_save_dir + 'plot_' + str(i) + '.png')\n plt.clf()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
from cost_functions import trajectory_cost_fn
import time
class Controller():
def __init__(self):
pass
# Get the appropriate action(s) for this state(s)
def get_action(self, state):
pass
class RandomController(Controller):
    def __init__(self, env):
        """ YOUR CODE HERE """
        self.env = env

    def get_action(self, state):
        """ YOUR CODE HERE """
        """ Your code should randomly sample an action uniformly from the action space """
        # Assuming a Gym-style env: action_space.sample() draws uniformly.
        return self.env.action_space.sample()
class MPCcontroller(Controller):
""" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 """
def __init__(self,
env,
dyn_model,
horizon=5,
cost_fn=None,
num_simulated_paths=10,
):
self.env = env
self.dyn_model = dyn_model
self.horizon = horizon
self.cost_fn = cost_fn
self.num_simulated_paths = num_simulated_paths
def get_action(self, state):
""" YOUR CODE HERE """
""" Note: be careful to batch your simulations through the model for speed """
|
normal
|
{
"blob_id": "7112eb52aea9be6f8e682b4dacc6b615365c8cea",
"index": 7510,
"step-1": "<mask token>\n\n\nclass Controller:\n <mask token>\n <mask token>\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-2": "<mask token>\n\n\nclass Controller:\n <mask token>\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-3": "<mask token>\n\n\nclass Controller:\n\n def __init__(self):\n pass\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-4": "import numpy as np\nfrom cost_functions import trajectory_cost_fn\nimport time\n\n\nclass Controller:\n\n def __init__(self):\n pass\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n",
"step-5": "import numpy as np\r\nfrom cost_functions import trajectory_cost_fn\r\nimport time\r\n\r\nclass Controller():\r\n\tdef __init__(self):\r\n\t\tpass\r\n\r\n\t# Get the appropriate action(s) for this state(s)\r\n\tdef get_action(self, state):\r\n\t\tpass\r\n\r\n\r\nclass RandomController(Controller):\r\n\tdef __init__(self, env):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\tpass\r\n\r\n\tdef get_action(self, state):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\t\"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\r\n\t\tpass\r\n\r\n\r\nclass MPCcontroller(Controller):\r\n\t\"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\r\n\tdef __init__(self, \r\n\t\t\t\t env, \r\n\t\t\t\t dyn_model, \r\n\t\t\t\t horizon=5, \r\n\t\t\t\t cost_fn=None, \r\n\t\t\t\t num_simulated_paths=10,\r\n\t\t\t\t ):\r\n\t\tself.env = env\r\n\t\tself.dyn_model = dyn_model\r\n\t\tself.horizon = horizon\r\n\t\tself.cost_fn = cost_fn\r\n\t\tself.num_simulated_paths = num_simulated_paths\r\n\r\n\tdef get_action(self, state):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\t\"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\r\n\r\n",
"step-ids": [
8,
9,
10,
11,
12
]
}
|
[
8,
9,
10,
11,
12
] |
from kafka import KafkaConsumer
import csv
users = set()
# returns the [time, user id, GET request] fields of a ConsumerRecord
def parse_cr(cr):
binary = cr.value
string = binary.decode('utf-8')
# [time, user id, GET request]
return string.split(',')
# returns the movie title (name+name+year format) from a ConsumerRecord, for both watch (/data/) and rating (/rate/) requests
def get_title(cr):
get = parse_cr(cr)[2]
head = get[5:9]
if head == 'data':
trunc = get[12:]
return trunc.split('/')[0]
else:
trunc = get[10:]
return trunc.split('=')[0]
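# Hypothetical log lines consistent with the parsing above:
#   'GET /data/m/the+matrix+1999/12.mpg' -> 'the+matrix+1999'  (watch request)
#   'GET /rate/the+matrix+1999=4'        -> 'the+matrix+1999'  (rating request)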
# distinct dates observed in the stream, used later to average views per day
dates = set()
# counts, per movie title, how many watch sessions were started
def gather_popularity():
first = None
popularity = dict()
consumer = KafkaConsumer(
'movielog',
bootstrap_servers=['localhost:9092'],
auto_offset_reset='earliest',
group_id='jcerwin-stream',
enable_auto_commit=True,
auto_commit_interval_ms=1000
)
    duration = 0
    max_duration = 500000000  # cap on the number of messages to consume
for message in consumer:
if duration > max_duration: break
else: duration += 1
if duration % (max_duration / 100) == 0:
print(duration / (max_duration / 100), "% complete")
if first is None:
first = message
else:
if message == first:
print("repeat")
break
parsed = parse_cr(message)
r_block = parsed[2]
head = r_block[5:9]
# look for watches only not reviews
if head == 'data':
trunc = r_block[12:]
title = trunc.split('/')[0]
minutes = r_block.split('/')[4][:-4]
else:
continue
        if int(minutes) == 0:  # only the first minute of a watch counts as one view
date = (parsed[0])[5:10]
if title in popularity:
count = popularity[title]
popularity[title] = count + 1
else:
popularity[title] = 1
dates.add(date)
return popularity
def gather_titles():
consumer = KafkaConsumer(
'movielog',
bootstrap_servers=['localhost:9092'],
auto_offset_reset='earliest',
group_id='jcerwin-new',
enable_auto_commit=True,
auto_commit_interval_ms=1000
)
f = open("movie_titles.txt", "r")
fl = f.readlines()
f.close()
s = set(fl)
i = len(s)
f = open("movie_titles.txt", "a")
for message in consumer:
        if i > 27000:  # stop once ~27k distinct titles have been collected
break
title = get_title(message) + '\n'
if title in s:
continue
else:
s.add(title)
f.write(title)
i = i + 1
f.close()
#with open('views.csv', 'w') as csv_file:
# writer = csv.writer(csv_file)
# for key, value in gather_popularity().items():
# writer.writerow([key, value])
results = gather_popularity()
num_days = len(dates)
# write each movie's average number of view-starts per observed day
with open('views3.csv', 'w') as csv_file:
writer = csv.writer(csv_file)
for key, value in results.items():
writer.writerow([key, value / num_days])
|
normal
|
{
"blob_id": "374fbb986524f28cc86f6e579f504eeb8ddc9701",
"index": 1122,
"step-1": "<mask token>\n\n\ndef parse_cr(cr):\n binary = cr.value\n string = binary.decode('utf-8')\n return string.split(',')\n\n\ndef get_title(cr):\n get = parse_cr(cr)[2]\n head = get[5:9]\n if head == 'data':\n trunc = get[12:]\n return trunc.split('/')[0]\n else:\n trunc = get[10:]\n return trunc.split('=')[0]\n\n\n<mask token>\n\n\ndef gather_popularity():\n first = None\n popularity = dict()\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-stream', enable_auto_commit=True, auto_commit_interval_ms=1000\n )\n duration = 0\n max_duration = 500000000\n for message in consumer:\n if duration > max_duration:\n break\n else:\n duration += 1\n if duration % (max_duration / 100) == 0:\n print(duration / (max_duration / 100), '% complete')\n if first is None:\n first = message\n elif message == first:\n print('repeat')\n break\n parsed = parse_cr(message)\n r_block = parsed[2]\n head = r_block[5:9]\n if head == 'data':\n trunc = r_block[12:]\n title = trunc.split('/')[0]\n minutes = r_block.split('/')[4][:-4]\n else:\n continue\n if int(minutes) == 0:\n date = parsed[0][5:10]\n if title in popularity:\n count = popularity[title]\n popularity[title] = count + 1\n else:\n popularity[title] = 1\n dates.add(date)\n return popularity\n\n\ndef gather_titles():\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-new', enable_auto_commit=True, auto_commit_interval_ms=1000)\n f = open('movie_titles.txt', 'r')\n fl = f.readlines()\n f.close()\n s = set(fl)\n i = len(s)\n f = open('movie_titles.txt', 'a')\n for message in consumer:\n if i > 27000:\n break\n title = get_title(message) + '\\n'\n if title in s:\n continue\n else:\n s.add(title)\n f.write(title)\n i = i + 1\n f.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_cr(cr):\n binary = cr.value\n string = binary.decode('utf-8')\n return string.split(',')\n\n\ndef get_title(cr):\n get = parse_cr(cr)[2]\n head = get[5:9]\n if head == 'data':\n trunc = get[12:]\n return trunc.split('/')[0]\n else:\n trunc = get[10:]\n return trunc.split('=')[0]\n\n\n<mask token>\n\n\ndef gather_popularity():\n first = None\n popularity = dict()\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-stream', enable_auto_commit=True, auto_commit_interval_ms=1000\n )\n duration = 0\n max_duration = 500000000\n for message in consumer:\n if duration > max_duration:\n break\n else:\n duration += 1\n if duration % (max_duration / 100) == 0:\n print(duration / (max_duration / 100), '% complete')\n if first is None:\n first = message\n elif message == first:\n print('repeat')\n break\n parsed = parse_cr(message)\n r_block = parsed[2]\n head = r_block[5:9]\n if head == 'data':\n trunc = r_block[12:]\n title = trunc.split('/')[0]\n minutes = r_block.split('/')[4][:-4]\n else:\n continue\n if int(minutes) == 0:\n date = parsed[0][5:10]\n if title in popularity:\n count = popularity[title]\n popularity[title] = count + 1\n else:\n popularity[title] = 1\n dates.add(date)\n return popularity\n\n\ndef gather_titles():\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-new', enable_auto_commit=True, auto_commit_interval_ms=1000)\n f = open('movie_titles.txt', 'r')\n fl = f.readlines()\n f.close()\n s = set(fl)\n i = len(s)\n f = open('movie_titles.txt', 'a')\n for message in consumer:\n if i > 27000:\n break\n title = get_title(message) + '\\n'\n if title in s:\n continue\n else:\n s.add(title)\n f.write(title)\n i = i + 1\n f.close()\n\n\n<mask token>\nwith open('views3.csv', 'w') as csv_file:\n writer = csv.writer(csv_file)\n for key, value in results.items():\n writer.writerow([key, value / num_days])\n",
"step-3": "<mask token>\nusers = set()\n\n\ndef parse_cr(cr):\n binary = cr.value\n string = binary.decode('utf-8')\n return string.split(',')\n\n\ndef get_title(cr):\n get = parse_cr(cr)[2]\n head = get[5:9]\n if head == 'data':\n trunc = get[12:]\n return trunc.split('/')[0]\n else:\n trunc = get[10:]\n return trunc.split('=')[0]\n\n\ndates = set()\n\n\ndef gather_popularity():\n first = None\n popularity = dict()\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-stream', enable_auto_commit=True, auto_commit_interval_ms=1000\n )\n duration = 0\n max_duration = 500000000\n for message in consumer:\n if duration > max_duration:\n break\n else:\n duration += 1\n if duration % (max_duration / 100) == 0:\n print(duration / (max_duration / 100), '% complete')\n if first is None:\n first = message\n elif message == first:\n print('repeat')\n break\n parsed = parse_cr(message)\n r_block = parsed[2]\n head = r_block[5:9]\n if head == 'data':\n trunc = r_block[12:]\n title = trunc.split('/')[0]\n minutes = r_block.split('/')[4][:-4]\n else:\n continue\n if int(minutes) == 0:\n date = parsed[0][5:10]\n if title in popularity:\n count = popularity[title]\n popularity[title] = count + 1\n else:\n popularity[title] = 1\n dates.add(date)\n return popularity\n\n\ndef gather_titles():\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-new', enable_auto_commit=True, auto_commit_interval_ms=1000)\n f = open('movie_titles.txt', 'r')\n fl = f.readlines()\n f.close()\n s = set(fl)\n i = len(s)\n f = open('movie_titles.txt', 'a')\n for message in consumer:\n if i > 27000:\n break\n title = get_title(message) + '\\n'\n if title in s:\n continue\n else:\n s.add(title)\n f.write(title)\n i = i + 1\n f.close()\n\n\nresults = gather_popularity()\nnum_days = len(dates)\nwith open('views3.csv', 'w') as csv_file:\n writer = csv.writer(csv_file)\n for key, value in results.items():\n writer.writerow([key, value / num_days])\n",
"step-4": "from kafka import KafkaConsumer\nimport csv\nusers = set()\n\n\ndef parse_cr(cr):\n binary = cr.value\n string = binary.decode('utf-8')\n return string.split(',')\n\n\ndef get_title(cr):\n get = parse_cr(cr)[2]\n head = get[5:9]\n if head == 'data':\n trunc = get[12:]\n return trunc.split('/')[0]\n else:\n trunc = get[10:]\n return trunc.split('=')[0]\n\n\ndates = set()\n\n\ndef gather_popularity():\n first = None\n popularity = dict()\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-stream', enable_auto_commit=True, auto_commit_interval_ms=1000\n )\n duration = 0\n max_duration = 500000000\n for message in consumer:\n if duration > max_duration:\n break\n else:\n duration += 1\n if duration % (max_duration / 100) == 0:\n print(duration / (max_duration / 100), '% complete')\n if first is None:\n first = message\n elif message == first:\n print('repeat')\n break\n parsed = parse_cr(message)\n r_block = parsed[2]\n head = r_block[5:9]\n if head == 'data':\n trunc = r_block[12:]\n title = trunc.split('/')[0]\n minutes = r_block.split('/')[4][:-4]\n else:\n continue\n if int(minutes) == 0:\n date = parsed[0][5:10]\n if title in popularity:\n count = popularity[title]\n popularity[title] = count + 1\n else:\n popularity[title] = 1\n dates.add(date)\n return popularity\n\n\ndef gather_titles():\n consumer = KafkaConsumer('movielog', bootstrap_servers=[\n 'localhost:9092'], auto_offset_reset='earliest', group_id=\n 'jcerwin-new', enable_auto_commit=True, auto_commit_interval_ms=1000)\n f = open('movie_titles.txt', 'r')\n fl = f.readlines()\n f.close()\n s = set(fl)\n i = len(s)\n f = open('movie_titles.txt', 'a')\n for message in consumer:\n if i > 27000:\n break\n title = get_title(message) + '\\n'\n if title in s:\n continue\n else:\n s.add(title)\n f.write(title)\n i = i + 1\n f.close()\n\n\nresults = gather_popularity()\nnum_days = len(dates)\nwith open('views3.csv', 'w') as csv_file:\n writer = csv.writer(csv_file)\n for key, value in results.items():\n writer.writerow([key, value / num_days])\n",
"step-5": "from kafka import KafkaConsumer\nimport csv\n\nusers = set()\n\n# returns string of title given a ConsumerRecord\ndef parse_cr(cr):\n binary = cr.value\n string = binary.decode('utf-8')\n # [time, user id, GET request]\n return string.split(',')\n\n\n# returns string of title given a ConsumerRecord in name+name+year format regardless of rate or data\ndef get_title(cr):\n get = parse_cr(cr)[2]\n head = get[5:9]\n if head == 'data':\n trunc = get[12:]\n return trunc.split('/')[0]\n else:\n trunc = get[10:]\n return trunc.split('=')[0]\n\ndates = set()\ndef gather_popularity():\n first = None\n popularity = dict()\n\n\n consumer = KafkaConsumer(\n 'movielog',\n bootstrap_servers=['localhost:9092'],\n auto_offset_reset='earliest',\n group_id='jcerwin-stream',\n enable_auto_commit=True,\n auto_commit_interval_ms=1000\n )\n duration = 0\n max_duration = 500000000\n for message in consumer:\n if duration > max_duration: break\n else: duration += 1\n\n if duration % (max_duration / 100) == 0:\n print(duration / (max_duration / 100), \"% complete\")\n\n if first is None:\n first = message\n else:\n if message == first:\n print(\"repeat\")\n break\n\n parsed = parse_cr(message)\n r_block = parsed[2]\n head = r_block[5:9]\n # look for watches only not reviews\n if head == 'data':\n trunc = r_block[12:]\n title = trunc.split('/')[0]\n\n minutes = r_block.split('/')[4][:-4]\n else:\n continue\n\n if int(minutes) == 0:\n date = (parsed[0])[5:10]\n if title in popularity:\n count = popularity[title]\n popularity[title] = count + 1\n\n else:\n popularity[title] = 1\n\n dates.add(date)\n\n\n return popularity\n\ndef gather_titles():\n consumer = KafkaConsumer(\n 'movielog',\n bootstrap_servers=['localhost:9092'],\n auto_offset_reset='earliest',\n group_id='jcerwin-new',\n enable_auto_commit=True,\n auto_commit_interval_ms=1000\n )\n\n f = open(\"movie_titles.txt\", \"r\")\n fl = f.readlines()\n f.close()\n s = set(fl)\n i = len(s)\n\n f = open(\"movie_titles.txt\", \"a\")\n for message in consumer:\n if i > 27000:\n break\n title = get_title(message) + '\\n'\n if title in s:\n continue\n else:\n s.add(title)\n f.write(title)\n i = i + 1\n\n f.close()\n\n#with open('views.csv', 'w') as csv_file:\n# writer = csv.writer(csv_file)\n# for key, value in gather_popularity().items():\n# writer.writerow([key, value])\n\n\nresults = gather_popularity()\nnum_days = len(dates)\n\nwith open('views3.csv', 'w') as csv_file:\n writer = csv.writer(csv_file)\n for key, value in results.items():\n writer.writerow([key, value / num_days])\n\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import psycopg2
from .configuration import ConfigurationException
DB_CONNECT_STRING = "host='{host}' dbname='{dbname}' user='{user}' password='{passwd}'"
class DBItemCompany:
def __init__(self, _id, tweeter, category, categoryUrl, provenScore, ranking, location, url, categoryId):
self.id = _id
self.twitterAccount = tweeter
self.category = category
self.categoryUrl = categoryUrl
self.provenScore = provenScore
self.ranking = ranking
self.location = location
self.url = url
self.categoryId = categoryId
@property
def invalidScore(self):
return self.provenScore is None or self.provenScore < 1
@property
def twitter(self):
return '@' + self.twitterAccount
class DBException(Exception):
"""
Represents a generic exception thrown by the Database Manager
"""
pass
class DBManager:
def __init__(self, cfg):
self.cfg = cfg
self.__companies = {}
self.__loggedIn = False
self.connection = None
self.cursor = None
def __del__(self):
try:
self.connection.close()
except psycopg2.Error:
pass
def __logInDb(self):
try:
dbSettings = self.cfg.databaseSettings
self.connection = psycopg2.connect(DB_CONNECT_STRING.format(
host=dbSettings[0], dbname=dbSettings[1],
user=dbSettings[2], passwd=dbSettings[3]
))
self.cursor = self.connection.cursor()
self.__loggedIn = True
return True
except (psycopg2.OperationalError, ConfigurationException):
return False
def __getDomainName(self, schema):
try:
self.cursor.execute("SELECT domain_url FROM customers_customer WHERE schema_name='{schemaname}'".format(schemaname=schema))
return 'http://' + self.cursor.fetchone()[0]
except psycopg2.DatabaseError:
raise DBException('Failed to extract domain name from database')
def __buildCategoryUrl(self, catId, schemaName):
return '{domain}/vendors/?find=category-{categoryId}'.format(domain=self.__getDomainName(schemaName), categoryId=catId)
def __buildProfileUrl(self, catSlug, profSlug, schemaName):
return '{domain}/vendors/{categorySlug}/{profileSlug}'.format(domain=self.__getDomainName(schemaName),
categorySlug=catSlug,
profileSlug=profSlug)
def __buildProfileUrlWOCategory(self, profSlug, schemaName):
return '{domain}/vendors/{profileSlug}'.format(domain=self.__getDomainName(schemaName), profileSlug=profSlug)
def __getCompaniesData(self, schema):
"""
Load Companies list from database
"""
try:
self.cursor.execute("""SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE
twitter <> ''""".format(schema=schema))
data = self.cursor.fetchall()
companies = []
for entry in data:
self.cursor.execute('SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'.format(schema=schema, vendor=entry[0]))
cities = self.cursor.fetchall()
if cities is None:
continue
city = ''
for cityId in cities:
self.cursor.execute('SELECT city FROM {schema}.locations_location WHERE id = {city}'.format(schema=schema, city=cityId[0]))
cityName = self.cursor.fetchone()
if cityName is not None:
city += cityName[0]
self.cursor.execute('SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND "primary" is true'.format(schema=schema, vendor=entry[0]))
customKind = self.cursor.fetchone()
if customKind is None:
catId = rank = None
else:
catId, rank = customKind
if catId is not None:
self.cursor.execute('SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'.format(schema=schema, cat=catId))
catData = self.cursor.fetchone()
else:
catData = None
companies.append(DBItemCompany(
_id = entry[0],
tweeter = entry[1],
category = catData[0] if catData is not None else None,
categoryUrl = self.__buildCategoryUrl(catId, schema) if catId is not None else None,
provenScore = entry[2],
ranking = rank,
location = city,
url = self.__buildProfileUrl(catData[1], entry[3], schema) if catData is not None else self.__buildProfileUrlWOCategory(entry[3], schema),
categoryId = catId
))
self.__companies[schema] = companies
except psycopg2.DatabaseError as err:
raise DBException(err.args[0])
def domainUrl(self, schema):
return self.__getDomainName(schema)
def refreshData(self, schemas):
if not self.__loggedIn:
if not self.__logInDb():
return False
for schema in schemas:
self.__getCompaniesData(schema)
return True
@property
def companies(self):
return self.__companies
@property
def isConnected(self):
return self.__loggedIn
|
normal
|
{
"blob_id": "31b87a3ceca1f48665ecc9754d5f87bb9b7bbf13",
"index": 7579,
"step-1": "<mask token>\n\n\nclass DBException(Exception):\n \"\"\"\n Represents a generic exception thrown by the Database Manager\n \"\"\"\n pass\n\n\nclass DBManager:\n\n def __init__(self, cfg):\n self.cfg = cfg\n self.__companies = {}\n self.__loggedIn = False\n self.connection = None\n self.cursor = None\n\n def __del__(self):\n try:\n self.connection.close()\n except psycopg2.Error:\n pass\n\n def __logInDb(self):\n try:\n dbSettings = self.cfg.databaseSettings\n self.connection = psycopg2.connect(DB_CONNECT_STRING.format(\n host=dbSettings[0], dbname=dbSettings[1], user=dbSettings[2\n ], passwd=dbSettings[3]))\n self.cursor = self.connection.cursor()\n self.__loggedIn = True\n return True\n except (psycopg2.OperationalError, ConfigurationException):\n return False\n\n def __getDomainName(self, schema):\n try:\n self.cursor.execute(\n \"SELECT domain_url FROM customers_customer WHERE schema_name='{schemaname}'\"\n .format(schemaname=schema))\n return 'http://' + self.cursor.fetchone()[0]\n except psycopg2.DatabaseError:\n raise DBException('Failed to extract domain name from database')\n\n def __buildCategoryUrl(self, catId, schemaName):\n return '{domain}/vendors/?find=category-{categoryId}'.format(domain\n =self.__getDomainName(schemaName), categoryId=catId)\n\n def __buildProfileUrl(self, catSlug, profSlug, schemaName):\n return '{domain}/vendors/{categorySlug}/{profileSlug}'.format(domain\n =self.__getDomainName(schemaName), categorySlug=catSlug,\n profileSlug=profSlug)\n\n def __buildProfileUrlWOCategory(self, profSlug, schemaName):\n return '{domain}/vendors/{profileSlug}'.format(domain=self.\n __getDomainName(schemaName), profileSlug=profSlug)\n\n def __getCompaniesData(self, schema):\n \"\"\"\n Load Companies list from database\n \"\"\"\n try:\n self.cursor.execute(\n \"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\"\n .format(schema=schema))\n data = self.cursor.fetchall()\n companies = []\n for entry in data:\n self.cursor.execute(\n 'SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'\n .format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n if cities is None:\n continue\n city = ''\n for cityId in cities:\n self.cursor.execute(\n 'SELECT city FROM {schema}.locations_location WHERE id = {city}'\n .format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n if cityName is not None:\n city += cityName[0]\n self.cursor.execute(\n 'SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'\n .format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n if catId is not None:\n self.cursor.execute(\n 'SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'\n .format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n companies.append(DBItemCompany(_id=entry[0], tweeter=entry[\n 1], category=catData[0] if catData is not None else\n None, categoryUrl=self.__buildCategoryUrl(catId, schema\n ) if catId is not None else None, provenScore=entry[2],\n ranking=rank, location=city, url=self.__buildProfileUrl\n (catData[1], entry[3], schema) if catData is not None else\n self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId=catId))\n self.__companies[schema] = companies\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])\n\n def 
domainUrl(self, schema):\n return self.__getDomainName(schema)\n\n def refreshData(self, schemas):\n if not self.__loggedIn:\n if not self.__logInDb():\n return False\n for schema in schemas:\n self.__getCompaniesData(schema)\n return True\n\n @property\n def companies(self):\n return self.__companies\n\n @property\n def isConnected(self):\n return self.__loggedIn\n",
"step-2": "<mask token>\n\n\nclass DBItemCompany:\n\n def __init__(self, _id, tweeter, category, categoryUrl, provenScore,\n ranking, location, url, categoryId):\n self.id = _id\n self.twitterAccount = tweeter\n self.category = category\n self.categoryUrl = categoryUrl\n self.provenScore = provenScore\n self.ranking = ranking\n self.location = location\n self.url = url\n self.categoryId = categoryId\n <mask token>\n <mask token>\n\n\nclass DBException(Exception):\n \"\"\"\n Represents a generic exception thrown by the Database Manager\n \"\"\"\n pass\n\n\nclass DBManager:\n\n def __init__(self, cfg):\n self.cfg = cfg\n self.__companies = {}\n self.__loggedIn = False\n self.connection = None\n self.cursor = None\n\n def __del__(self):\n try:\n self.connection.close()\n except psycopg2.Error:\n pass\n\n def __logInDb(self):\n try:\n dbSettings = self.cfg.databaseSettings\n self.connection = psycopg2.connect(DB_CONNECT_STRING.format(\n host=dbSettings[0], dbname=dbSettings[1], user=dbSettings[2\n ], passwd=dbSettings[3]))\n self.cursor = self.connection.cursor()\n self.__loggedIn = True\n return True\n except (psycopg2.OperationalError, ConfigurationException):\n return False\n\n def __getDomainName(self, schema):\n try:\n self.cursor.execute(\n \"SELECT domain_url FROM customers_customer WHERE schema_name='{schemaname}'\"\n .format(schemaname=schema))\n return 'http://' + self.cursor.fetchone()[0]\n except psycopg2.DatabaseError:\n raise DBException('Failed to extract domain name from database')\n\n def __buildCategoryUrl(self, catId, schemaName):\n return '{domain}/vendors/?find=category-{categoryId}'.format(domain\n =self.__getDomainName(schemaName), categoryId=catId)\n\n def __buildProfileUrl(self, catSlug, profSlug, schemaName):\n return '{domain}/vendors/{categorySlug}/{profileSlug}'.format(domain\n =self.__getDomainName(schemaName), categorySlug=catSlug,\n profileSlug=profSlug)\n\n def __buildProfileUrlWOCategory(self, profSlug, schemaName):\n return '{domain}/vendors/{profileSlug}'.format(domain=self.\n __getDomainName(schemaName), profileSlug=profSlug)\n\n def __getCompaniesData(self, schema):\n \"\"\"\n Load Companies list from database\n \"\"\"\n try:\n self.cursor.execute(\n \"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\"\n .format(schema=schema))\n data = self.cursor.fetchall()\n companies = []\n for entry in data:\n self.cursor.execute(\n 'SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'\n .format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n if cities is None:\n continue\n city = ''\n for cityId in cities:\n self.cursor.execute(\n 'SELECT city FROM {schema}.locations_location WHERE id = {city}'\n .format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n if cityName is not None:\n city += cityName[0]\n self.cursor.execute(\n 'SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'\n .format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n if catId is not None:\n self.cursor.execute(\n 'SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'\n .format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n companies.append(DBItemCompany(_id=entry[0], tweeter=entry[\n 1], category=catData[0] if catData is not None else\n None, 
categoryUrl=self.__buildCategoryUrl(catId, schema\n ) if catId is not None else None, provenScore=entry[2],\n ranking=rank, location=city, url=self.__buildProfileUrl\n (catData[1], entry[3], schema) if catData is not None else\n self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId=catId))\n self.__companies[schema] = companies\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])\n\n def domainUrl(self, schema):\n return self.__getDomainName(schema)\n\n def refreshData(self, schemas):\n if not self.__loggedIn:\n if not self.__logInDb():\n return False\n for schema in schemas:\n self.__getCompaniesData(schema)\n return True\n\n @property\n def companies(self):\n return self.__companies\n\n @property\n def isConnected(self):\n return self.__loggedIn\n",
"step-3": "<mask token>\n\n\nclass DBItemCompany:\n\n def __init__(self, _id, tweeter, category, categoryUrl, provenScore,\n ranking, location, url, categoryId):\n self.id = _id\n self.twitterAccount = tweeter\n self.category = category\n self.categoryUrl = categoryUrl\n self.provenScore = provenScore\n self.ranking = ranking\n self.location = location\n self.url = url\n self.categoryId = categoryId\n <mask token>\n\n @property\n def twitter(self):\n return '@' + self.twitterAccount\n\n\nclass DBException(Exception):\n \"\"\"\n Represents a generic exception thrown by the Database Manager\n \"\"\"\n pass\n\n\nclass DBManager:\n\n def __init__(self, cfg):\n self.cfg = cfg\n self.__companies = {}\n self.__loggedIn = False\n self.connection = None\n self.cursor = None\n\n def __del__(self):\n try:\n self.connection.close()\n except psycopg2.Error:\n pass\n\n def __logInDb(self):\n try:\n dbSettings = self.cfg.databaseSettings\n self.connection = psycopg2.connect(DB_CONNECT_STRING.format(\n host=dbSettings[0], dbname=dbSettings[1], user=dbSettings[2\n ], passwd=dbSettings[3]))\n self.cursor = self.connection.cursor()\n self.__loggedIn = True\n return True\n except (psycopg2.OperationalError, ConfigurationException):\n return False\n\n def __getDomainName(self, schema):\n try:\n self.cursor.execute(\n \"SELECT domain_url FROM customers_customer WHERE schema_name='{schemaname}'\"\n .format(schemaname=schema))\n return 'http://' + self.cursor.fetchone()[0]\n except psycopg2.DatabaseError:\n raise DBException('Failed to extract domain name from database')\n\n def __buildCategoryUrl(self, catId, schemaName):\n return '{domain}/vendors/?find=category-{categoryId}'.format(domain\n =self.__getDomainName(schemaName), categoryId=catId)\n\n def __buildProfileUrl(self, catSlug, profSlug, schemaName):\n return '{domain}/vendors/{categorySlug}/{profileSlug}'.format(domain\n =self.__getDomainName(schemaName), categorySlug=catSlug,\n profileSlug=profSlug)\n\n def __buildProfileUrlWOCategory(self, profSlug, schemaName):\n return '{domain}/vendors/{profileSlug}'.format(domain=self.\n __getDomainName(schemaName), profileSlug=profSlug)\n\n def __getCompaniesData(self, schema):\n \"\"\"\n Load Companies list from database\n \"\"\"\n try:\n self.cursor.execute(\n \"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\"\n .format(schema=schema))\n data = self.cursor.fetchall()\n companies = []\n for entry in data:\n self.cursor.execute(\n 'SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'\n .format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n if cities is None:\n continue\n city = ''\n for cityId in cities:\n self.cursor.execute(\n 'SELECT city FROM {schema}.locations_location WHERE id = {city}'\n .format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n if cityName is not None:\n city += cityName[0]\n self.cursor.execute(\n 'SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'\n .format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n if catId is not None:\n self.cursor.execute(\n 'SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'\n .format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n companies.append(DBItemCompany(_id=entry[0], tweeter=entry[\n 1], 
category=catData[0] if catData is not None else\n None, categoryUrl=self.__buildCategoryUrl(catId, schema\n ) if catId is not None else None, provenScore=entry[2],\n ranking=rank, location=city, url=self.__buildProfileUrl\n (catData[1], entry[3], schema) if catData is not None else\n self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId=catId))\n self.__companies[schema] = companies\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])\n\n def domainUrl(self, schema):\n return self.__getDomainName(schema)\n\n def refreshData(self, schemas):\n if not self.__loggedIn:\n if not self.__logInDb():\n return False\n for schema in schemas:\n self.__getCompaniesData(schema)\n return True\n\n @property\n def companies(self):\n return self.__companies\n\n @property\n def isConnected(self):\n return self.__loggedIn\n",
"step-4": "<mask token>\n\n\nclass DBItemCompany:\n\n def __init__(self, _id, tweeter, category, categoryUrl, provenScore,\n ranking, location, url, categoryId):\n self.id = _id\n self.twitterAccount = tweeter\n self.category = category\n self.categoryUrl = categoryUrl\n self.provenScore = provenScore\n self.ranking = ranking\n self.location = location\n self.url = url\n self.categoryId = categoryId\n\n @property\n def invalidScore(self):\n return self.provenScore is None or self.provenScore < 1\n\n @property\n def twitter(self):\n return '@' + self.twitterAccount\n\n\nclass DBException(Exception):\n \"\"\"\n Represents a generic exception thrown by the Database Manager\n \"\"\"\n pass\n\n\nclass DBManager:\n\n def __init__(self, cfg):\n self.cfg = cfg\n self.__companies = {}\n self.__loggedIn = False\n self.connection = None\n self.cursor = None\n\n def __del__(self):\n try:\n self.connection.close()\n except psycopg2.Error:\n pass\n\n def __logInDb(self):\n try:\n dbSettings = self.cfg.databaseSettings\n self.connection = psycopg2.connect(DB_CONNECT_STRING.format(\n host=dbSettings[0], dbname=dbSettings[1], user=dbSettings[2\n ], passwd=dbSettings[3]))\n self.cursor = self.connection.cursor()\n self.__loggedIn = True\n return True\n except (psycopg2.OperationalError, ConfigurationException):\n return False\n\n def __getDomainName(self, schema):\n try:\n self.cursor.execute(\n \"SELECT domain_url FROM customers_customer WHERE schema_name='{schemaname}'\"\n .format(schemaname=schema))\n return 'http://' + self.cursor.fetchone()[0]\n except psycopg2.DatabaseError:\n raise DBException('Failed to extract domain name from database')\n\n def __buildCategoryUrl(self, catId, schemaName):\n return '{domain}/vendors/?find=category-{categoryId}'.format(domain\n =self.__getDomainName(schemaName), categoryId=catId)\n\n def __buildProfileUrl(self, catSlug, profSlug, schemaName):\n return '{domain}/vendors/{categorySlug}/{profileSlug}'.format(domain\n =self.__getDomainName(schemaName), categorySlug=catSlug,\n profileSlug=profSlug)\n\n def __buildProfileUrlWOCategory(self, profSlug, schemaName):\n return '{domain}/vendors/{profileSlug}'.format(domain=self.\n __getDomainName(schemaName), profileSlug=profSlug)\n\n def __getCompaniesData(self, schema):\n \"\"\"\n Load Companies list from database\n \"\"\"\n try:\n self.cursor.execute(\n \"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\"\n .format(schema=schema))\n data = self.cursor.fetchall()\n companies = []\n for entry in data:\n self.cursor.execute(\n 'SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'\n .format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n if cities is None:\n continue\n city = ''\n for cityId in cities:\n self.cursor.execute(\n 'SELECT city FROM {schema}.locations_location WHERE id = {city}'\n .format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n if cityName is not None:\n city += cityName[0]\n self.cursor.execute(\n 'SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'\n .format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n if catId is not None:\n self.cursor.execute(\n 'SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'\n .format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n 
companies.append(DBItemCompany(_id=entry[0], tweeter=entry[\n 1], category=catData[0] if catData is not None else\n None, categoryUrl=self.__buildCategoryUrl(catId, schema\n ) if catId is not None else None, provenScore=entry[2],\n ranking=rank, location=city, url=self.__buildProfileUrl\n (catData[1], entry[3], schema) if catData is not None else\n self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId=catId))\n self.__companies[schema] = companies\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])\n\n def domainUrl(self, schema):\n return self.__getDomainName(schema)\n\n def refreshData(self, schemas):\n if not self.__loggedIn:\n if not self.__logInDb():\n return False\n for schema in schemas:\n self.__getCompaniesData(schema)\n return True\n\n @property\n def companies(self):\n return self.__companies\n\n @property\n def isConnected(self):\n return self.__loggedIn\n",
"step-5": "import psycopg2\n\nfrom .configuration import ConfigurationException\n\nDB_CONNECT_STRING = \"host='{host}' dbname='{dbname}' user='{user}' password='{passwd}'\"\n\nclass DBItemCompany:\n def __init__(self, _id, tweeter, category, categoryUrl, provenScore, ranking, location, url, categoryId):\n self.id = _id\n self.twitterAccount = tweeter\n self.category = category\n self.categoryUrl = categoryUrl\n self.provenScore = provenScore\n self.ranking = ranking\n self.location = location\n self.url = url\n self.categoryId = categoryId\n\n @property\n def invalidScore(self):\n return self.provenScore is None or self.provenScore < 1\n\n @property\n def twitter(self):\n return '@' + self.twitterAccount\n\nclass DBException(Exception):\n \"\"\"\n Represents a generic exception thrown by the Database Manager\n \"\"\"\n pass\n\nclass DBManager:\n def __init__(self, cfg):\n self.cfg = cfg\n\n self.__companies = {}\n self.__loggedIn = False\n self.connection = None\n self.cursor = None\n\n def __del__(self):\n try:\n self.connection.close()\n except psycopg2.Error:\n pass\n\n def __logInDb(self):\n try:\n dbSettings = self.cfg.databaseSettings\n\n self.connection = psycopg2.connect(DB_CONNECT_STRING.format(\n host=dbSettings[0], dbname=dbSettings[1],\n user=dbSettings[2], passwd=dbSettings[3]\n ))\n self.cursor = self.connection.cursor()\n\n self.__loggedIn = True\n\n return True\n except (psycopg2.OperationalError, ConfigurationException):\n return False\n\n def __getDomainName(self, schema):\n try:\n self.cursor.execute(\"SELECT domain_url FROM customers_customer WHERE schema_name='{schemaname}'\".format(schemaname=schema))\n return 'http://' + self.cursor.fetchone()[0]\n except psycopg2.DatabaseError:\n raise DBException('Failed to extract domain name from database')\n\n def __buildCategoryUrl(self, catId, schemaName):\n return '{domain}/vendors/?find=category-{categoryId}'.format(domain=self.__getDomainName(schemaName), categoryId=catId)\n\n def __buildProfileUrl(self, catSlug, profSlug, schemaName):\n return '{domain}/vendors/{categorySlug}/{profileSlug}'.format(domain=self.__getDomainName(schemaName),\n categorySlug=catSlug,\n profileSlug=profSlug)\n\n def __buildProfileUrlWOCategory(self, profSlug, schemaName):\n return '{domain}/vendors/{profileSlug}'.format(domain=self.__getDomainName(schemaName), profileSlug=profSlug)\n\n def __getCompaniesData(self, schema):\n \"\"\"\n Load Companies list from database\n \"\"\"\n try:\n self.cursor.execute(\"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\".format(schema=schema))\n data = self.cursor.fetchall()\n\n companies = []\n for entry in data:\n self.cursor.execute('SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'.format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n\n if cities is None:\n continue\n\n city = ''\n\n for cityId in cities:\n self.cursor.execute('SELECT city FROM {schema}.locations_location WHERE id = {city}'.format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n\n if cityName is not None:\n city += cityName[0]\n\n self.cursor.execute('SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'.format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n\n if catId is not None:\n self.cursor.execute('SELECT name, slug FROM 
{schema}.categories_category WHERE id = {cat}'.format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n\n companies.append(DBItemCompany(\n _id = entry[0],\n tweeter = entry[1],\n category = catData[0] if catData is not None else None,\n categoryUrl = self.__buildCategoryUrl(catId, schema) if catId is not None else None,\n provenScore = entry[2],\n ranking = rank,\n location = city,\n url = self.__buildProfileUrl(catData[1], entry[3], schema) if catData is not None else self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId = catId\n ))\n\n self.__companies[schema] = companies\n\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])\n\n def domainUrl(self, schema):\n return self.__getDomainName(schema)\n\n def refreshData(self, schemas):\n if not self.__loggedIn:\n if not self.__logInDb():\n return False\n\n for schema in schemas:\n self.__getCompaniesData(schema)\n\n return True\n\n @property\n def companies(self):\n return self.__companies\n\n @property\n def isConnected(self):\n return self.__loggedIn\n",
"step-ids": [
15,
17,
18,
19,
22
]
}
|
[
15,
17,
18,
19,
22
] |
import os
import json
import codecs
import markdown
from flask import current_app
def get_json_file(filename, lang='en'):
"""
Get the contents of a JSON file.
"""
filepath = os.path.join(current_app.config['APP_PATH'], 'data', filename)
with open(filepath, 'r') as f:
return json.loads(f.read())
def get_markdown_file(name, lang='en'):
"""
Get the contents of a markdown file.
"""
filename_temp = "{0}_{1}.markdown"
md_dir = os.path.join(current_app.config['APP_PATH'], 'markdown')
filepath = os.path.join(md_dir, filename_temp.format(name, lang))
if not os.path.isfile(filepath) and lang == 'fr':
filepath = os.path.join(md_dir, filename_temp.format(name, 'en'))
if not os.path.isfile(filepath):
return None
with codecs.open(filepath, mode='r', encoding="utf-8") as f:
return markdown.markdown(f.read())
|
normal
|
{
"blob_id": "213ab22a269abc8180524462a8966e5d929ef7d1",
"index": 322,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_markdown_file(name, lang='en'):\n \"\"\"\n Get the contents of a markdown file.\n \"\"\"\n filename_temp = '{0}_{1}.markdown'\n md_dir = os.path.join(current_app.config['APP_PATH'], 'markdown')\n filepath = os.path.join(md_dir, filename_temp.format(name, lang))\n if not os.path.isfile(filepath) and lang == 'fr':\n filepath = os.path.join(md_dir, filename_temp.format(name, 'en'))\n if not os.path.isfile(filepath):\n return None\n with codecs.open(filepath, mode='r', encoding='utf-8') as f:\n return markdown.markdown(f.read())\n",
"step-3": "<mask token>\n\n\ndef get_json_file(filename, lang='en'):\n \"\"\"\n Get the contents of a JSON file.\n \"\"\"\n filepath = os.path.join(current_app.config['APP_PATH'], 'data', filename)\n with open(filepath, 'r') as f:\n return json.loads(f.read())\n\n\ndef get_markdown_file(name, lang='en'):\n \"\"\"\n Get the contents of a markdown file.\n \"\"\"\n filename_temp = '{0}_{1}.markdown'\n md_dir = os.path.join(current_app.config['APP_PATH'], 'markdown')\n filepath = os.path.join(md_dir, filename_temp.format(name, lang))\n if not os.path.isfile(filepath) and lang == 'fr':\n filepath = os.path.join(md_dir, filename_temp.format(name, 'en'))\n if not os.path.isfile(filepath):\n return None\n with codecs.open(filepath, mode='r', encoding='utf-8') as f:\n return markdown.markdown(f.read())\n",
"step-4": "import os\nimport json\nimport codecs\nimport markdown\nfrom flask import current_app\n\n\ndef get_json_file(filename, lang='en'):\n \"\"\"\n Get the contents of a JSON file.\n \"\"\"\n filepath = os.path.join(current_app.config['APP_PATH'], 'data', filename)\n with open(filepath, 'r') as f:\n return json.loads(f.read())\n\n\ndef get_markdown_file(name, lang='en'):\n \"\"\"\n Get the contents of a markdown file.\n \"\"\"\n filename_temp = '{0}_{1}.markdown'\n md_dir = os.path.join(current_app.config['APP_PATH'], 'markdown')\n filepath = os.path.join(md_dir, filename_temp.format(name, lang))\n if not os.path.isfile(filepath) and lang == 'fr':\n filepath = os.path.join(md_dir, filename_temp.format(name, 'en'))\n if not os.path.isfile(filepath):\n return None\n with codecs.open(filepath, mode='r', encoding='utf-8') as f:\n return markdown.markdown(f.read())\n",
"step-5": "import os\nimport json\nimport codecs\n\nimport markdown\n\nfrom flask import current_app\n\n\ndef get_json_file(filename, lang='en'):\n \"\"\"\n Get the contents of a JSON file.\n \"\"\"\n\n filepath = os.path.join(current_app.config['APP_PATH'], 'data', filename)\n\n with open(filepath, 'r') as f:\n return json.loads(f.read())\n\n\ndef get_markdown_file(name, lang='en'):\n \"\"\"\n Get the contents of a markdown file.\n \"\"\"\n\n filename_temp = \"{0}_{1}.markdown\"\n\n md_dir = os.path.join(current_app.config['APP_PATH'], 'markdown')\n\n filepath = os.path.join(md_dir, filename_temp.format(name, lang))\n\n if not os.path.isfile(filepath) and lang == 'fr':\n filepath = os.path.join(md_dir, filename_temp.format(name, 'en'))\n\n if not os.path.isfile(filepath):\n return None\n\n with codecs.open(filepath, mode='r', encoding=\"utf-8\") as f:\n return markdown.markdown(f.read())\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
'''
swea 2806 N-Queen
'''
def nqueen(depth, n, history):
global cnt
if depth == n:
cnt += 1
else:
for i in range(n):
if i not in history:
for index, value in enumerate(history):
if abs(depth - index) == abs(i - value):
break
else:
history.append(i)
nqueen(depth + 1, n, history)
history.remove(i)
for t in range(int(input())):
cnt = 0
nqueen(0, int(input()), [])
print("#{} {}".format(t+1, cnt))
|
normal
|
{
"blob_id": "b35686f7feec2c4a905007f3c105b6fa05b87297",
"index": 5365,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef nqueen(depth, n, history):\n global cnt\n if depth == n:\n cnt += 1\n else:\n for i in range(n):\n if i not in history:\n for index, value in enumerate(history):\n if abs(depth - index) == abs(i - value):\n break\n else:\n history.append(i)\n nqueen(depth + 1, n, history)\n history.remove(i)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef nqueen(depth, n, history):\n global cnt\n if depth == n:\n cnt += 1\n else:\n for i in range(n):\n if i not in history:\n for index, value in enumerate(history):\n if abs(depth - index) == abs(i - value):\n break\n else:\n history.append(i)\n nqueen(depth + 1, n, history)\n history.remove(i)\n\n\nfor t in range(int(input())):\n cnt = 0\n nqueen(0, int(input()), [])\n print('#{} {}'.format(t + 1, cnt))\n",
"step-4": "'''\nswea 2806 N-Queen\n'''\ndef nqueen(depth, n, history):\n global cnt\n if depth == n:\n cnt += 1\n else:\n for i in range(n):\n if i not in history:\n for index, value in enumerate(history):\n if abs(depth - index) == abs(i - value):\n break\n else:\n history.append(i)\n nqueen(depth + 1, n, history)\n history.remove(i)\n\n\nfor t in range(int(input())):\n cnt = 0\n nqueen(0, int(input()), [])\n print(\"#{} {}\".format(t+1, cnt))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Generated by Django 2.2.3 on 2019-07-27 10:41
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('beerFriends', '0006_auto_20190726_1504'),
]
operations = [
migrations.AlterField(
model_name='beer',
name='rating',
field=models.FloatField(blank=True, null=True),
),
]
|
normal
|
{
"blob_id": "68f3d3fce52d08381adc522ee032ef3181aec82a",
"index": 400,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('beerFriends', '0006_auto_20190726_1504')]\n operations = [migrations.AlterField(model_name='beer', name='rating',\n field=models.FloatField(blank=True, null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('beerFriends', '0006_auto_20190726_1504')]\n operations = [migrations.AlterField(model_name='beer', name='rating',\n field=models.FloatField(blank=True, null=True))]\n",
"step-5": "# Generated by Django 2.2.3 on 2019-07-27 10:41\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('beerFriends', '0006_auto_20190726_1504'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='beer',\n name='rating',\n field=models.FloatField(blank=True, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import text
nan=""
section_words = {'start': -1, '1.1': 17, '1.2': 38, '1.3': 55, '1.4': 76, '1.5': 95, '1.6': 114, '1.7': 133, '1.8': 151, '1.9': 170, '1.10': 190, '1.11': 209, '1.12': 233, '1.13': 257, '1.14': 277, '1.15': 299, '1.16': 320, '1.17': 341, '1.18': 364, '1.19': 385, '1.20': 405, '1.21': 428, '2.1': 451, '2.2': 474, '2.3': 495, '2.4': 521, '2.5': 542, '2.6': 564, '2.7': 587, '2.8': 611, '2.9': 633, '2.10': 653, '2.11': 674, '2.12': 695, '2.13': 718, '2.14': 740, '2.15': 760, 'end': -2}
the_text = [('AGRICOLA', 0, 'agricola', 'farmer', '', '1_1', 1), ('AMBVLO', 1, 'ambulo', 'to walk', '', '1_1', 2), ('AMO', 2, 'amo', 'to love', '', '1_1', 2), ('AQVA', 3, 'aqua', 'water', '', '1_1', 1), ('ATHLETA', 4, 'athleta', 'athlete', '', '1_1', 1), ('BENE', 5, 'bene', 'well', '', '1_1', 1), ('CVRO', 6, 'curo', 'to take care for/of', '', '1_1', 2), ('ET/2', 7, 'et', 'and', '', '1_1', 1), ('FILIA', 8, 'filia', 'daughter', '', '1_1', 1), ('ITAQVE', 9, 'itaque', 'and so', '', '1_1', 1), ('LVPA', 10, 'lupa', 'she–wolf', '', '1_1', 1), ('NAVTA', 11, 'nauta', 'sailor', '', '1_1', 1), ('POETA', 12, 'poeta', 'poet', '', '1_1', 1), ('POSTEA', 13, 'postea', 'afterwards', '', '1_1', 1), ('PVELLA', 14, 'puella', 'girl', '', '1_1', 1), ('ROMA/N', 15, 'Roma', 'Rome', '', '1_1', 1), ('SVM/1', 16, 'sum', 'to be', '', '1_1', 3), ('TERRA', 17, 'terra', 'land', '', '1_1', 1), ('AMBVLO', 18, 'ambulo', 'to walk', '', '1_2', 2), ('AMO', 19, 'amo', 'to love', '', '1_2', 2), ('CVRO', 20, 'curo', 'to take care for/of', '', '1_2', 2), ('SVM/1', 21, 'sum', 'to be', '', '1_2', 3), ('DEBEO', 22, 'debeo', 'ought, must, should; to owe', '', '1_2', 1), ('DIV', 23, 'diu', 'for a long time', '', '1_2', 1), ('EGO', 24, 'ego', 'I', '', '1_2', 3), ('EXSPECTO', 25, 'exspecto', 'to wait for, await, expect', '', '1_2', 1), ('FABVLA/1', 26, 'fabula', 'story', '', '1_2', 1), ('FORMA', 27, 'forma', 'form,appearance', '', '1_2', 1), ('HABEO', 28, 'habeo', 'to have', '', '1_2', 1), ('HABITO', 29, 'habito', 'to live, dwell', '', '1_2', 1), ('NARRO', 30, 'narro', 'to tell', '', '1_2', 1), ('NON', 31, 'non', 'not', '', '1_2', 1), ('NVNC', 32, 'nunc', 'now', '', '1_2', 1), ('PARO/2', 33, 'paro', 'to prepare, get ready, design', '', '1_2', 2), ('PATRIA', 34, 'patria', 'fatherland', '', '1_2', 1), ('TENEO', 35, 'teneo', 'to hold', '', '1_2', 1), ('TV', 36, 'tu', 'you', '', '1_2', 3), ('VIDEO', 37, 'video', 'to see', '', '1_2', 1), ('VOCO', 38, 'voco', 'to call', '', '1_2', 1), ('EGO', 39, 'ego', 'I', '', '1_3', 3), ('TV', 40, 'tu', 'you', '', '1_3', 3), ('AGER', 41, 'ager', 'field', '', '1_3', 1), ('AMICVS/1', 42, 'amicus', 'friend', '', '1_3', 1), ('ANIMVS', 43, 'animus', 'spirit, soul, mind', '', '1_3', 1), ('CASA', 44, 'casa', 'little house, cottage', '', '1_3', 1), ('CVM/2', 45, 'cum', 'with (w/ abl.)', '', '1_3', 1), ('DEINDE', 46, 'deinde', 'then', '', '1_3', 1), ('DOMVS', 47, 'domus', 'home', '', '1_3', 2), ('FILIVS', 48, 'filius', 'son', '', '1_3', 1), ('IN', 49, 'in', 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_3', 2), ('PVER', 50, 'puer', 'boy', '', '1_3', 1), ('RIVVS', 51, 'rivus', 'brook, stream', '', '1_3', 1), ('TIMEO', 52, 'timeo', 'to fear, to be afraid', '', '1_3', 1), ('VALDE', 53, 'valde', 'very, exceedingly', '', '1_3', 1), ('VIA', 54, 'via', 'road', '', '1_3', 1), ('VIR', 55, 'vir', 'man', '', '1_3', 1), ('IN', 56, 'in', 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_4', 2), ('AD/2', 57, 'ad', 'into, towards, to (w/ acc.)', '', '1_4', 1), ('ARMATVS/2', 58, 'armatus', 'armed', '', '1_4', 1), ('AVTEM', 59, 'autem', 'however', '', '1_4', 1), ('BELLVM', 60, 'bellum', 'war', '', '1_4', 1), ('BONVS', 61, 'bonus', 'good', '', '1_4', 1), ('CASTRA/2', 62, 'castra', 'camp', '', '1_4', 1), ('DO', 63, 'do', 'to give', '', '1_4', 1), ('DOLVS', 64, 'dolus', 'trickery, deception', '', '1_4', 1), ('EX', 65, 'e', 'from, out of (w/ abl.)', '', '1_4', 1), ('INTRO/2', 66, 'intro', 'to enter', '', '1_4', 1), ('IVBEO', 67, 'iubeo', 'to order somebody (acc.) 
to do something (inf.)', '', '1_4', 1), ('IVSTVS', 68, 'iustus', 'legitimate, open, just', '', '1_4', 1), ('MAGNVS', 69, 'magnus', 'large, great, important', '', '1_4', 1), ('MALVS/3', 70, 'malus', 'bad', '', '1_4', 1), ('PRAECLARVS', 71, 'praeclarus', 'famous, distinguished', '', '1_4', 1), ('PRAEMIVM', 72, 'praemium', 'reward', '', '1_4', 1), ('ROMANVS/A', 73, 'Romanus', 'Roman; the Romans (pl.)', '', '1_4', 1), ('SED', 74, 'sed', 'but', '', '1_4', 1), ('VENENVM', 75, 'venenum', 'poison', '', '1_4', 1), ('VINCVLVM', 76, 'vinculum', 'chain, fetter', '', '1_4', 1), ('PARO/2', 77, 'paro', 'to prepare, get ready, design', '', '1_5', 2), ('AB', 78, 'a', 'by, from (w/ abl.)', '', '1_5', 1), ('AVXILIVM', 79, 'auxilium', 'help', '', '1_5', 1), ('COGITO', 80, 'cogito', 'to think', '', '1_5', 1), ('CONSILIVM', 81, 'consilium', 'plan, advice', '', '1_5', 2), ('DE', 82, 'de', 'about, concerning, down from (w/ abl.)', '', '1_5', 1), ('DOLEO', 83, 'doleo', 'to feel pain, to be hurt', '', '1_5', 1), ('EPISTOLA', 84, 'epistula', 'letter', '', '1_5', 1), ('FAMILIA', 85, 'familia', 'family, household', '', '1_5', 1), ('GAVDIVM', 86, 'gaudium', 'joy', '', '1_5', 1), ('LACRIMA', 87, 'lacrima', 'tear', '', '1_5', 1), ('LONGE', 88, 'longe', 'far', '', '1_5', 1), ('LONGVS', 89, 'longus', 'long', '', '1_5', 1), ('MISER', 90, 'miser', 'wretched, sad, miserable', '', '1_5', 1), ('NAM', 91, 'nam', 'for, in fact', '', '1_5', 1), ('NONSOLVM', 92, 'non', 'not only…, but also…', '', '1_5', 1), ('PVLCHER', 93, 'pulcher', 'beautiful, nice', '', '1_5', 1), ('SEMPER', 94, 'semper', 'always', '', '1_5', 1), ('TAMEN', 95, 'tamen', 'however', '', '1_5', 2), ('SVM/1', 96, 'sum', 'to be', '', '1_6', 3), ('DOCEO', 97, 'doceo', 'to teach', '', '1_6', 1), ('DVM/2', 98, 'dum', 'while', '', '1_6', 1), ('EXEMPLVM', 99, 'exemplum', 'example', '', '1_6', 1), ('FIRMO', 100, 'firmo', 'to strengthen', '', '1_6', 1), ('IACEO', 101, 'iaceo', 'to lie down, to be inert', '', '1_6', 1), ('IVDICO', 102, 'iudico', 'to judge', '', '1_6', 1), ('LIBER/1', 103, 'liber', 'book', '', '1_6', 1), ('LITTERA', 104, 'littera', 'letter of the alphabet (sing.); literature, letter (pl.)', '', '1_6', 1), ('MANEO', 105, 'maneo', 'to remain', '', '1_6', 1), ('MEMORIA', 106, 'memoria', 'memory', '', '1_6', 1), ('MVLTVS', 107, 'multus', 'much, many', '', '1_6', 1), ('POSSVM/1', 108, 'possum', 'to be able, can', '', '1_6', 1), ('PROPTER/2', 109, 'propter', 'because of, on account of (w/ acc.)', '', '1_6', 1), ('SAEPE', 110, 'saepe', 'often', '', '1_6', 1), ('SERVO', 111, 'servo', 'to save, preserve', '', '1_6', 1), ('SOLEO', 112, 'soleo', 'to be accustomed (w/ inf.)', '', '1_6', 1), ('TENEBRAE', 113, 'tenebrae', 'shadows, darkness (pl.)', '', '1_6', 1), ('VITA', 114, 'vita', 'life', '', '1_6', 1), ('AESTIMO', 115, 'aestimo', 'to regard, esteem', '', '1_7', 1), ('AESTIMOVNIVSASSIS', 116, 'aestimo', 'I do not care a bit ', '', '1_7', 1), ('AMOR', 117, 'amor', 'love', '', '1_7', 1), ('DELICIA/1', 118, 'delicia', 'delight, pet', '', '1_7', 1), ('DIGITVS', 119, 'digitus', 'finger', '', '1_7', 1), ('DOMINA', 120, 'domina', 'mistress', '', '1_7', 1), ('GREMIVM', 121, 'gremium', 'lap', '', '1_7', 1), ('INVIDEO', 122, 'invideo', 'to envy', '', '1_7', 1), ('MEVS', 123, 'meus', 'my', '', '1_7', 1), ('OCVLVS', 124, 'oculus', 'eye', '', '1_7', 1), ('PASSER', 125, 'passer', 'sparrow', '', '1_7', 1), ('PAX', 126, 'pax', 'peace; favor', '', '1_7', 1), ('PVTO', 127, 'puto', 'to think', '', '1_7', 1), ('SENEX/1', 128, 'senex', 'old man', '', '1_7', 1), ('SESESE', 129, 'se', 
'him/her/itself', '', '1_7', 1), ('SEVERVS', 130, 'severus', 'serious', '', '1_7', 1), ('SOROR', 131, 'soror', 'sister', '', '1_7', 1), ('SVI/1', 132, 'sui', 'him–/her–/itself', '', '1_7', 1), ('VERBVM', 133, 'verbum', 'word', '', '1_7', 1), ('CONTRA/2', 134, 'contra', 'against (w/ acc.)', '', '1_8', 1), ('DECERNO', 135, 'decerno', 'to decide, determine (often w/ inf.)', '', '1_8', 1), ('DICO/2', 136, 'dico', 'to say', '', '1_8', 1), ('DVX', 137, 'dux', 'leader, general', '', '1_8', 1), ('FORTITVDO', 138, 'fortitudo', 'courage', '', '1_8', 1), ('HOMO', 139, 'homo', 'man, human being; people (pl.)', '', '1_8', 1), ('INTELLIGO', 140, 'intellego', 'to understand', '', '1_8', 1), ('LIBERO', 141, 'libero', 'to free someone (acc.) from something (abl.)', '', '1_8', 1), ('MILES', 142, 'miles', 'soldier', '', '1_8', 1), ('NAVIGO', 143, 'navigo', 'to sail, navigate', '', '1_8', 1), ('ORACVLVM', 144, 'oraculum', 'oracle, prophecy', '', '1_8', 1), ('PETO', 145, 'peto', 'to seek', '', '1_8', 1), ('REX', 146, 'rex', 'king', '', '1_8', 1), ('TANDEM', 147, 'tandem', 'finally', '', '1_8', 1), ('TEMPLVM', 148, 'templum', 'temple', '', '1_8', 1), ('TIMOR', 149, 'timor', 'fear', '', '1_8', 1), ('TVM', 150, 'tum', 'then, at that time', '', '1_8', 2), ('VINCO', 151, 'vinco', 'to conquer', '', '1_8', 1), ('ANIMAL', 152, 'animal', 'animal', '', '1_9', 1), ('ARMA', 153, 'arma', 'weapons (pl.)', '', '1_9', 1), ('AVDIO', 154, 'audio', 'to hear, listen', '', '1_9', 1), ('CAPVT', 155, 'caput', 'head', '', '1_9', 1), ('CIVIS', 156, 'civis', 'citizen', '', '1_9', 1), ('CONSVL', 157, 'consul', 'consul', '', '1_9', 1), ('CORPVS', 158, 'corpus', 'body', '', '1_9', 1), ('CREDO', 159, 'credo', 'to believe somebody (w/ dat.)', '', '1_9', 1), ('EXEMPLAR', 160, 'exemplar', 'example', '', '1_9', 1), ('GERO', 161, 'gero', 'to carry; to behave (w/ se)', '', '1_9', 2), ('MARE', 162, 'mare', 'sea', '', '1_9', 1), ('MORS', 163, 'mors', 'death', '', '1_9', 1), ('MVLIER', 164, 'mulier', 'woman', '', '1_9', 1), ('ORATIO', 165, 'oratio', 'oration, speech', '', '1_9', 1), ('SCIO', 166, 'scio', 'to know', '', '1_9', 1), ('SENTIO', 167, 'sentio', 'to perceive', '', '1_9', 1), ('TEMPVS/1', 168, 'tempus', 'time', '', '1_9', 1), ('VENIO', 169, 'venio', 'to come', '', '1_9', 1), ('VRBS', 170, 'urbs', 'city', '', '1_9', 1), ('ACER/2', 171, 'acer', 'keen, fierce', '', '1_10', 1), ('AEDIFICO', 172, 'aedifico', 'to build', '', '1_10', 1), ('CAPIO/2', 173, 'capio', 'to take, adopt, capture', '', '1_10', 1), ('CELEBER', 174, 'celeber', 'renowned, well–known, crowded', '', '1_10', 1), ('CVPIO', 175, 'cupio', 'to desire, want', '', '1_10', 1), ('DELEO', 176, 'deleo', 'to destroy', '', '1_10', 1), ('DEVS', 177, 'deus', 'god', '', '1_10', 1), ('DONVM', 178, 'donum', 'gift', '', '1_10', 1), ('EQVVS', 179, 'equus', 'horse', '', '1_10', 1), ('FELIX', 180, 'felix', 'fortunate, happy', '', '1_10', 1), ('FLAMMA', 181, 'flamma', 'flame', '', '1_10', 1), ('FORTIS', 182, 'fortis', 'brave, strong', '', '1_10', 1), ('FVGIO', 183, 'fugio', 'to flee, run away', '', '1_10', 1), ('HOSTIS', 184, 'hostis', 'enemy', '', '1_10', 1), ('MOVEO', 185, 'moveo', 'to move', '', '1_10', 1), ('NEC/2', 186, 'nec', 'nor; and not', '', '1_10', 2), ('NOX', 187, 'nox', 'night', '', '1_10', 1), ('PAVCI', 188, 'paucus', 'few', '', '1_10', 1), ('PERICVLVM', 189, 'periculum', 'danger', '', '1_10', 1), ('PVGNO', 190, 'pugno', 'to fight', '', '1_10', 1), ('AGO', 191, 'ago', 'to drive, lead, do, behave', '', '1_11', 1), ('ARDEO', 192, 'ardeo', 'to burn, be on fire', '', '1_11', 1), 
('CONSPICIO', 193, 'conspicio', 'to look at, observe', '', '1_11', 1), ('CRVDELIS', 194, 'crudelis', 'cruel', '', '1_11', 1), ('DOLOR', 195, 'dolor', 'grief, pain', '', '1_11', 1), ('ITA', 196, 'ita', 'yes', '', '1_11', 2),
('MINIME', 197, 'minime', 'No', '', '1_11', 1), ('MITTO', 198, 'mitto', 'to send', '', '1_11', 1), ('NE/2', 199, 'ne', '(added to the first word of a question)', '', '1_11', 1), ('NOVVS', 200, 'novus', 'new', '', '1_11', 1),
('PARVM/2', 201, 'parum', 'too little', '', '1_11', 2), ('QVE', 202, 'que', 'and', '', '1_11', 1), ('QVOQVE', 203, 'quoque', 'also', '', '1_11', 1), ('REGINA', 204, 'regina', 'queen', '', '1_11', 1),
('RELINQVO', 205, 'relinquo', 'to abandon', '', '1_11', 1), ('SILVA', 206, 'silva', 'forest', '', '1_11', 1), ('SPELVNCA', 207, 'spelunca', 'cave', '', '1_11', 1), ('TEMPESTAS', 208, 'tempestas', 'season', '', '1_11', 1),
('VNA', 209, 'una', 'together', '', '1_11', 1), ('BELLVMGERO', 210, 'bellum', 'to wage war', '', '1_12', 1), ('CONSVMO', 211, 'consumo', 'to consume', '', '1_12', 1), ('DEXTERA', 212, 'dextera', 'right hand', '', '1_12', 1),
('FACIO', 213, 'facio', 'to do, make', '', '1_12', 1), ('IBI', 214, 'ibi', 'there', '', '1_12', 1), ('IGNIS', 215, 'ignis', 'fire', '', '1_12', 1), ('INQVIO', 216, 'inquam', 'to say (used with direct speech)', '', '1_12', 3),
('IRA', 217, 'ira', 'anger', '', '1_12', 1), ('IS', 218, 'is', 's/he/it, this, that', '', '1_12', 1), ('NOMEN', 219, 'nomen', 'name', '', '1_12', 1), ('NOS', 220, 'nos', 'we; us', '', '1_12', 1),
('NOSTER', 221, 'noster', 'our, ours', '', '1_12', 1), ('OCCIDO/2', 222, 'occido', 'to strike down, knock down', '', '1_12', 1), ('OSTENDO', 223, 'ostendo', 'to show', '', '1_12', 1), ('PONO', 224, 'pono', 'to place', '', '1_12', 1),
('PROPE/2', 225, 'prope', 'near', '', '1_12', 2), ('PROVIRIBVS', 226, 'pro', 'with all one’s might', '', '1_12', 1), ('SIMILIS', 227, 'similis', 'similar', '', '1_12', 1), ('STATIM', 228, 'statim', 'immediately', '', '1_12', 1),
('TANTVS', 229, 'tantus', 'so much', '', '1_12', 1), ('TVVS', 230, 'tuus', 'your', '', '1_12', 1), ('VESTER', 231, 'vester', 'your', '', '1_12', 1), ('VIS', 232, 'vis', 'force', '', '1_12', 1),
('VOS', 233, 'vos', 'you', '', '1_12', 1), ('EGO', 234, 'ego', 'I', '', '1_13', 3), ('TV', 235, 'tu', 'you', '', '1_13', 3), ('TVM', 236, 'tum', 'then, at that time', '', '1_13', 2),
('ALIVS', 237, 'alius', 'another, other', '', '1_13', 1), ('APVD', 238, 'apud', 'at the house of (w/ acc.)', '', '1_13', 1), ('ATQVE/1', 239, 'atque', 'as', '', '1_13', 2), ('DISCEDO/1', 240, 'discedo', 'to leave, withdraw, go away', '', '1_13', 1),
('DIVES', 241, 'dives', 'rich', '', '1_13', 1), ('DOCTVS', 242, 'doctus', 'learned', '', '1_13', 1), ('DVCO', 243, 'duco', 'to lead, take', '', '1_13', 2), ('ENIM/2', 244, 'enim', 'for, in fact', '', '1_13', 1),
('IVDEX', 245, 'iudex', 'judge', '', '1_13', 1), ('LICET/1', 246, 'licet', 'it is allowed, permitted (for someone)(to do something)(w/ dat. and inf.) ', '', '1_13', 1), ('NIHIL', 247, 'nihil', 'nothing', '', '1_13', 1),
('NOLO', 248, 'nolo', 'not to want, to be unwilling', '', '1_13', 2), ('OMNIS', 249, 'omnis', 'each, every, all', '', '1_13', 1), ('PRO/1', 250, 'pro', 'for, on behalf of (w/ abl.)', '', '1_13', 1), ('QVID', 251, 'quid', 'what; why', '', '1_13', 1),
('RESPONDEO', 252, 'respondeo', 'to answer', '', '1_13', 1), ('ROGO', 253, 'rogo', 'to ask', '', '1_13', 1), ('SVVS', 254, 'suus', 'his, her, its, their', '', '1_13', 2), ('TANTVM/2', 255, 'tantum', 'only', '', '1_13', 1),
('VALE', 256, 'vale', 'greetings! farewell!', '', '1_13', 1), ('VALEO', 257, 'valeo', 'to be able (w/ inf.); to be in good health', '', '1_13', 3), ('ALBVS', 258, 'albus', 'white', '', '1_14', 1),
('ARBOR', 259, 'arbor', 'tree', '', '1_14', 1), ('CADO', 260, 'cado', 'to fall', '', '1_14', 1), ('COMEDO/2', 261, 'comedo', 'to eat', '', '1_14', 1), ('CONVENIO', 262, 'convenio', 'to meet', '', '1_14', 1),
('FLVO', 263, 'fluo', 'to flow', '', '1_14', 1), ('GLADIVS', 264, 'gladius', 'sword', '', '1_14', 1), ('IAM', 265, 'iam', 'already, yet', '', '1_14', 1), ('MOX', 266, 'mox', 'soon', '', '1_14', 1),
('ODIVM', 267, 'odium', 'hatred', '', '1_14', 2), ('OS/1', 268, 'os', 'mouth', '', '1_14', 1), ('PARENS/1', 269, 'parens', 'parent', '', '1_14', 1), ('PECTVS', 270, 'pectus', 'chest', '', '1_14', 1),
('PER', 271, 'per', 'through (w/ acc.)', '', '1_14', 1), ('PRIMVS', 272, 'primus', 'first', '', '1_14', 1), ('QVI/1', 273, 'qui', 'who, which (rel. pronoun); what? which? (inter. adj.) ', '', '1_14', 2),
('RVBER', 274, 'ruber', 'red', '', '1_14', 1), ('SANGVIS', 275, 'sanguis', 'blood', '', '1_14', 1), ('SEPARO/2', 276, 'separo', 'to separate, divide', '', '1_14', 1), ('TANGO', 277, 'tango', 'to touch', '', '1_14', 1),
('INQVIO', 278, 'inquam', 'to say (used with direct speech)', '', '1_15', 3), ('QVI/1', 279, 'qui', 'who, which (rel. pronoun); what? which? (inter. adj.) ', '', '1_15', 2), ('ANTE/2', 280, 'ante', 'in front of (w/ acc.)', '', '1_15', 1),
('ARGVMENTVM', 281, 'argumentum', 'proof, indication, argument', '', '1_15', 1), ('CVR/1', 282, 'cur', 'why', '', '1_15', 1), ('DIFFICILIS', 283, 'difficilis', 'difficult', '', '1_15', 1), ('ECCE', 284, 'ecce', 'look here!', '', '1_15', 1),
('ETIAM', 285, 'etiam', 'even, also', '', '1_15', 1), ('FORSITAN', 286, 'forsan', 'perhaps', '', '1_15', 1), ('NEGLIGO', 287, 'neglego', 'to neglect', '', '1_15', 1), ('PARVVS/2', 288, 'parvus', 'small', '', '1_15', 1),
('QVIS/1', 289, 'quis', 'who? which? (inter. pronoun)', '', '1_15', 1), ('RVSTICVS/2', 290, 'rusticus', 'rural, rustic', '', '1_15', 1), ('SAXVM', 291, 'saxum', 'stone, rock', '', '1_15', 1),
('SENECTVS/1', 292, 'senectus', 'old age', '', '1_15', 1), ('SICVT/1', 293, 'sicut', 'just as', '', '1_15', 1), ('STO', 294, 'sto', 'stand', '', '1_15', 1), ('VBIQVE', 295, 'ubique', 'everywhere', '', '1_15', 1),
('VERVS', 296, 'verus', 'real, true', '', '1_15', 1), ('VETVSTVS', 297, 'vetustus', 'old', '', '1_15', 1), ('VILLA', 298, 'villa', 'estate', '', '1_15', 1), ('VMQVAM', 299, 'umquam', 'ever', '', '1_15', 1),
('AVVNCVLVS', 300, 'avunculus', 'uncle', '', '1_16', 1), ('CAELVM/1', 301, 'caelum', 'sky, heaven, weather', '', '1_16', 1), ('CAVSA', 302, 'causa', 'cause, reason', '', '1_16', 1), ('CINIS', 303, 'cinis', 'ash', '', '1_16', 1),
('CLADES', 304, 'clades', 'disaster', '', '1_16', 1), ('CLASSIS', 305, 'classis', 'fleet, class (of people)', '', '1_16', 1), ('FEMINA', 306, 'femina', 'woman', '', '1_16', 1), ('FVMVS', 307, 'fumus', 'smoke', '', '1_16', 1),
('FVNESTVS', 308, 'funestus', 'deadly', '', '1_16', 1), ('IGITVR', 309, 'igitur', 'therefore', '', '1_16', 1), ('INCENDIVM', 310, 'incendium', 'conflagration, eruption', '', '1_16', 1), ('LEGO/2', 311, 'lego', 'to read, choose', '', '1_16', 1),
('LITVS/2', 312, 'litus', 'shore', '', '1_16', 1), ('MATER', 313, 'mater', 'mother', '', '1_16', 1), ('MONS', 314, 'mons', 'mountain', '', '1_16', 1), ('NAVIS', 315, 'navis', 'ship', '', '1_16', 1),
('NVBES', 316, 'nubes', 'cloud', '', '1_16', 1), ('NVMQVAM', 317, 'numquam', 'never', '', '1_16', 1), ('OPPRIMO', 318, 'opprimo', 'to overwhelm, suppress', '', '1_16', 1), ('PARS', 319, 'pars', 'part', '', '1_16', 1),
('STVDEO', 320, 'studeo', 'to study, be eager for, be interested in (w/ dat.)', '', '1_16', 1), ('DOMVS', 321, 'domus', 'home', '', '1_17', 2), ('ALO', 322, 'alo', 'to feed, nourish', '', '1_17', 1),
('AMITTO', 323, 'amitto', 'to lose', '', '1_17', 1), ('CORNV', 324, 'cornu', 'horn', '', '1_17', 1), ('CORRIPIO', 325, 'corripio', 'to seize, occupy, engulf', '', '1_17', 1), ('CVRRO', 326, 'curro', 'to run', '', '1_17', 1),
('DEVASTO', 327, 'devasto', 'to lay waste', '', '1_17', 1), ('EXSTINGVO', 328, 'exstinguo', 'to extinguish', '', '1_17', 1), ('FACILE', 329, 'facile', 'easily', '', '1_17', 1), ('IACIO', 330, 'iacio', 'to throw', '', '1_17', 1),
('IMPERATOR', 331, 'imperator', 'general, emperor', '', '1_17', 1), ('IMPETVS', 332, 'impetus', 'impetus, force, attack', '', '1_17', 1), ('INITIVM', 333, 'initium', 'beginning', '', '1_17', 1), ('IVSSVS', 334, 'iussus', 'order', '', '1_17', 1),
('LOCVS', 335, 'locus', 'place (sing.); passages of a book (m. pl.); geographical places (n. pl.)', '', '1_17', 1), ('MANVS/1', 336, 'manus', 'hand', '', '1_17', 1), ('MVRVS', 337, 'murus', 'wall', '', '1_17', 1),
('SINE', 338, 'sine', 'without (w/ abl.)', '', '1_17', 1), ('TEMPTO', 339, 'tempto', 'to try', '', '1_17', 1), ('TVMVLTVS', 340, 'tumultus', 'confusion', '', '1_17', 1), ('VENTVS', 341, 'ventus', 'wind', '', '1_17', 1),
('DVCO', 342, 'duco', 'to lead, take', '', '1_18', 2), ('ITA', 343, 'ita', 'yes', '', '1_18', 2), ('COLO/2', 344, 'colo', 'to worship, cultivate', '', '1_18', 1), ('CVM/3', 345, 'cum', 'when, after', '', '1_18', 2),
('DEA', 346, 'dea', 'goddess', '', '1_18', 1), ('DIES', 347, 'dies', 'day', '', '1_18', 1), ('DORMIO', 348, 'dormio', 'to sleep', '', '1_18', 1), ('EXCITO/1', 349, 'excito', 'to awaken, rouse, stir up', '', '1_18', 1),
('EXCLAMO', 350, 'exclamo', 'to exclaim', '', '1_18', 1), ('FACIES', 351, 'facies', 'face', '', '1_18', 1), ('FATVM', 352, 'fatum', 'fate, destiny', '', '1_18', 1), ('MARITVS/1', 353, 'maritus', 'husband', '', '1_18', 1),
('MERIDIES', 354, 'meridies', 'midday', '', '1_18', 2), ('MVLTVM/2', 355, 'multum', 'much', '', '1_18', 1), ('OCCVLTO', 356, 'occulto', 'to hide', '', '1_18', 1), ('PATER', 357, 'pater', 'father', '', '1_18', 2),
('POST/2', 358, 'post', 'after (w/ acc.)', '', '1_18', 1), ('QVAERO', 359, 'quaero', 'to look for, search', '', '1_18', 1), ('RES', 360, 'res', 'thing, matter', '', '1_18', 1), ('SI/2', 361, 'si', 'if', '', '1_18', 1),
('SOMNVS', 362, 'somnus', 'sleep', '', '1_18', 1), ('TAM', 363, 'tam', 'so ', '', '1_18', 1), ('VXOR', 364, 'uxor', 'wife', '', '1_18', 2), ('BARBA', 365, 'barba', 'beard', '', '1_19', 1),
('CARO/1', 366, 'caro', 'meat, flesh', '', '1_19', 1), ('CELERITER', 367, 'celeriter', 'swiftly', '', '1_19', 1), ('COQVO', 368, 'coquo', 'to cook', '', '1_19', 1), ('CRESCO', 369, 'cresco', 'to grow', '', '1_19', 1),
('FEROX', 370, 'ferox', 'fierce, ferocious', '', '1_19', 1), ('FORIS/2', 371, 'foris', 'outside, in the open', '', '1_19', 1), ('HERBA', 372, 'herba', 'plant, vegetation', '', '1_19', 1), ('HIC/1', 373, 'hic', 'this', '', '1_19', 1),
('INTER', 374, 'inter', 'between, among (w/ acc.)', '', '1_19', 1), ('PELLIS', 375, 'pellis', 'skin, hide', '', '1_19', 1), ('POSTQVAM', 376, 'postquam', 'after', '', '1_19', 1), ('PROELIVM', 377, 'proelium', 'battle, combat', '', '1_19', 1),
('SANO', 378, 'sano', 'to heal', '', '1_19', 1), ('SEDEO', 379, 'sedeo', 'to sit', '', '1_19', 1), ('TERO', 380, 'tero', 'to wear out, rub', '', '1_19', 1), ('TERRIBILIS', 381, 'terribilis', 'terrifying', '', '1_19', 1),
('VESTIMENTVM', 382, 'vestimentum', 'garment, clothes (pl.)', '', '1_19', 1), ('VIVO', 383, 'vivo', 'to live', '', '1_19', 1), ('VVLNERO', 384, 'vulnero', 'to wound', '', '1_19', 1), ('VVLNVS', 385, 'vulnus', 'wound', '', '1_19', 1),
('ABVNDO', 386, 'abundo', 'to abound with (w/ abl.)', '', '1_20', 1), ('ADOLESCENS/2', 387, 'adulescens', 'young man, young lady', '', '1_20', 1), ('AEQVVS', 388, 'aequus', 'even', '', '1_20', 1), ('COR', 389, 'cor', 'heart', '', '1_20', 1),
('DELECTO', 390, 'delecto', 'to delight, please', '', '1_20', 1), ('DIVINVS/2', 391, 'divinus', 'divine', '', '1_20', 1), ('EGEO', 392, 'egeo', 'to lack something (abl.)', '', '1_20', 1), ('FVR', 393, 'fur', 'thief', '', '1_20', 1),
('FVRTVM', 394, 'furtum', 'theft', '', '1_20', 1), ('HVMANVS', 395, 'humanus', 'human', '', '1_20', 1), ('ILLE', 396, 'ille', 'that', '', '1_20', 1), ('INIQVITAS', 397, 'iniquitas', 'injustice', '', '1_20', 1),
('LEX', 398, 'lex', 'law', '', '1_20', 1), ('LVDO', 399, 'ludo', 'to play', '', '1_20', 1),
('NOCTV', 400, 'noctu', 'during the night', '', '1_20', 1), ('PAENE', 401, 'paene', 'almost', '', '1_20', 1), ('PAVPER', 402, 'pauper', 'poor', '', '1_20', 1), ('PLENVS', 403, 'plenus', 'full of (w/ gen. or abl.)', '', '1_20', 1), ('POMVM', 404, 'pomum', 'fruit', '', '1_20', 1), ('PVNIO', 405, 'punio', 'to punish', '', '1_20', 1), ('ACCIPIO', 406, 'accipio', 'to accept, receive', '', '1_21', 1), ('ACCVSO', 407, 'accuso', 'to accuse someone (acc.) of something (gen.)', '', '1_21', 1), ('ALIENVS/2', 408, 'alienus', 'foreign to, inconsistent with (w/ a/ab and abl.)', '', '1_21', 1), ('AXIS', 409, 'axis', 'axle, axis', '', '1_21', 1), ('CIRCVM/2', 410, 'circum', 'around (w/ acc.)', '', '1_21', 1), ('CONSTANTIA', 411, 'constantia', 'constancy', '', '1_21', 1), ('DESCENDO', 412, 'descendo', 'to descend', '', '1_21', 1), ('DIVITIAE', 413, 'divitia', 'wealth, riches (pl.)', '', '1_21', 1), ('ERIPIO', 414, 'eripio', 'to snatch away', '', '1_21', 1), ('ERRO/2', 415, 'erro', 'to wander, make a mistake', '', '1_21', 1), ('EXTERNVS', 416, 'externus', 'outward, external', '', '1_21', 1), ('FORTVNA', 417, 'fortuna', 'fortune, the goddess Fortune', '', '1_21', 1), ('FVTVRVS', 418, 'futurus', 'about to be (from sum)', '', '1_21', 1), ('HONOR', 419, 'honor', 'honor, public office or distinction', '', '1_21', 1), ('MVTO/2', 420, 'muto', 'to change', '', '1_21', 1), ('POSSIDEO', 421, 'possideo', 'to possess', '', '1_21', 1), ('PROCERTO', 422, 'pro', 'for certain, for sure', '', '1_21', 1), ('RECIPIO', 423, 'recipio', 'to take back', '', '1_21', 2), ('REPREHENDO', 424, 'reprehendo', 'to blame, rebuke', '', '1_21', 1), ('ROTA', 425, 'rota', 'wheel', '', '1_21', 1), ('TOLLO', 426, 'tollo', 'to lift up, raise; to destroy', '', '1_21', 1), ('VERSO', 427, 'verso', 'to turn', '', '1_21', 1), ('VLLVS', 428, 'ullus', 'any', '', '1_21', 1), ('CONSILIVM', 429, 'consilium', 'plan, advice', '', '2_1', 2), ('MERIDIES', 430, 'meridies', 'midday', '', '2_1', 2), ('PROPE/2', 431, 'prope', 'near', '', '2_1', 2), ('ASPICIO', 432, 'aspicio', 'to look at, catch a glimpse of', '', '2_1', 1), ('ETET', 433, 'et', 'both…and…', '', '2_1', 1), ('GENS', 434, 'gens', 'tribe, population', '', '2_1', 1), ('GIGNO', 435, 'gigno', 'to give birth, produce', '', '2_1', 1), ('HODIE', 436, 'hodie', 'today', '', '2_1', 1), ('INCOLA', 437, 'incola', 'inhabitant', '', '2_1', 1), ('INSVLA', 438, 'insula', 'island', '', '2_1', 2), ('INVENIO', 439, 'invenio', 'to come upon, find', '', '2_1', 1), ('MOS', 440, 'mos', 'custom, habit; morals (pl.)', '', '2_1', 1), ('MVNDVS/1', 441, 'mundus', 'world', '', '2_1', 1), ('NE/4', 442, 'ne', 'that not, not to, lest ', '', '2_1', 3), ('OCCVPO/2', 443, 'occupo', 'to occupy', '', '2_1', 1), ('ORTVS', 444, 'ortus', 'origin, beginning, raising', '', '2_1', 1), ('PISCIS', 445, 'piscis', 'a fish', '', '2_1', 1), ('PROCVL', 446, 'procul', 'far, far away', '', '2_1', 1), ('PROMITTO', 447, 'promitto', 'to promise', '', '2_1', 1), ('SEPTENTRIONALIS', 448, 'septentrionalis', 'northern', '', '2_1', 1), ('SITVS/2', 449, 'situs', 'located, situated', '', '2_1', 1), ('SOL', 450, 'sol', 'sun', '', '2_1', 1), ('VTINAM', 451, 'utinam', 'if only', '', '2_1', 2), ('GERO', 452, 'gero', 'to carry; to behave (w/ se)', '', '2_2', 2), ('ODIVM', 453, 'odium', 'hatred', '', '2_2', 2), ('VALEO', 454, 'valeo', 'to be able (w/ inf.); to be in good health', '', '2_2', 3), ('ALTVS', 455, 'altus', 'tall, deep', '', '2_2', 1), ('ANNVS', 456, 'annus', 'year', '', '2_2', 1), ('ARGENTVM', 457, 'argentum', 'silver', '', '2_2', 1), ('AVRVM', 458, 
'aurum', 'gold', '', '2_2', 1), ('BREVIS', 459, 'brevis', 'short', '', '2_2', 1), ('CLARVS', 460, 'clarus', 'clear, distinguished', '', '2_2', 1), ('CVSTOS', 461, 'custos', 'guard', '', '2_2', 1),
('EQVES', 462, 'eques', 'horseman', '', '2_2', 1), ('FINIS', 463, 'finis', 'end', '', '2_2', 1), ('GRAVIS', 464, 'gravis', 'serious, heavy', '', '2_2', 1), ('INTERDVM', 465, 'interdum', 'sometimes', '', '2_2', 1),
('LIS', 466, 'lis', 'dispute, quarrel', '', '2_2', 1), ('MANE/2', 467, 'mane', 'in the morning', '', '2_2', 1), ('ODIOHABEO', 468, 'odio', 'to hate somebody', '', '2_2', 1), ('SINO', 469, 'sino', 'to allow somebody (acc.) to do something (inf.)', '', '2_2', 1),
('VEL/1', 470, 'vel', 'or', '', '2_2', 1), ('VESTIS', 471, 'vestis', 'clothes, attire', '', '2_2', 1), ('VOX', 472, 'vox', 'voice', '', '2_2', 1), ('VT/4', 473, 'ut', 'that, to, in order to, so that', '', '2_2', 4),
('VVLTVS', 474, 'vultus', 'face', '', '2_2', 1), ('VXOR', 475, 'uxor', 'wife', '', '2_3', 2), ('AT/2', 476, 'at', 'but', '', '2_3', 1), ('CONIVX', 477, 'coniunx', 'spouse', '', '2_3', 1),
('DISCIPVLA', 478, 'discipula', 'student', '', '2_3', 1), ('DISCO', 479, 'disco', 'to learn', '', '2_3', 1), ('DOMINVS', 480, 'dominus', 'master, lord', '', '2_3', 1), ('FAMA', 481, 'fama', 'fame, name, reputation', '', '2_3', 1),
('FRATER', 482, 'frater', 'brother', '', '2_3', 1), ('IMPROBVS', 483, 'improbus', 'wicked, bad', '', '2_3', 1), ('IVNGO', 484, 'iungo', 'to join', '', '2_3', 1), ('MAGISTER', 485, 'magister', 'teacher', '', '2_3', 1),
('MATRIMONIVM', 486, 'matrimonium', 'marriage', '', '2_3', 1), ('NE/4', 487, 'ne', 'that not, not to, lest ', '', '2_3', 3), ('NVSQVAM', 488, 'nusquam', 'nowhere', '', '2_3', 1), ('PARIO/2', 489, 'pario', 'to give birth to', '', '2_3', 1),
('PERDO', 490, 'perdo', 'to lose, waste', '', '2_3', 1), ('SALVS', 491, 'salus', 'health, welfare', '', '2_3', 1), ('SALVTEMDICERE', 492, 'salutem', 'to greet (customary opening to letter) ', '', '2_3', 1), ('SCRIBO', 493, 'scribo', 'to write', '', '2_3', 1),
('VT/4', 494, 'ut', 'that, to, in order to, so that', '', '2_3', 4), ('VXOREMDEDVCERE', 495, 'uxorem', 'to marry a woman, to take as a wife', '', '2_3', 1), ('NEC/2', 496, 'nec', 'nor; and not', '', '2_4', 2),
('RECIPIO', 497, 'recipio', 'to take back', '', '2_4', 2), ('AGMEN', 498, 'agmen', 'marching column', '', '2_4', 1), ('APERIO', 499, 'aperio', 'to open', '', '2_4', 1), ('COEPIO', 500, 'coepi', 'to begin (w/ inf.)', '', '2_4', 1),
('DEFENDO', 501, 'defendo', 'to defend', '', '2_4', 1), ('EDO/1', 502, 'edo', 'to produce, give forth', '', '2_4', 1), ('EXTRA/2', 503, 'extra', 'outside of (w/ acc.)', '', '2_4', 1), ('FVRO', 504, 'furo', 'to rage, be insane', '', '2_4', 1),
('INGENS', 505, 'ingens', 'huge', '', '2_4', 1), ('INVADO/2', 506, 'invado', 'to burst in', '', '2_4', 1), ('LIGNEVS', 507, 'ligneus', 'made of wood', '', '2_4', 1), ('NEQVENEC', 508, 'neque', 'neither…nor…', '', '2_4', 1),
('PARCO', 509, 'parco', 'to spare somebody/thing (w/ dat.)', '', '2_4', 1), ('PONS', 510, 'pons', 'bridge', '', '2_4', 1), ('PORTA', 511, 'porta', 'gate', '', '2_4', 1), ('PRIMO', 512, 'primo', 'at first', '', '2_4', 1),
('QVAM/1', 513, 'quam', 'than (w/ comp. words)', '', '2_4', 2), ('QVANTVS/1', 514, 'quantus', 'how great, how much (inter. or rel. adj.)', '', '2_4', 1), ('RESISTO', 515, 'resisto', 'to resist (w/ dat.)', '', '2_4', 1),
('SIMVL/1', 516, 'simul', 'at the same time', '', '2_4', 1), ('TVTVS', 517, 'tutus', 'safe', '', '2_4', 1), ('VACVVS', 518, 'vacuus', 'empty of (w/ abl.)', '', '2_4', 1), ('VALEO', 519, 'valeo', 'to be able (w/ inf.); to be in good health', '', '2_4', 3),
('VICTOR', 520, 'victor', 'victor', '', '2_4', 1), ('VTINAM', 521, 'utinam', 'if only', '', '2_4', 2), ('BIBO/2', 522, 'bibo', 'to drink', '', '2_5', 1), ('CARMEN/1', 523, 'carmen', 'song, poem', '', '2_5', 1),
('CIBVS', 524, 'cibus', 'food', '', '2_5', 1), ('DVLCIS', 525, 'dulcis', 'sweet', '', '2_5', 1), ('FLVMEN', 526, 'flumen', 'river', '', '2_5', 1), ('IMMEMOR', 527, 'immemor', 'forgetful of (w/ gen.)', '', '2_5', 1),
('IOCVS', 528, 'iocus', 'joke', '', '2_5', 1), ('IVVENTVS', 529, 'iuventus', 'youth', '', '2_5', 1), ('LEVIS/1', 530, 'levis', 'light', '', '2_5', 1), ('MENS', 531, 'mens', 'mind, spirit', '', '2_5', 1),
('NE/4', 532, 'ne', 'that not, not to, lest ', '', '2_5', 3), ('ORO', 533, 'oro', 'to ask, entreat', '', '2_5', 1), ('PLACEO', 534, 'placeo', 'to please, be agreeable to somebody', '', '2_5', 1), ('PROXIMVS/2', 535, 'proximus', 'nearest', '', '2_5', 1),
('TAMQVAM/2', 536, 'tam', 'so…as…', '', '2_5', 1), ('VEHEMENS', 537, 'vehemens', 'violent, vehement', '', '2_5', 1), ('VETVS', 538, 'vetus', 'old', '', '2_5', 1), ('VINVM', 539, 'vinum', 'wine', '', '2_5', 1),
('VIRTVS', 540, 'virtus', 'courage, virtue', '', '2_5', 1), ('VITIVM', 541, 'vitium', 'vice', '', '2_5', 1), ('VT/4', 542, 'ut', 'that, to, in order to, so that', '', '2_5', 4), ('PATER', 543, 'pater', 'father', '', '2_6', 2),
('DECIPIO', 544, 'decipio', 'to deceive', '', '2_6', 1), ('DILIGO/3', 545, 'diligo', 'to love, esteem highly', '', '2_6', 1), ('DVO', 546, 'duo', 'two', '', '2_6', 1), ('EXERCITVS/1', 547, 'exercitus', 'army', '', '2_6', 1),
('FIDELIS/2', 548, 'fidelis', 'faithful, loyal', '', '2_6', 1), ('HERES', 549, 'heres', 'heir', '', '2_6', 1), ('IMPERIVM', 550, 'imperium', 'rule, empire, power', '', '2_6', 1), ('INOPIA', 551, 'inopia', 'helplessness, want', '', '2_6', 1),
('LAVDO', 552, 'laudo', 'to praise', '', '2_6', 1), ('NECESSEEST', 553, 'necesse', 'it is necessary for someone (dat.) to do something (inf.)', '', '2_6', 1), ('NEMO', 554, 'nemo', 'no one', '', '2_6', 1),
('PAVLO', 555, 'paulo', 'a little bit, to a small extent', '', '2_6', 1), ('QVAM/1', 556, 'quam', 'than (w/ comp. words)', '', '2_6', 2), ('QVANTVM/3', 557, 'quantum', 'to what extent, how much', '', '2_6', 1),
('RESTITVO', 558, 'restituo', 'to restore', '', '2_6', 1), ('SATIS/2', 559, 'satis', 'enough, sufficiently', '', '2_6', 1), ('SECVNDVS/1', 560, 'secundus', 'second', '', '2_6', 1), ('TERTIVS', 561, 'tertius', 'third', '', '2_6', 1),
('TRES', 562, 'tres', 'three', '', '2_6', 1), ('TRISTIS', 563, 'tristis', 'sad', '', '2_6', 1), ('VEHEMENTER', 564, 'vehementer', 'strongly, vehemently', '', '2_6', 1), ('NOLO', 565, 'nolo', 'not to want, to be unwilling', '', '2_7', 2),
('AETAS', 566, 'aetas', 'age', '', '2_7', 1), ('FIDES/2', 567, 'fides', 'faith', '', '2_7', 1), ('FVNDO/2', 568, 'fundo', 'to pour', '', '2_7', 1), ('GLORIA', 569, 'gloria', 'glory', '', '2_7', 1),
('LIBERTAS', 570, 'libertas', 'freedom', '', '2_7', 1), ('LVMEN', 571, 'lumen', 'light', '', '2_7', 1), ('MALO', 572, 'malo', 'to prefer', '', '2_7', 1), ('ORNATVS/1', 573, 'ornatus', 'adorned, ornate, elaborate', '', '2_7', 1),
('OTIVM', 574, 'otium', 'leisure, free time', '', '2_7', 1), ('POTENS', 575, 'potens', 'powerful', '', '2_7', 1), ('PVBLICVS/2', 576, 'publicus', 'common', '', '2_7', 1), ('QVALIS/1', 577, 'qualis', 'what sort of? (inter. adj.)', '', '2_7', 1),
('RESPVBLICA', 578, 'res', 'state', '', '2_7', 1), ('STVDIOSVS', 579, 'studiosus', 'fond of (w/ gen.)', '', '2_7', 1), ('TAMQVAM/1', 580, 'tamquam', 'as', '', '2_7', 1), ('TOT', 581, 'tot', 'so many', '', '2_7', 1),
('TRAHO', 582, 'traho', 'to drag, draw', '', '2_7', 1), ('VBI/1', 583, 'ubi', 'where? (inter. adv.)', '', '2_7', 1), ('VIX', 584, 'vix', 'hardly', '', '2_7', 1), ('VNVS', 585, 'unus', 'one', '', '2_7', 1),
('VOLO/3', 586, 'volo', 'to want', '', '2_7', 1), ('VTILIS', 587, 'utilis', 'useful', '', '2_7', 1), ('ADHVC', 588, 'adhuc', 'still, up to this time', '', '2_8', 1), ('ANTIQVVS', 589, 'antiquus', 'ancient', '', '2_8', 1),
('ARS', 590, 'ars', 'science, art, skill', '', '2_8', 1), ('DOMINOR', 591, 'dominor', 'to dominate, rule', '', '2_8', 1), ('HORTOR', 592, 'hortor', 'to exhort, urge', '', '2_8', 1), ('LATINE', 593, 'Latine', 'in Latin', '', '2_8', 1),
('LATINVS/A', 594, 'Latinus', 'Latin, pertaining to Latin', '', '2_8', 1), ('LINGVA', 595, 'lingua', 'language; tongue', '', '2_8', 1), ('LOQVOR', 596, 'loquor', 'to speak', '', '2_8', 1), ('MAGIS/2', 597, 'magis', 'more', '', '2_8', 1),
('MAIOR', 598, 'maior', 'bigger; greater', '', '2_8', 1), ('MAXIMVS', 599, 'maximus', 'greatest', '', '2_8', 1), ('MELIOR', 600, 'melior', 'better', '', '2_8', 1), ('MINIMVS', 601, 'minimus', 'smallest', '', '2_8', 1),
('MINVS', 602, 'minus', 'less', '', '2_8', 2), ('OPTIMVS', 603, 'optimus', 'best', '', '2_8', 1), ('PARTIOR', 604, 'partior', 'to divide, distribute', '', '2_8', 1), ('PATIOR', 605, 'patior', 'to endure, tolerate, suffer', '', '2_8', 1),
('PEIOR', 606, 'peior', 'worse', '', '2_8', 1), ('PESSIMVS', 607, 'pessimus', 'worst', '', '2_8', 1), ('PLVRIMVS', 608, 'plurimus', 'most', '', '2_8', 1), ('PLVS', 609, 'plus', 'more', '', '2_8', 1),
('SEQVOR', 610, 'sequor', 'to follow', '', '2_8', 1), ('VEREOR', 611, 'vereor', 'to fear, respect', '', '2_8', 1), ('ADDO', 612, 'addo', 'to add', '', '2_9', 1), ('AVRIS', 613, 'auris', 'ear', '', '2_9', 1),
('CONOR', 614, 'conor', 'to try', '', '2_9', 1), ('DEMITTO', 615, 'demitto', 'to send down', '', '2_9', 1), ('DISSIMILIS', 616, 'dissimilis', 'dissimilar ', '', '2_9', 1), ('FACILIS', 617, 'facilis', 'easy', '', '2_9', 1),
('FERO', 618, 'fero', 'to carry, bear', '', '2_9', 1), ('FIO', 619, 'fio', 'to be made, become; (impersonally) to happen', '', '2_9', 1), ('FRIGVS', 620, 'frigus', 'cold', '', '2_9', 1), ('GENVS/1', 621, 'genus', 'kind', '', '2_9', 1),
('GLACIES', 622, 'glacies', 'ice', '', '2_9', 1), ('GRACILIS', 623, 'gracilis', 'slender', '', '2_9', 1), ('HVMILIS', 624, 'humilis', 'low, humble', '', '2_9', 1), ('ITER', 625, 'iter', 'road, trip ', '', '2_9', 1),
('LABOR/2', 626, 'labor', 'to slide, slip, glide down', '', '2_9', 1), ('MODEROR', 627, 'moderor', 'to manage, direct, guide', '', '2_9', 1), ('NIX', 628, 'nix', 'snow', '', '2_9', 1), ('ONVS', 629, 'onus', 'weight, burden', '', '2_9', 1),
('PERVENIO', 630, 'pervenio', 'to arrive', '', '2_9', 1), ('PROGREDIOR', 631, 'progredior', 'to go forward, proceed', '', '2_9', 1), ('QVOTIENS/2', 632, 'quotiens', 'as often as', '', '2_9', 1), ('SIMVLAC/2', 633, 'simulac', 'as soon as', '', '2_9', 1),
('SVVS', 634, 'suus', 'his, her, its, their', '', '2_10', 2), ('AEDES', 635, 'aedes', 'temple; pl. dwelling, house', '', '2_10', 1), ('EO/1', 636, 'eo', 'to go ', '', '2_10', 1), ('IVCVNDVS', 637, 'iucundus', 'pleasant, nice', '', '2_10', 1),
('LABOR/1', 638, 'labor', 'labor, toil', '', '2_10', 1), ('LAEDO', 639, 'laedo', 'to harm', '', '2_10', 1), ('LIBER/2', 640, 'liber', 'free', '', '2_10', 1), ('LVCRVM', 641, 'lucrum', 'profit, gain', '', '2_10', 1),
('MARITIMVS', 642, 'maritimus', 'maritime', '', '2_10', 1), ('MODVS', 643, 'modus', 'way, method, manner', '', '2_10', 1), ('PAVLISPER', 644, 'paulisper', 'for a little while', '', '2_10', 1), ('PECVNIA', 645, 'pecunia', 'money', '', '2_10', 1),
('PLACIDVS', 646, 'placidus', 'peaceful, calm', '', '2_10', 1), ('POTIVS', 647, 'potius', 'rather', '', '2_10', 1), ('PROSPER', 648, 'prosper', 'fortunate, prosperous', '', '2_10', 1), ('REDDO', 649, 'reddo', 'to give back', '', '2_10', 1),
('SARCINA', 650, 'sarcina', 'burden, baggage', '', '2_10', 1), ('SCELESTVS', 651, 'scelestus', 'wicked', '', '2_10', 1), ('SEMEL', 652, 'semel', 'once', '', '2_10', 1), ('SERENVS', 653, 'serenus', 'calm, clear', '', '2_10', 1),
('PARVM/2', 654, 'parum', 'too little', '', '2_11', 2), ('ALTER', 655, 'alter', 'the other (of two)', '', '2_11', 1), ('GEMMA', 656, 'gemma', 'gem, precious stone', '', '2_11', 1), ('LEGATVS', 657, 'legatus', 'ambassador', '', '2_11', 1),
('MAGNIHABEO', 658, 'magni', 'to esteem a lot', '', '2_11', 1), ('MINVS', 659, 'minus', 'less', '', '2_11', 2), ('NESCIO', 660, 'nescio', 'not to know', '', '2_11', 1), ('NEVTER', 661, 'neuter', 'neither, none (of two)', '', '2_11', 1),
('NVLLVS', 662, 'nullus', 'none', '', '2_11', 1), ('OPERAEPRETIVMEST', 663, 'operae', 'it is worthwhile', '', '2_11', 1), ('POPVLVS/1', 664, 'populus', 'a people, populace', '', '2_11', 1), ('QVOMODO/1', 665, 'quomodo', 'how', '', '2_11', 1),
('SALVTO', 666, 'saluto', 'to greet ', '', '2_11', 1), ('SERVVS/1', 667, 'servus', 'slave, servant', '', '2_11', 1), ('SOLVS', 668, 'solus', 'alone, only', '', '2_11', 1), ('SPECTO', 669, 'specto', 'to watch', '', '2_11', 1),
('TACEO', 670, 'taceo', 'to be silent, keep quiet', '', '2_11', 1), ('TOTVS', 671, 'totus', 'whole, entire', '', '2_11', 1), ('TVRPIS', 672, 'turpis', 'shameful, disgraceful', '', '2_11', 1), ('VTER/4', 673, 'uter', 'who, which (of two)?', '', '2_11', 1),
('VTOR', 674, 'utor', 'to use (w/ abl.)', '', '2_11', 1), ('CVM/3', 675, 'cum', 'when, after', '', '2_12', 2), ('INQVIO', 676, 'inquam', 'to say (used with direct speech)', '', '2_12', 3), ('TAMEN', 677, 'tamen', 'however', '', '2_12', 2),
('CARVS', 678, 'carus', 'dear', '', '2_12', 1), ('INSVLA', 679, 'insula', 'island', '', '2_12', 2),
('MORIOR', 680, 'morior', 'to die', '', '2_12', 1), ('NIMIS', 681, 'nimis', 'too much', '', '2_12', 1), ('NISI', 682, 'nisi', 'if not, unless', '', '2_12', 1), ('OFFICIVM', 683, 'officium', 'duty', '', '2_12', 1),
('ORBIS', 684, 'orbis', 'circle', '', '2_12', 1), ('ORBISTERRARVM', 685, 'orbis', 'the earth, the world', '', '2_12', 1), ('PROBO', 686, 'probo', 'to approve ', '', '2_12', 1), ('QVAMQVAM/2', 687, 'quamquam', 'although', '', '2_12', 1),
('QVAMVIS/1', 688, 'quamvis', 'although', '', '2_12', 1), ('QVIA', 689, 'quia', 'because', '', '2_12', 1), ('QVIDEM', 690, 'quidem', 'indeed', '', '2_12', 1), ('QVOD/1', 691, 'quod', 'because', '', '2_12', 1),
('SENTENTIA', 692, 'sententia', 'opinion, point of view', '', '2_12', 1), ('SORS', 693, 'sors', 'lot', '', '2_12', 1), ('SPERO', 694, 'spero', 'to hope', '', '2_12', 1), ('SPES', 695, 'spes', 'hope', '', '2_12', 1),
('ATQVE/1', 696, 'atque', 'as', '', '2_13', 2), ('ABSENS', 697, 'absens', 'away, absent', '', '2_13', 1), ('ABSVM/1', 698, 'absum', 'to be away', '', '2_13', 1), ('BENEVOLENTIA', 699, 'benevolentia', 'good will', '', '2_13', 1),
('DECLARO', 700, 'declaro', 'to demonstrate, show, make known, reveal', '', '2_13', 1), ('IDEM', 701, 'idem', 'the same', '', '2_13', 1), ('IPSE', 702, 'ipse', 'self', '', '2_13', 1), ('IRASCOR', 703, 'irascor', 'to be angry at (w/ dat.)', '', '2_13', 1),
('ISTE', 704, 'iste', 'that (of yours)', '', '2_13', 1), ('MIROR', 705, 'miror', 'to marvel, be surprised at', '', '2_13', 1), ('MVLTITVDO', 706, 'multitudo', 'crowd, throng', '', '2_13', 1), ('NEGO', 707, 'nego', 'to deny ', '', '2_13', 1),
('NVMERO/1', 708, 'numero', 'to number, count among', '', '2_13', 1), ('OFFENDO', 709, 'offendo', 'to happen upon, offend', '', '2_13', 1), ('REDEO/1', 710, 'redeo', 'to go back, return', '', '2_13', 1), ('REFERO', 711, 'refero', 'to carry back, report', '', '2_13', 1),
('SOCIVS/1', 712, 'socius', 'associate, partner, ally', '', '2_13', 1), ('TALIS', 713, 'talis', 'such a', '', '2_13', 1), ('TVRRIS', 714, 'turris', 'tower', '', '2_13', 1), ('VENIA', 715, 'venia', 'pardon, indulgence, forgiveness', '', '2_13', 1),
('VERSOR', 716, 'versor', 'to be situated in, be occupied in ', '', '2_13', 1), ('VIRGA', 717, 'virga', 'twig, stick', '', '2_13', 1), ('VOLVNTAS', 718, 'voluntas', 'will', '', '2_13', 1), ('AFFIRMO', 719, 'affirmo', 'to assert, maintain', '', '2_14', 1),
('CIRCVMEO/1', 720, 'circumeo', 'to go around', '', '2_14', 1), ('CONTINEO', 721, 'contineo', 'to hold, keep together, contain', '', '2_14', 1), ('COTIDIANVS', 722, 'cottidianus', 'of every day, daily', '', '2_14', 1), ('ELEMENTVM', 723, 'elementum', 'element', '', '2_14', 1),
('ERGO/2', 724, 'ergo', 'therefore', '', '2_14', 1), ('GRAVITAS', 725, 'gravitas', 'weight, gravity', '', '2_14', 1), ('IMMENSVS', 726, 'immensus', 'immeasurable, immense, endless', '', '2_14', 1), ('INFINITVS', 727, 'infinitus', 'boundless, unlimited', '', '2_14', 1),
('MAXIME', 728, 'maxime', 'most', '', '2_14', 1), ('MEDIVS', 729, 'medius', 'middle', '', '2_14', 1), ('MOTVS', 730, 'motus', 'motion, movement', '', '2_14', 1), ('MVLTO/2', 731, 'multo', 'by much', '', '2_14', 1),
('NATVRA', 732, 'natura', 'nature', '', '2_14', 1), ('NECESSARIO', 733, 'necessario', 'necessarily', '', '2_14', 1), ('PERPERAM', 734, 'perperam', 'wrongly, incorrectly', '', '2_14', 1), ('PONDVS', 735, 'pondus', 'weight', '', '2_14', 1),
('PRAESERTIM', 736, 'praesertim', 'especially', '', '2_14', 1), ('QVIES', 737, 'quies', 'rest, repose', '', '2_14', 1), ('VNDIQVE', 738, 'undique', 'from all parts, from everywhere', '', '2_14', 1), ('VOLVO', 739, 'volvo', 'to turn round', '', '2_14', 1),
('VT/4', 740, 'ut', 'that, to, in order to, so that', '', '2_14', 4), ('ANIMADVERTO', 741, 'animadverto', 'to notice', '', '2_15', 1), ('APPROPINQVO', 742, 'appropinquo', 'to approach (w/ dat. or ad + acc.)', '', '2_15', 1),
('CERNO', 743, 'cerno', 'to see, distinguish with the eyes', '', '2_15', 1), ('CIRCA/2', 744, 'circa', 'around (w/ acc.)', '', '2_15', 1), ('CLAMO', 745, 'clamo', 'to shout, scream', '', '2_15', 1), ('FINGO', 746, 'fingo', 'to imagine, form in the mind', '', '2_15', 1),
('IMPINGO', 747, 'impingo', 'to push, strike, inflict', '', '2_15', 1), ('INFLIGO', 748, 'infligo', 'to strike on or against, inflict', '', '2_15', 1), ('ITERVM', 749, 'iterum', 'again', '', '2_15', 1), ('OPPIDVM', 750, 'oppidum', 'town', '', '2_15', 1),
('PERCVTIO', 751, 'percutio', 'to strike through ', '', '2_15', 1), ('PRAEDITVS', 752, 'praeditus', 'endowed with, possessed of (w/ abl.)', '', '2_15', 1), ('REPELLO', 753, 'repello', 'to push back, thrust back', '', '2_15', 1), ('RIDEO', 754, 'rideo', 'to laugh', '', '2_15', 1),
('RVMPO', 755, 'rumpo', 'to break, tear', '', '2_15', 1), ('SEDES', 756, 'sedes', 'seat, abode', '', '2_15', 1), ('SIC', 757, 'sic', 'in such a way', '', '2_15', 1), ('SIDVS', 758, 'sidus', 'constellation', '', '2_15', 1),
('TELVM', 759, 'telum', 'spear, javelin', '', '2_15', 1), ('VEHO', 760, 'veho', 'to drive, carry', '', '2_15', 1)]
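# section_list maps each section label to the label of the section before it
# ('start' marks the beginning), so the chapter sequence forms a chain that
# can be walked backwards from any point.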
section_list = {'1.1': 'start', '1.2': '1.1', '1.3': '1.2', '1.4': '1.3', '1.5': '1.4', '1.6': '1.5', '1.7': '1.6', '1.8': '1.7', '1.9': '1.8', '1.10': '1.9', '1.11': '1.10', '1.12': '1.11', '1.13': '1.12', '1.14': '1.13', '1.15': '1.14', '1.16': '1.15', '1.17': '1.16', '1.18': '1.17', '1.19': '1.18', '1.20': '1.19', '1.21': '1.20', '2.1': '1.21', '2.2': '2.1', '2.3': '2.2', '2.4': '2.3', '2.5': '2.4', '2.6': '2.5', '2.7': '2.6', '2.8': '2.7', '2.9': '2.8', '2.10': '2.9', '2.11': '2.10', '2.12': '2.11', '2.13': '2.12', '2.14': '2.13', '2.15': '2.14', 'end': '2.15', 'start': 'start'}
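# Book-level metadata for the Text constructor below; section_level = 2
# presumably reflects the two-part 'volume.chapter' form of the section labels.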
title = "Latin for the New Millennium Vols 1 and 2 (Tunberg-Minkova)"
section_level = 2
language = "Latin"
book = text.Text(title, section_words, the_text, section_list, section_level, language, True, False)
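
# Illustrative sketch (not part of the original data file): each the_text entry
# is (LEMMA_KEY, running_index, lemma, gloss, notes, chapter_tag, frequency),
# entries are ordered by running_index, and section_words maps a section label
# to the running index of the last word introduced in that section. A section's
# new vocabulary can therefore be recovered by slicing between the previous
# section's last index and this section's last index.
def words_in_section(section):
    """Return the (lemma, gloss) pairs first introduced in the given section."""
    start = section_words[section_list[section]] + 1  # one past the previous section's end
    end = section_words[section]
    return [(lemma, gloss) for _, _, lemma, gloss, _, _, _ in the_text[start:end + 1]]

# e.g. words_in_section('2.15')[-1] == ('veho', 'to drive, carry')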
|
normal
|
{
"blob_id": "8a0c0f5ca6a965e07f59a6c88d4dd335310cbdfc",
"index": 9530,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnan = ''\nsection_words = {'start': -1, '1.1': 17, '1.2': 38, '1.3': 55, '1.4': 76,\n '1.5': 95, '1.6': 114, '1.7': 133, '1.8': 151, '1.9': 170, '1.10': 190,\n '1.11': 209, '1.12': 233, '1.13': 257, '1.14': 277, '1.15': 299, '1.16':\n 320, '1.17': 341, '1.18': 364, '1.19': 385, '1.20': 405, '1.21': 428,\n '2.1': 451, '2.2': 474, '2.3': 495, '2.4': 521, '2.5': 542, '2.6': 564,\n '2.7': 587, '2.8': 611, '2.9': 633, '2.10': 653, '2.11': 674, '2.12': \n 695, '2.13': 718, '2.14': 740, '2.15': 760, 'end': -2}\nthe_text = [('AGRICOLA', 0, 'agricola', 'farmer', '', '1_1', 1), ('AMBVLO',\n 1, 'ambulo', 'to walk', '', '1_1', 2), ('AMO', 2, 'amo', 'to love', '',\n '1_1', 2), ('AQVA', 3, 'aqua', 'water', '', '1_1', 1), ('ATHLETA', 4,\n 'athleta', 'athlete', '', '1_1', 1), ('BENE', 5, 'bene', 'well', '',\n '1_1', 1), ('CVRO', 6, 'curo', 'to take care for/of', '', '1_1', 2), (\n 'ET/2', 7, 'et', 'and', '', '1_1', 1), ('FILIA', 8, 'filia', 'daughter',\n '', '1_1', 1), ('ITAQVE', 9, 'itaque', 'and so', '', '1_1', 1), ('LVPA',\n 10, 'lupa', 'she–wolf', '', '1_1', 1), ('NAVTA', 11, 'nauta', 'sailor',\n '', '1_1', 1), ('POETA', 12, 'poeta', 'poet', '', '1_1', 1), ('POSTEA',\n 13, 'postea', 'afterwards', '', '1_1', 1), ('PVELLA', 14, 'puella',\n 'girl', '', '1_1', 1), ('ROMA/N', 15, 'Roma', 'Rome', '', '1_1', 1), (\n 'SVM/1', 16, 'sum', 'to be', '', '1_1', 3), ('TERRA', 17, 'terra',\n 'land', '', '1_1', 1), ('AMBVLO', 18, 'ambulo', 'to walk', '', '1_2', 2\n ), ('AMO', 19, 'amo', 'to love', '', '1_2', 2), ('CVRO', 20, 'curo',\n 'to take care for/of', '', '1_2', 2), ('SVM/1', 21, 'sum', 'to be', '',\n '1_2', 3), ('DEBEO', 22, 'debeo', 'ought, must, should; to owe', '',\n '1_2', 1), ('DIV', 23, 'diu', 'for a long time', '', '1_2', 1), ('EGO',\n 24, 'ego', 'I', '', '1_2', 3), ('EXSPECTO', 25, 'exspecto',\n 'to wait for, await, expect', '', '1_2', 1), ('FABVLA/1', 26, 'fabula',\n 'story', '', '1_2', 1), ('FORMA', 27, 'forma', 'form,appearance', '',\n '1_2', 1), ('HABEO', 28, 'habeo', 'to have', '', '1_2', 1), ('HABITO', \n 29, 'habito', 'to live, dwell', '', '1_2', 1), ('NARRO', 30, 'narro',\n 'to tell', '', '1_2', 1), ('NON', 31, 'non', 'not', '', '1_2', 1), (\n 'NVNC', 32, 'nunc', 'now', '', '1_2', 1), ('PARO/2', 33, 'paro',\n 'to prepare, get ready, design', '', '1_2', 2), ('PATRIA', 34, 'patria',\n 'fatherland', '', '1_2', 1), ('TENEO', 35, 'teneo', 'to hold', '',\n '1_2', 1), ('TV', 36, 'tu', 'you', '', '1_2', 3), ('VIDEO', 37, 'video',\n 'to see', '', '1_2', 1), ('VOCO', 38, 'voco', 'to call', '', '1_2', 1),\n ('EGO', 39, 'ego', 'I', '', '1_3', 3), ('TV', 40, 'tu', 'you', '',\n '1_3', 3), ('AGER', 41, 'ager', 'field', '', '1_3', 1), ('AMICVS/1', 42,\n 'amicus', 'friend', '', '1_3', 1), ('ANIMVS', 43, 'animus',\n 'spirit, soul, mind', '', '1_3', 1), ('CASA', 44, 'casa',\n 'little house, cottage', '', '1_3', 1), ('CVM/2', 45, 'cum',\n 'with (w/ abl.)', '', '1_3', 1), ('DEINDE', 46, 'deinde', 'then', '',\n '1_3', 1), ('DOMVS', 47, 'domus', 'home', '', '1_3', 2), ('FILIVS', 48,\n 'filius', 'son', '', '1_3', 1), ('IN', 49, 'in',\n 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_3', 2), ('PVER',\n 50, 'puer', 'boy', '', '1_3', 1), ('RIVVS', 51, 'rivus',\n 'brook, stream', '', '1_3', 1), ('TIMEO', 52, 'timeo',\n 'to fear, to be afraid', '', '1_3', 1), ('VALDE', 53, 'valde',\n 'very, exceedingly', '', '1_3', 1), ('VIA', 54, 'via', 'road', '',\n '1_3', 1), ('VIR', 55, 'vir', 'man', '', '1_3', 1), ('IN', 56, 'in',\n 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_4', 2), 
('AD/2',\n 57, 'ad', 'into, towards, to (w/ acc.)', '', '1_4', 1), ('ARMATVS/2', \n 58, 'armatus', 'armed', '', '1_4', 1), ('AVTEM', 59, 'autem', 'however',\n '', '1_4', 1), ('BELLVM', 60, 'bellum', 'war', '', '1_4', 1), ('BONVS',\n 61, 'bonus', 'good', '', '1_4', 1), ('CASTRA/2', 62, 'castra', 'camp',\n '', '1_4', 1), ('DO', 63, 'do', 'to give', '', '1_4', 1), ('DOLVS', 64,\n 'dolus', 'trickery, deception', '', '1_4', 1), ('EX', 65, 'e',\n 'from, out of (w/ abl.)', '', '1_4', 1), ('INTRO/2', 66, 'intro',\n 'to enter', '', '1_4', 1), ('IVBEO', 67, 'iubeo',\n 'to order somebody (acc.) to do something (inf.)', '', '1_4', 1), (\n 'IVSTVS', 68, 'iustus', 'legitimate, open, just', '', '1_4', 1), (\n 'MAGNVS', 69, 'magnus', 'large, great, important', '', '1_4', 1), (\n 'MALVS/3', 70, 'malus', 'bad', '', '1_4', 1), ('PRAECLARVS', 71,\n 'praeclarus', 'famous, distinguished', '', '1_4', 1), ('PRAEMIVM', 72,\n 'praemium', 'reward', '', '1_4', 1), ('ROMANVS/A', 73, 'Romanus',\n 'Roman; the Romans (pl.)', '', '1_4', 1), ('SED', 74, 'sed', 'but', '',\n '1_4', 1), ('VENENVM', 75, 'venenum', 'poison', '', '1_4', 1), (\n 'VINCVLVM', 76, 'vinculum', 'chain, fetter', '', '1_4', 1), ('PARO/2', \n 77, 'paro', 'to prepare, get ready, design', '', '1_5', 2), ('AB', 78,\n 'a', 'by, from (w/ abl.)', '', '1_5', 1), ('AVXILIVM', 79, 'auxilium',\n 'help', '', '1_5', 1), ('COGITO', 80, 'cogito', 'to think', '', '1_5', \n 1), ('CONSILIVM', 81, 'consilium', 'plan, advice', '', '1_5', 2), ('DE',\n 82, 'de', 'about, concerning, down from (w/ abl.)', '', '1_5', 1), (\n 'DOLEO', 83, 'doleo', 'to feel pain, to be hurt', '', '1_5', 1), (\n 'EPISTOLA', 84, 'epistula', 'letter', '', '1_5', 1), ('FAMILIA', 85,\n 'familia', 'family, household', '', '1_5', 1), ('GAVDIVM', 86,\n 'gaudium', 'joy', '', '1_5', 1), ('LACRIMA', 87, 'lacrima', 'tear', '',\n '1_5', 1), ('LONGE', 88, 'longe', 'far', '', '1_5', 1), ('LONGVS', 89,\n 'longus', 'long', '', '1_5', 1), ('MISER', 90, 'miser',\n 'wretched, sad, miserable', '', '1_5', 1), ('NAM', 91, 'nam',\n 'for, in fact', '', '1_5', 1), ('NONSOLVM', 92, 'non',\n 'not only…, but also…', '', '1_5', 1), ('PVLCHER', 93, 'pulcher',\n 'beautiful, nice', '', '1_5', 1), ('SEMPER', 94, 'semper', 'always', '',\n '1_5', 1), ('TAMEN', 95, 'tamen', 'however', '', '1_5', 2), ('SVM/1', \n 96, 'sum', 'to be', '', '1_6', 3), ('DOCEO', 97, 'doceo', 'to teach',\n '', '1_6', 1), ('DVM/2', 98, 'dum', 'while', '', '1_6', 1), ('EXEMPLVM',\n 99, 'exemplum', 'example', '', '1_6', 1), ('FIRMO', 100, 'firmo',\n 'to strengthen', '', '1_6', 1), ('IACEO', 101, 'iaceo',\n 'to lie down, to be inert', '', '1_6', 1), ('IVDICO', 102, 'iudico',\n 'to judge', '', '1_6', 1), ('LIBER/1', 103, 'liber', 'book', '', '1_6',\n 1), ('LITTERA', 104, 'littera',\n 'letter of the alphabet (sing.); literature, letter (pl.)', '', '1_6', \n 1), ('MANEO', 105, 'maneo', 'to remain', '', '1_6', 1), ('MEMORIA', 106,\n 'memoria', 'memory', '', '1_6', 1), ('MVLTVS', 107, 'multus',\n 'much, many', '', '1_6', 1), ('POSSVM/1', 108, 'possum',\n 'to be able, can', '', '1_6', 1), ('PROPTER/2', 109, 'propter',\n 'because of, on account of (w/ acc.)', '', '1_6', 1), ('SAEPE', 110,\n 'saepe', 'often', '', '1_6', 1), ('SERVO', 111, 'servo',\n 'to save, preserve', '', '1_6', 1), ('SOLEO', 112, 'soleo',\n 'to be accustomed (w/ inf.)', '', '1_6', 1), ('TENEBRAE', 113,\n 'tenebrae', 'shadows, darkness (pl.)', '', '1_6', 1), ('VITA', 114,\n 'vita', 'life', '', '1_6', 1), ('AESTIMO', 115, 'aestimo',\n 'to regard, esteem', '', '1_7', 1), ('AESTIMOVNIVSASSIS', 116,\n 
'aestimo', 'I do not care a bit ', '', '1_7', 1), ('AMOR', 117, 'amor',\n 'love', '', '1_7', 1), ('DELICIA/1', 118, 'delicia', 'delight, pet', '',\n '1_7', 1), ('DIGITVS', 119, 'digitus', 'finger', '', '1_7', 1), (\n 'DOMINA', 120, 'domina', 'mistress', '', '1_7', 1), ('GREMIVM', 121,\n 'gremium', 'lap', '', '1_7', 1), ('INVIDEO', 122, 'invideo', 'to envy',\n '', '1_7', 1), ('MEVS', 123, 'meus', 'my', '', '1_7', 1), ('OCVLVS', \n 124, 'oculus', 'eye', '', '1_7', 1), ('PASSER', 125, 'passer',\n 'sparrow', '', '1_7', 1), ('PAX', 126, 'pax', 'peace; favor', '', '1_7',\n 1), ('PVTO', 127, 'puto', 'to think', '', '1_7', 1), ('SENEX/1', 128,\n 'senex', 'old man', '', '1_7', 1), ('SESESE', 129, 'se',\n 'him/her/itself', '', '1_7', 1), ('SEVERVS', 130, 'severus', 'serious',\n '', '1_7', 1), ('SOROR', 131, 'soror', 'sister', '', '1_7', 1), (\n 'SVI/1', 132, 'sui', 'him–/her–/itself', '', '1_7', 1), ('VERBVM', 133,\n 'verbum', 'word', '', '1_7', 1), ('CONTRA/2', 134, 'contra',\n 'against (w/ acc.)', '', '1_8', 1), ('DECERNO', 135, 'decerno',\n 'to decide, determine (often w/ inf.)', '', '1_8', 1), ('DICO/2', 136,\n 'dico', 'to say', '', '1_8', 1), ('DVX', 137, 'dux', 'leader, general',\n '', '1_8', 1), ('FORTITVDO', 138, 'fortitudo', 'courage', '', '1_8', 1),\n ('HOMO', 139, 'homo', 'man, human being; people (pl.)', '', '1_8', 1),\n ('INTELLIGO', 140, 'intellego', 'to understand', '', '1_8', 1), (\n 'LIBERO', 141, 'libero', 'to free someone (acc.) from something (abl.)',\n '', '1_8', 1), ('MILES', 142, 'miles', 'soldier', '', '1_8', 1), (\n 'NAVIGO', 143, 'navigo', 'to sail, navigate', '', '1_8', 1), (\n 'ORACVLVM', 144, 'oraculum', 'oracle, prophecy', '', '1_8', 1), ('PETO',\n 145, 'peto', 'to seek', '', '1_8', 1), ('REX', 146, 'rex', 'king', '',\n '1_8', 1), ('TANDEM', 147, 'tandem', 'finally', '', '1_8', 1), (\n 'TEMPLVM', 148, 'templum', 'temple', '', '1_8', 1), ('TIMOR', 149,\n 'timor', 'fear', '', '1_8', 1), ('TVM', 150, 'tum',\n 'then, at that time', '', '1_8', 2), ('VINCO', 151, 'vinco',\n 'to conquer', '', '1_8', 1), ('ANIMAL', 152, 'animal', 'animal', '',\n '1_9', 1), ('ARMA', 153, 'arma', 'weapons (pl.)', '', '1_9', 1), (\n 'AVDIO', 154, 'audio', 'to hear, listen', '', '1_9', 1), ('CAPVT', 155,\n 'caput', 'head', '', '1_9', 1), ('CIVIS', 156, 'civis', 'citizen', '',\n '1_9', 1), ('CONSVL', 157, 'consul', 'consul', '', '1_9', 1), ('CORPVS',\n 158, 'corpus', 'body', '', '1_9', 1), ('CREDO', 159, 'credo',\n 'to believe somebody (w/ dat.)', '', '1_9', 1), ('EXEMPLAR', 160,\n 'exemplar', 'example', '', '1_9', 1), ('GERO', 161, 'gero',\n 'to carry; to behave (w/ se)', '', '1_9', 2), ('MARE', 162, 'mare',\n 'sea', '', '1_9', 1), ('MORS', 163, 'mors', 'death', '', '1_9', 1), (\n 'MVLIER', 164, 'mulier', 'woman', '', '1_9', 1), ('ORATIO', 165,\n 'oratio', 'oration, speech', '', '1_9', 1), ('SCIO', 166, 'scio',\n 'to know', '', '1_9', 1), ('SENTIO', 167, 'sentio', 'to perceive', '',\n '1_9', 1), ('TEMPVS/1', 168, 'tempus', 'time', '', '1_9', 1), ('VENIO',\n 169, 'venio', 'to come', '', '1_9', 1), ('VRBS', 170, 'urbs', 'city',\n '', '1_9', 1), ('ACER/2', 171, 'acer', 'keen, fierce', '', '1_10', 1),\n ('AEDIFICO', 172, 'aedifico', 'to build', '', '1_10', 1), ('CAPIO/2', \n 173, 'capio', 'to take, adopt, capture', '', '1_10', 1), ('CELEBER', \n 174, 'celeber', 'renowned, well–known, crowded', '', '1_10', 1), (\n 'CVPIO', 175, 'cupio', 'to desire, want', '', '1_10', 1), ('DELEO', 176,\n 'deleo', 'to destroy', '', '1_10', 1), ('DEVS', 177, 'deus', 'god', '',\n '1_10', 1), ('DONVM', 178, 'donum', 'gift', 
'', '1_10', 1), ('EQVVS', \n 179, 'equus', 'horse', '', '1_10', 1), ('FELIX', 180, 'felix',\n 'fortunate, happy', '', '1_10', 1), ('FLAMMA', 181, 'flamma', 'flame',\n '', '1_10', 1), ('FORTIS', 182, 'fortis', 'brave, strong', '', '1_10', \n 1), ('FVGIO', 183, 'fugio', 'to flee, run away', '', '1_10', 1), (\n 'HOSTIS', 184, 'hostis', 'enemy', '', '1_10', 1), ('MOVEO', 185,\n 'moveo', 'to move', '', '1_10', 1), ('NEC/2', 186, 'nec',\n 'nor; and not', '', '1_10', 2), ('NOX', 187, 'nox', 'night', '', '1_10',\n 1), ('PAVCI', 188, 'paucus', 'few', '', '1_10', 1), ('PERICVLVM', 189,\n 'periculum', 'danger', '', '1_10', 1), ('PVGNO', 190, 'pugno',\n 'to fight', '', '1_10', 1), ('AGO', 191, 'ago',\n 'to drive, lead, do, behave', '', '1_11', 1), ('ARDEO', 192, 'ardeo',\n 'to burn, be on fire', '', '1_11', 1), ('CONSPICIO', 193, 'conspicio',\n 'to look at, observe', '', '1_11', 1), ('CRVDELIS', 194, 'crudelis',\n 'cruel', '', '1_11', 1), ('DOLOR', 195, 'dolor', 'grief, pain', '',\n '1_11', 1), ('ITA', 196, 'ita', 'yes', '', '1_11', 2), ('MINIME', 197,\n 'minime', 'No', '', '1_11', 1), ('MITTO', 198, 'mitto', 'to send', '',\n '1_11', 1), ('NE/2', 199, 'ne',\n '(added to the first word of a question', '', '1_11', 1), ('NOVVS', 200,\n 'novus', 'new', '', '1_11', 1), ('PARVM/2', 201, 'parum', 'too little',\n '', '1_11', 2), ('QVE', 202, 'que', 'and', '', '1_11', 1), ('QVOQVE', \n 203, 'quoque', 'also', '', '1_11', 1), ('REGINA', 204, 'regina',\n 'queen', '', '1_11', 1), ('RELINQVO', 205, 'relinquo', 'to abandon', '',\n '1_11', 1), ('SILVA', 206, 'silva', 'forest', '', '1_11', 1), (\n 'SPELVNCA', 207, 'spelunca', 'cave', '', '1_11', 1), ('TEMPESTAS', 208,\n 'tempestas', 'season', '', '1_11', 1), ('VNA', 209, 'una', 'together',\n '', '1_11', 1), ('BELLVMGERO', 210, 'bellum', 'to wage war', '', '1_12',\n 1), ('CONSVMO', 211, 'consumo', 'to consume', '', '1_12', 1), (\n 'DEXTERA', 212, 'dextera', 'right hand', '', '1_12', 1), ('FACIO', 213,\n 'facio', 'to do, make', '', '1_12', 1), ('IBI', 214, 'ibi', 'there', '',\n '1_12', 1), ('IGNIS', 215, 'ignis', 'fire', '', '1_12', 1), ('INQVIO', \n 216, 'inquam', 'to say (used with direct speech)', '', '1_12', 3), (\n 'IRA', 217, 'ira', 'anger', '', '1_12', 1), ('IS', 218, 'is',\n 's/he/it, this, that', '', '1_12', 1), ('NOMEN', 219, 'nomen', 'name',\n '', '1_12', 1), ('NOS', 220, 'nos', 'we; us', '', '1_12', 1), ('NOSTER',\n 221, 'noster', 'our, ours', '', '1_12', 1), ('OCCIDO/2', 222, 'occido',\n 'to strike down, knock down', '', '1_12', 1), ('OSTENDO', 223,\n 'ostendo', 'to show', '', '1_12', 1), ('PONO', 224, 'pono', 'to place',\n '', '1_12', 1), ('PROPE/2', 225, 'prope', 'near', '', '1_12', 2), (\n 'PROVIRIBVS', 226, 'pro', 'with all one’s might', '', '1_12', 1), (\n 'SIMILIS', 227, 'similis', 'similar', '', '1_12', 1), ('STATIM', 228,\n 'statim', 'immediately', '', '1_12', 1), ('TANTVS', 229, 'tantus',\n 'so much', '', '1_12', 1), ('TVVS', 230, 'tuus', 'your', '', '1_12', 1),\n ('VESTER', 231, 'vester', 'your', '', '1_12', 1), ('VIS', 232, 'vis',\n 'force', '', '1_12', 1), ('VOS', 233, 'vos', 'you', '', '1_12', 1), (\n 'EGO', 234, 'ego', 'I', '', '1_13', 3), ('TV', 235, 'tu', 'you', '',\n '1_13', 3), ('TVM', 236, 'tum', 'then, at that time', '', '1_13', 2), (\n 'ALIVS', 237, 'alius', 'another, other', '', '1_13', 1), ('APVD', 238,\n 'apud', 'at the house of (w/ acc.)', '', '1_13', 1), ('ATQVE/1', 239,\n 'atque', 'as', '', '1_13', 2), ('DISCEDO/1', 240, 'discedo',\n 'to leave, withdraw, go away', '', '1_13', 1), ('DIVES', 241, 'dives',\n 'rich', '', '1_13', 1), 
('DOCTVS', 242, 'doctus', 'learned', '', '1_13',\n 1), ('DVCO', 243, 'duco', 'to lead, take', '', '1_13', 2), ('ENIM/2', \n 244, 'enim', 'for, in fact', '', '1_13', 1), ('IVDEX', 245, 'iudex',\n 'judge', '', '1_13', 1), ('LICET/1', 246, 'licet',\n 'it is allowed, permitted (for someone)(to do something)(w/ dat. and inf.) '\n , '', '1_13', 1), ('NIHIL', 247, 'nihil', 'nothing', '', '1_13', 1), (\n 'NOLO', 248, 'nolo', 'not to want, to be unwilling', '', '1_13', 2), (\n 'OMNIS', 249, 'omnis', 'each, every, all', '', '1_13', 1), ('PRO/1', \n 250, 'pro', 'for, on behalf of (w/ abl.)', '', '1_13', 1), ('QVID', 251,\n 'quid', 'what; why', '', '1_13', 1), ('RESPONDEO', 252, 'respondeo',\n 'to answer', '', '1_13', 1), ('ROGO', 253, 'rogo', 'to ask', '', '1_13',\n 1), ('SVVS', 254, 'suus', 'his, her, its, their', '', '1_13', 2), (\n 'TANTVM/2', 255, 'tantum', 'only', '', '1_13', 1), ('VALE', 256, 'vale',\n 'to greetings! farewell!', '', '1_13', 1), ('VALEO', 257, 'valeo',\n 'to be able (w/ inf.); to be in good health', '', '1_13', 3), ('ALBVS',\n 258, 'albus', 'white', '', '1_14', 1), ('ARBOR', 259, 'arbor', 'tree',\n '', '1_14', 1), ('CADO', 260, 'cado', 'to fall', '', '1_14', 1), (\n 'COMEDO/2', 261, 'comedo', 'to eat', '', '1_14', 1), ('CONVENIO', 262,\n 'convenio', 'to meet', '', '1_14', 1), ('FLVO', 263, 'fluo', 'to flow',\n '', '1_14', 1), ('GLADIVS', 264, 'gladius', 'sword', '', '1_14', 1), (\n 'IAM', 265, 'iam', 'already, yet', '', '1_14', 1), ('MOX', 266, 'mox',\n 'soon', '', '1_14', 1), ('ODIVM', 267, 'odium', 'hatred', '', '1_14', 2\n ), ('OS/1', 268, 'os', 'mouth', '', '1_14', 1), ('PARENS/1', 269,\n 'parens', 'parent', '', '1_14', 1), ('PECTVS', 270, 'pectus', 'chest',\n '', '1_14', 1), ('PER', 271, 'per', 'through (w/ acc.)', '', '1_14', 1),\n ('PRIMVS', 272, 'primus', 'first', '', '1_14', 1), ('QVI/1', 273, 'qui',\n 'who, which (rel. pronoun); what? which? (inter. adj.) ', '', '1_14', 2\n ), ('RVBER', 274, 'ruber', 'red', '', '1_14', 1), ('SANGVIS', 275,\n 'sanguis', 'blood', '', '1_14', 1), ('SEPARO/2', 276, 'separo',\n 'to separate, divide', '', '1_14', 1), ('TANGO', 277, 'tango',\n 'to touch', '', '1_14', 1), ('INQVIO', 278, 'inquam',\n 'to say (used with direct speech)', '', '1_15', 3), ('QVI/1', 279,\n 'qui', 'who, which (rel. pronoun); what? which? (inter. adj.) ', '',\n '1_15', 2), ('ANTE/2', 280, 'ante', 'in front of (w/ acc.)', '', '1_15',\n 1), ('ARGVMENTVM', 281, 'argumentum', 'proof, indication, argument', '',\n '1_15', 1), ('CVR/1', 282, 'cur', 'why', '', '1_15', 1), ('DIFFICILIS',\n 283, 'difficilis', 'difficult', '', '1_15', 1), ('ECCE', 284, 'ecce',\n 'look here!', '', '1_15', 1), ('ETIAM', 285, 'etiam', 'even, also', '',\n '1_15', 1), ('FORSITAN', 286, 'forsan', 'perhaps', '', '1_15', 1), (\n 'NEGLIGO', 287, 'neglego', 'to neglect', '', '1_15', 1), ('PARVVS/2', \n 288, 'parvus', 'small', '', '1_15', 1), ('QVIS/1', 289, 'quis',\n 'who? which? (inter. 
pronoun)', '', '1_15', 1), ('RVSTICVS/2', 290,\n 'rusticus', 'rural, rustic', '', '1_15', 1), ('SAXVM', 291, 'saxum',\n 'stone, rock', '', '1_15', 1), ('SENECTVS/1', 292, 'senectus',\n 'old age', '', '1_15', 1), ('SICVT/1', 293, 'sicut', 'just as', '',\n '1_15', 1), ('STO', 294, 'sto', 'stand', '', '1_15', 1), ('VBIQVE', 295,\n 'ubique', 'everywhere', '', '1_15', 1), ('VERVS', 296, 'verus',\n 'real, true', '', '1_15', 1), ('VETVSTVS', 297, 'vetustus', 'old', '',\n '1_15', 1), ('VILLA', 298, 'villa', 'estate', '', '1_15', 1), ('VMQVAM',\n 299, 'umquam', 'ever', '', '1_15', 1), ('AVVNCVLVS', 300, 'avunculus',\n 'uncle', '', '1_16', 1), ('CAELVM/1', 301, 'caelum',\n 'sky, heaven, weather', '', '1_16', 1), ('CAVSA', 302, 'causa',\n 'cause, reason', '', '1_16', 1), ('CINIS', 303, 'cinis', 'ash', '',\n '1_16', 1), ('CLADES', 304, 'clades', 'disaster', '', '1_16', 1), (\n 'CLASSIS', 305, 'classis', 'fleet, class (of people)', '', '1_16', 1),\n ('FEMINA', 306, 'femina', 'woman', '', '1_16', 1), ('FVMVS', 307,\n 'fumus', 'smoke', '', '1_16', 1), ('FVNESTVS', 308, 'funestus',\n 'deadly', '', '1_16', 1), ('IGITVR', 309, 'igitur', 'therefore', '',\n '1_16', 1), ('INCENDIVM', 310, 'incendium', 'conflagration, eruption',\n '', '1_16', 1), ('LEGO/2', 311, 'lego', 'to read, choose', '', '1_16', \n 1), ('LITVS/2', 312, 'litus', 'shore', '', '1_16', 1), ('MATER', 313,\n 'mater', 'mother', '', '1_16', 1), ('MONS', 314, 'mons', 'mountain', '',\n '1_16', 1), ('NAVIS', 315, 'navis', 'ship', '', '1_16', 1), ('NVBES', \n 316, 'nubes', 'cloud', '', '1_16', 1), ('NVMQVAM', 317, 'numquam',\n 'never', '', '1_16', 1), ('OPPRIMO', 318, 'opprimo',\n 'to overwhelm, suppress', '', '1_16', 1), ('PARS', 319, 'pars', 'part',\n '', '1_16', 1), ('STVDEO', 320, 'studeo',\n 'to study, be eager for, be interested in (w/ dat.)', '', '1_16', 1), (\n 'DOMVS', 321, 'domus', 'home', '', '1_17', 2), ('ALO', 322, 'alo',\n 'to feed, nourish', '', '1_17', 1), ('AMITTO', 323, 'amitto', 'to lose',\n '', '1_17', 1), ('CORNV', 324, 'cornu', 'horn', '', '1_17', 1), (\n 'CORRIPIO', 325, 'corripio', 'to seize, occupy, engulf', '', '1_17', 1),\n ('CVRRO', 326, 'curro', 'to run', '', '1_17', 1), ('DEVASTO', 327,\n 'devasto', 'to lay waste', '', '1_17', 1), ('EXSTINGVO', 328,\n 'exstinguo', 'to extinguish', '', '1_17', 1), ('FACILE', 329, 'facile',\n 'easliy', '', '1_17', 1), ('IACIO', 330, 'iacio', 'to throw', '',\n '1_17', 1), ('IMPERATOR', 331, 'imperator', 'general, emperor', '',\n '1_17', 1), ('IMPETVS', 332, 'impetus', 'impetus, force, attack', '',\n '1_17', 1), ('INITIVM', 333, 'initium', 'beginning', '', '1_17', 1), (\n 'IVSSVS', 334, 'iussus', 'order', '', '1_17', 1), ('LOCVS', 335,\n 'locus',\n 'place (sing.); passages of a book (m. pl.); geographical places(n. 
pl.)',\n '', '1_17', 1), ('MANVS/1', 336, 'manus', 'hand', '', '1_17', 1), (\n 'MVRVS', 337, 'murus', 'wall', '', '1_17', 1), ('SINE', 338, 'sine',\n 'without (w/ abl.)', '', '1_17', 1), ('TEMPTO', 339, 'tempto', 'to try',\n '', '1_17', 1), ('TVMVLTVS', 340, 'tumultus', 'confusion', '', '1_17', \n 1), ('VENTVS', 341, 'ventus', 'wind', '', '1_17', 1), ('DVCO', 342,\n 'duco', 'to lead, take', '', '1_18', 2), ('ITA', 343, 'ita', 'yes', '',\n '1_18', 2), ('COLO/2', 344, 'colo', 'to worship, cultivate', '', '1_18',\n 1), ('CVM/3', 345, 'cum', 'when, after', '', '1_18', 2), ('DEA', 346,\n 'dea', 'goddess', '', '1_18', 1), ('DIES', 347, 'dies', 'day', '',\n '1_18', 1), ('DORMIO', 348, 'dormio', 'to sleep', '', '1_18', 1), (\n 'EXCITO/1', 349, 'excito', 'to awaken, rouse, stir up', '', '1_18', 1),\n ('EXCLAMO', 350, 'exclamo', 'to exclaim', '', '1_18', 1), ('FACIES', \n 351, 'facies', 'face', '', '1_18', 1), ('FATVM', 352, 'fatum',\n 'fate, destiny', '', '1_18', 1), ('MARITVS/1', 353, 'maritus',\n 'husband', '', '1_18', 1), ('MERIDIES', 354, 'meridies', 'midday', '',\n '1_18', 2), ('MVLTVM/2', 355, 'multum', 'much', '', '1_18', 1), (\n 'OCCVLTO', 356, 'occulto', 'to hide', '', '1_18', 1), ('PATER', 357,\n 'pater', 'father', '', '1_18', 2), ('POST/2', 358, 'post',\n 'after (w/ acc.)', '', '1_18', 1), ('QVAERO', 359, 'quaero',\n 'to look for, search', '', '1_18', 1), ('RES', 360, 'res',\n 'thing, matter', '', '1_18', 1), ('SI/2', 361, 'si', 'if', '', '1_18', \n 1), ('SOMNVS', 362, 'somnus', 'sleep', '', '1_18', 1), ('TAM', 363,\n 'tam', 'so ', '', '1_18', 1), ('VXOR', 364, 'uxor', 'wife', '', '1_18',\n 2), ('BARBA', 365, 'barba', 'beard', '', '1_19', 1), ('CARO/1', 366,\n 'caro', 'meat, flesh', '', '1_19', 1), ('CELERITER', 367, 'celeriter',\n 'swiftly', '', '1_19', 1), ('COQVO', 368, 'coquo', 'to cook', '',\n '1_19', 1), ('CRESCO', 369, 'cresco', 'to grow', '', '1_19', 1), (\n 'FEROX', 370, 'ferox', 'fierce, ferocious', '', '1_19', 1), ('FORIS/2',\n 371, 'foris', 'outside, in the open', '', '1_19', 1), ('HERBA', 372,\n 'herba', 'plant, vegetation', '', '1_19', 1), ('HIC/1', 373, 'hic',\n 'this', '', '1_19', 1), ('INTER', 374, 'inter',\n 'between, among (w/ acc.)', '', '1_19', 1), ('PELLIS', 375, 'pellis',\n 'skin, hide', '', '1_19', 1), ('POSTQVAM', 376, 'postquam', 'after', '',\n '1_19', 1), ('PROELIVM', 377, 'proelium', 'battle, combat', '', '1_19',\n 1), ('SANO', 378, 'sano', 'to heal', '', '1_19', 1), ('SEDEO', 379,\n 'sedeo', 'to sit', '', '1_19', 1), ('TERO', 380, 'tero',\n 'to wear out, rub', '', '1_19', 1), ('TERRIBILIS', 381, 'terribilis',\n 'terrifying', '', '1_19', 1), ('VESTIMENTVM', 382, 'vestimentum',\n 'garment, clothes (pl.)', '', '1_19', 1), ('VIVO', 383, 'vivo',\n 'to live', '', '1_19', 1), ('VVLNERO', 384, 'vulnero', 'to wound', '',\n '1_19', 1), ('VVLNVS', 385, 'vulnus', 'wound', '', '1_19', 1), (\n 'ABVNDO', 386, 'abundo', 'to abound with (w/ abl.)', '', '1_20', 1), (\n 'ADOLESCENS/2', 387, 'adulescens', 'young man, young lady', '', '1_20',\n 1), ('AEQVVS', 388, 'aequus', 'even', '', '1_20', 1), ('COR', 389,\n 'cor', 'heart', '', '1_20', 1), ('DELECTO', 390, 'delecto',\n 'to delight, please', '', '1_20', 1), ('DIVINVS/2', 391, 'divinus',\n 'divine', '', '1_20', 1), ('EGEO', 392, 'egeo',\n 'to lack something (abl.)', '', '1_20', 1), ('FVR', 393, 'fur', 'thief',\n '', '1_20', 1), ('FVRTVM', 394, 'furtum', 'theft', '', '1_20', 1), (\n 'HVMANVS', 395, 'humanus', 'human', '', '1_20', 1), ('ILLE', 396,\n 'ille', 'that', '', '1_20', 1), ('INIQVITAS', 397, 'iniquitas',\n 'injustice', 
'', '1_20', 1), ('LEX', 398, 'lex', 'law', '', '1_20', 1),\n ('LVDO', 399, 'ludo', 'to play', '', '1_20', 1), ('NOCTV', 400, 'noctu',\n 'during the night', '', '1_20', 1), ('PAENE', 401, 'paene', 'almost',\n '', '1_20', 1), ('PAVPER', 402, 'pauper', 'poor', '', '1_20', 1), (\n 'PLENVS', 403, 'plenus', 'full of (w/ gen. or abl.)', '', '1_20', 1), (\n 'POMVM', 404, 'pomum', 'fruit', '', '1_20', 1), ('PVNIO', 405, 'punio',\n 'to punish', '', '1_20', 1), ('ACCIPIO', 406, 'accipio',\n 'to accept, receive', '', '1_21', 1), ('ACCVSO', 407, 'accuso',\n 'to accuse someone (acc.) of something (gen.)', '', '1_21', 1), (\n 'ALIENVS/2', 408, 'alienus',\n 'foreign to, inconsistent with (w/ a/ab and abl.)', '', '1_21', 1), (\n 'AXIS', 409, 'axis', 'axle, axis', '', '1_21', 1), ('CIRCVM/2', 410,\n 'circum', 'around (w/ acc.)', '', '1_21', 1), ('CONSTANTIA', 411,\n 'constantia', 'constancy', '', '1_21', 1), ('DESCENDO', 412, 'descendo',\n 'to descend', '', '1_21', 1), ('DIVITIAE', 413, 'divitia',\n 'wealth, riches (pl.)', '', '1_21', 1), ('ERIPIO', 414, 'eripio',\n 'to snatch away', '', '1_21', 1), ('ERRO/2', 415, 'erro',\n 'to wander, make a mistake', '', '1_21', 1), ('EXTERNVS', 416,\n 'externus', 'outward, external', '', '1_21', 1), ('FORTVNA', 417,\n 'fortuna', 'fortune, the goddess Fortune', '', '1_21', 1), ('FVTVRVS', \n 418, 'futurus', 'about to be (from sum)', '', '1_21', 1), ('HONOR', 419,\n 'honor', 'honor, public office or distinction', '', '1_21', 1), (\n 'MVTO/2', 420, 'muto', 'to change', '', '1_21', 1), ('POSSIDEO', 421,\n 'possideo', 'to possess', '', '1_21', 1), ('PROCERTO', 422, 'pro',\n 'for certain, for sure', '', '1_21', 1), ('RECIPIO', 423, 'recipio',\n 'to take back', '', '1_21', 2), ('REPREHENDO', 424, 'reprehendo',\n 'to blame, rebuke', '', '1_21', 1), ('ROTA', 425, 'rota', 'wheel', '',\n '1_21', 1), ('TOLLO', 426, 'tollo', 'to lift up, raise; to destroy', '',\n '1_21', 1), ('VERSO', 427, 'verso', 'to turn', '', '1_21', 1), ('VLLVS',\n 428, 'ullus', 'any', '', '1_21', 1), ('CONSILIVM', 429, 'consilium',\n 'plan, advice', '', '2_1', 2), ('MERIDIES', 430, 'meridies', 'midday',\n '', '2_1', 2), ('PROPE/2', 431, 'prope', 'near', '', '2_1', 2), (\n 'ASPICIO', 432, 'aspicio', 'to look at, catch a glimpse of', '', '2_1',\n 1), ('ETET', 433, 'et', 'both…and…', '', '2_1', 1), ('GENS', 434,\n 'gens', 'tribe, population', '', '2_1', 1), ('GIGNO', 435, 'gigno',\n 'to give birth, produce', '', '2_1', 1), ('HODIE', 436, 'hodie',\n 'today', '', '2_1', 1), ('INCOLA', 437, 'incola', 'inhabitant', '',\n '2_1', 1), ('INSVLA', 438, 'insula', 'island', '', '2_1', 2), (\n 'INVENIO', 439, 'invenio', 'to come upon, find', '', '2_1', 1), ('MOS',\n 440, 'mos', 'custom, habit; morals (pl.)', '', '2_1', 1), ('MVNDVS/1', \n 441, 'mundus', 'world', '', '2_1', 1), ('NE/4', 442, 'ne',\n 'that not, not to, lest ', '', '2_1', 3), ('OCCVPO/2', 443, 'occupo',\n 'to occupy', '', '2_1', 1), ('ORTVS', 444, 'ortus',\n 'origin, beginning, raising', '', '2_1', 1), ('PISCIS', 445, 'piscis',\n 'a fish', '', '2_1', 1), ('PROCVL', 446, 'procul', 'far, far away', '',\n '2_1', 1), ('PROMITTO', 447, 'promitto', 'to promise', '', '2_1', 1), (\n 'SEPTENTRIONALIS', 448, 'septentrionalis', 'northern', '', '2_1', 1), (\n 'SITVS/2', 449, 'situs', 'located, situated', '', '2_1', 1), ('SOL', \n 450, 'sol', 'sun', '', '2_1', 1), ('VTINAM', 451, 'utinam', 'if only',\n '', '2_1', 2), ('GERO', 452, 'gero', 'to carry; to behave (w/ se)', '',\n '2_2', 2), ('ODIVM', 453, 'odium', 'hatred', '', '2_2', 2), ('VALEO', \n 454, 'valeo', 'to be able 
(w/ inf.); to be in good health', '', '2_2',\n    3), ('ALTVS', 455, 'altus', 'tall, deep', '', '2_2', 1), ('ANNVS', 456,\n    'annus', 'year', '', '2_2', 1), ('ARGENTVM', 457, 'argentum', 'silver',\n    '', '2_2', 1), ('AVRVM', 458, 'aurum', 'gold', '', '2_2', 1), ('BREVIS',\n    459, 'brevis', 'short', '', '2_2', 1), ('CLARVS', 460, 'clarus',\n    'clear, distinguished', '', '2_2', 1), ('CVSTOS', 461, 'custos',\n    'guard', '', '2_2', 1), ('EQVES', 462, 'eques', 'horseman', '', '2_2', \n    1), ('FINIS', 463, 'finis', 'end', '', '2_2', 1), ('GRAVIS', 464,\n    'gravis', 'serious, heavy', '', '2_2', 1), ('INTERDVM', 465, 'interdum',\n    'sometimes', '', '2_2', 1), ('LIS', 466, 'lis', 'dispute, quarrel', '',\n    '2_2', 1), ('MANE/2', 467, 'mane', 'in the morning', '', '2_2', 1), (\n    'ODIOHABEO', 468, 'odio', 'to hate somebody', '', '2_2', 1), ('SINO', \n    469, 'sino', 'to allow somebody (acc.) to do something (inf.)', '',\n    '2_2', 1), ('VEL/1', 470, 'vel', 'or', '', '2_2', 1), ('VESTIS', 471,\n    'vestis', 'clothes, attire', '', '2_2', 1), ('VOX', 472, 'vox', 'voice',\n    '', '2_2', 1), ('VT/4', 473, 'ut', 'that, to, in order to, so that', '',\n    '2_2', 4), ('VVLTVS', 474, 'vultus', 'face', '', '2_2', 1), ('VXOR', \n    475, 'uxor', 'wife', '', '2_3', 2), ('AT/2', 476, 'at', 'but', '',\n    '2_3', 1), ('CONIVX', 477, 'coniunx', 'spouse', '', '2_3', 1), (\n    'DISCIPVLA', 478, 'discipula', 'student', '', '2_3', 1), ('DISCO', 479,\n    'disco', 'to learn', '', '2_3', 1), ('DOMINVS', 480, 'dominus',\n    'master, lord', '', '2_3', 1), ('FAMA', 481, 'fama',\n    'fame, name, reputation', '', '2_3', 1), ('FRATER', 482, 'frater',\n    'brother', '', '2_3', 1), ('IMPROBVS', 483, 'improbus', 'wicked, bad',\n    '', '2_3', 1), ('IVNGO', 484, 'iungo', 'to join', '', '2_3', 1), (\n    'MAGISTER', 485, 'magister', 'teacher', '', '2_3', 1), ('MATRIMONIVM', \n    486, 'matrimonium', 'marriage', '', '2_3', 1), ('NE/4', 487, 'ne',\n    'that not, not to, lest ', '', '2_3', 3), ('NVSQVAM', 488, 'nusquam',\n    'nowhere', '', '2_3', 1), ('PARIO/2', 489, 'pario', 'to give birth to',\n    '', '2_3', 1), ('PERDO', 490, 'perdo', 'to lose, waste', '', '2_3', 1),\n    ('SALVS', 491, 'salus', 'health, welfare', '', '2_3', 1), (\n    'SALVTEMDICERE', 492, 'salutem',\n    'to greet (customary opening to letter) ', '', '2_3', 1), ('SCRIBO', \n    493, 'scribo', 'to write', '', '2_3', 1), ('VT/4', 494, 'ut',\n    'that, to, in order to, so that', '', '2_3', 4), ('VXOREMDEDVCERE', 495,\n    'uxorem', 'to marry a woman, to take as a wife', '', '2_3', 1), (\n    'NEC/2', 496, 'nec', 'nor; and not', '', '2_4', 2), ('RECIPIO', 497,\n    'recipio', 'to take back', '', '2_4', 2), ('AGMEN', 498, 'agmen',\n    'marching column', '', '2_4', 1), ('APERIO', 499, 'aperio', 'to open',\n    '', '2_4', 1), ('COEPIO', 500, 'coepi', 'to begin (w/ inf.)', '', '2_4',\n    1), ('DEFENDO', 501, 'defendo', 'to defend', '', '2_4', 1), ('EDO/1', \n    502, 'edo', 'to produce, give forth', '', '2_4', 1), ('EXTRA/2', 503,\n    'extra', 'outside of (w/ acc.)', '', '2_4', 1), ('FVRO', 504, 'furo',\n    'to rage, be insane', '', '2_4', 1), ('INGENS', 505, 'ingens', 'huge',\n    '', '2_4', 1), ('INVADO/2', 506, 'invado', 'to burst in', '', '2_4', 1),\n    ('LIGNEVS', 507, 'ligneus', 'made of wood', '', '2_4', 1), ('NEQVENEC',\n    508, 'neque', 'neither…nor…', '', '2_4', 1), ('PARCO', 509, 'parco',\n    'to spare somebody/thing (w/ dat.)', '', '2_4', 1), ('PONS', 510,\n    'pons', 'bridge', '', '2_4', 1), ('PORTA', 511, 'porta', 'gate', '',\n    '2_4', 1), ('PRIMO', 512, 'primo', 'at first', '', '2_4', 1), ('QVAM/1',\n    513, 'quam', 'than (w/ comp. 
words)', '', '2_4', 2), ('QVANTVS/1', 514,\n 'quantus', 'how great, how much (inter. or rel. adj.)', '', '2_4', 1),\n ('RESISTO', 515, 'resisto', 'to resist (w/ dat.)', '', '2_4', 1), (\n 'SIMVL/1', 516, 'simul', 'at the same time', '', '2_4', 1), ('TVTVS', \n 517, 'tutus', 'safe', '', '2_4', 1), ('VACVVS', 518, 'vacuus',\n 'empty of (w/ abl.)', '', '2_4', 1), ('VALEO', 519, 'valeo',\n 'to be able (w/ inf.); to be in good health', '', '2_4', 3), ('VICTOR',\n 520, 'victor', 'victor', '', '2_4', 1), ('VTINAM', 521, 'utinam',\n 'if only', '', '2_4', 2), ('BIBO/2', 522, 'bibo', 'to drink', '', '2_5',\n 1), ('CARMEN/1', 523, 'carmen', 'song, poem', '', '2_5', 1), ('CIBVS', \n 524, 'cibus', 'food', '', '2_5', 1), ('DVLCIS', 525, 'dulcis', 'sweet',\n '', '2_5', 1), ('FLVMEN', 526, 'flumen', 'river', '', '2_5', 1), (\n 'IMMEMOR', 527, 'immemor', 'forgetful of (w/ gen.)', '', '2_5', 1), (\n 'IOCVS', 528, 'iocus', 'joke', '', '2_5', 1), ('IVVENTVS', 529,\n 'iuventus', 'youth', '', '2_5', 1), ('LEVIS/1', 530, 'levis', 'light',\n '', '2_5', 1), ('MENS', 531, 'mens', 'mind, spirit', '', '2_5', 1), (\n 'NE/4', 532, 'ne', 'that not, not to, lest ', '', '2_5', 3), ('ORO', \n 533, 'oro', 'to ask, entreat', '', '2_5', 1), ('PLACEO', 534, 'placeo',\n 'to please, be agreeable to somebody', '', '2_5', 1), ('PROXIMVS/2', \n 535, 'proximus', 'nearest', '', '2_5', 1), ('TAMQVAM/2', 536, 'tam',\n 'so…as…', '', '2_5', 1), ('VEHEMENS', 537, 'vehemens',\n 'violent, vehement', '', '2_5', 1), ('VETVS', 538, 'vetus', 'old', '',\n '2_5', 1), ('VINVM', 539, 'vinum', 'wine', '', '2_5', 1), ('VIRTVS', \n 540, 'virtus', 'courage, virtue', '', '2_5', 1), ('VITIVM', 541,\n 'vitium', 'vice', '', '2_5', 1), ('VT/4', 542, 'ut',\n 'that, to, in order to, so that', '', '2_5', 4), ('PATER', 543, 'pater',\n 'father', '', '2_6', 2), ('DECIPIO', 544, 'decipio', 'to deceive', '',\n '2_6', 1), ('DILIGO/3', 545, 'diligo', 'to love, esteem highly', '',\n '2_6', 1), ('DVO', 546, 'duo', 'two', '', '2_6', 1), ('EXERCITVS/1', \n 547, 'exercitus', 'army', '', '2_6', 1), ('FIDELIS/2', 548, 'fidelis',\n 'faithful, loyal', '', '2_6', 1), ('HERES', 549, 'heres', 'heir', '',\n '2_6', 1), ('IMPERIVM', 550, 'imperium', 'rule, empire, power', '',\n '2_6', 1), ('INOPIA', 551, 'inopia', 'helplessness, want', '', '2_6', 1\n ), ('LAVDO', 552, 'laudo', 'to praise', '', '2_6', 1), ('NECESSEEST', \n 553, 'necesse',\n 'it is necessary for someone (dat.) to do something (inf.)', '', '2_6',\n 1), ('NEMO', 554, 'nemo', 'no one', '', '2_6', 1), ('PAVLO', 555,\n 'paulo', 'a little bit, to a small extent', '', '2_6', 1), ('QVAM/1', \n 556, 'quam', 'than (w/ comp. 
words)', '', '2_6', 2), ('QVANTVM/3', 557,\n 'quantum', 'to what extent, how much', '', '2_6', 1), ('RESTITVO', 558,\n 'restituo', 'to restore', '', '2_6', 1), ('SATIS/2', 559, 'satis',\n 'enough, sufficiently', '', '2_6', 1), ('SECVNDVS/1', 560, 'secundus',\n 'second', '', '2_6', 1), ('TERTIVS', 561, 'tertius', 'third', '', '2_6',\n 1), ('TRES', 562, 'tres', 'three', '', '2_6', 1), ('TRISTIS', 563,\n 'tristis', 'sad', '', '2_6', 1), ('VEHEMENTER', 564, 'vehementer',\n 'strongly, vehemently', '', '2_6', 1), ('NOLO', 565, 'nolo',\n 'not to want, to be unwilling', '', '2_7', 2), ('AETAS', 566, 'aetas',\n 'age', '', '2_7', 1), ('FIDES/2', 567, 'fides', 'faith', '', '2_7', 1),\n ('FVNDO/2', 568, 'fundo', 'to pour', '', '2_7', 1), ('GLORIA', 569,\n 'gloria', 'glory', '', '2_7', 1), ('LIBERTAS', 570, 'libertas',\n 'freedom', '', '2_7', 1), ('LVMEN', 571, 'lumen', 'light', '', '2_7', 1\n ), ('MALO', 572, 'malo', 'to prefer', '', '2_7', 1), ('ORNATVS/1', 573,\n 'ornatus', 'adorned, ornate, elaborate', '', '2_7', 1), ('OTIVM', 574,\n 'otium', 'leisure, free time', '', '2_7', 1), ('POTENS', 575, 'potens',\n 'powerful', '', '2_7', 1), ('PVBLICVS/2', 576, 'publicus', 'common', '',\n '2_7', 1), ('QVALIS/1', 577, 'qualis', 'what sort of? (inter. adj.)',\n '', '2_7', 1), ('RESPVBLICA', 578, 'res', 'state', '', '2_7', 1), (\n 'STVDIOSVS', 579, 'studiosus', 'fond of (w/ gen.)', '', '2_7', 1), (\n 'TAMQVAM/1', 580, 'tamquam', 'as', '', '2_7', 1), ('TOT', 581, 'tot',\n 'so many', '', '2_7', 1), ('TRAHO', 582, 'traho', 'to drag, draw', '',\n '2_7', 1), ('VBI/1', 583, 'ubi', 'where? (inter. adv)', '', '2_7', 1),\n ('VIX', 584, 'vix', 'hardly', '', '2_7', 1), ('VNVS', 585, 'unus',\n 'one', '', '2_7', 1), ('VOLO/3', 586, 'volo', 'to want', '', '2_7', 1),\n ('VTILIS', 587, 'utilis', 'useful', '', '2_7', 1), ('ADHVC', 588,\n 'adhuc', 'still, up to this time', '', '2_8', 1), ('ANTIQVVS', 589,\n 'antiquus', 'ancient', '', '2_8', 1), ('ARS', 590, 'ars',\n 'science, art, skill', '', '2_8', 1), ('DOMINOR', 591, 'dominor',\n 'to dominate, rule', '', '2_8', 1), ('HORTOR', 592, 'hortor',\n 'to exhort, urge', '', '2_8', 1), ('LATINE', 593, 'Latine', 'in Latin',\n '', '2_8', 1), ('LATINVS/A', 594, 'Latinus',\n 'Latin, pertaining to Latin', '', '2_8', 1), ('LINGVA', 595, 'lingua',\n 'language; tongue', '', '2_8', 1), ('LOQVOR', 596, 'loquor', 'to speak',\n '', '2_8', 1), ('MAGIS/2', 597, 'magis', 'more', '', '2_8', 1), (\n 'MAIOR', 598, 'maior', 'bigger; greater', '', '2_8', 1), ('MAXIMVS', \n 599, 'maximus', 'greatest', '', '2_8', 1), ('MELIOR', 600, 'melior',\n 'better', '', '2_8', 1), ('MINIMVS', 601, 'minimus', 'smallest', '',\n '2_8', 1), ('MINVS', 602, 'minus', 'less', '', '2_8', 2), ('OPTIMVS', \n 603, 'optimus', 'best', '', '2_8', 1), ('PARTIOR', 604, 'partior',\n 'to divide, distribute', '', '2_8', 1), ('PATIOR', 605, 'patior',\n 'to endure, tolerate, suffer', '', '2_8', 1), ('PEIOR', 606, 'peior',\n 'worse', '', '2_8', 1), ('PESSIMVS', 607, 'pessimus', 'worst', '',\n '2_8', 1), ('PLVRIMVS', 608, 'plurimus', 'most', '', '2_8', 1), ('PLVS',\n 609, 'plus', 'more', '', '2_8', 1), ('SEQVOR', 610, 'sequor',\n 'to follow', '', '2_8', 1), ('VEREOR', 611, 'vereor',\n 'to fear, respect', '', '2_8', 1), ('ADDO', 612, 'addo', 'to add', '',\n '2_9', 1), ('AVRIS', 613, 'auris', 'ear', '', '2_9', 1), ('CONOR', 614,\n 'conor', 'to try', '', '2_9', 1), ('DEMITTO', 615, 'demitto',\n 'to send down', '', '2_9', 1), ('DISSIMILIS', 616, 'dissimilis',\n 'dissimilar ', '', '2_9', 1), ('FACILIS', 617, 'facilis', 'easy', '',\n '2_9', 1), 
('FERO', 618, 'fero', 'to carry, bear', '', '2_9', 1), (\n 'FIO', 619, 'fio', 'to be made, become; (impersonally) to happen', '',\n '2_9', 1), ('FRIGVS', 620, 'frigus', 'cold', '', '2_9', 1), ('GENVS/1',\n 621, 'genus', 'kind', '', '2_9', 1), ('GLACIES', 622, 'glacies', 'ice',\n '', '2_9', 1), ('GRACILIS', 623, 'gracilis', 'slender', '', '2_9', 1),\n ('HVMILIS', 624, 'humilis', 'low, humble', '', '2_9', 1), ('ITER', 625,\n 'iter', 'road, trip ', '', '2_9', 1), ('LABOR/2', 626, 'labor',\n 'to slide, slip, glide down', '', '2_9', 1), ('MODEROR', 627, 'moderor',\n 'to manage, direct, guide', '', '2_9', 1), ('NIX', 628, 'nix', 'snow',\n '', '2_9', 1), ('ONVS', 629, 'onus', 'weight, burden', '', '2_9', 1), (\n 'PERVENIO', 630, 'pervenio', 'to arrive', '', '2_9', 1), ('PROGREDIOR',\n 631, 'progredior', 'to go forward, proceed', '', '2_9', 1), (\n 'QVOTIENS/2', 632, 'quotiens', 'as often as', '', '2_9', 1), (\n 'SIMVLAC/2', 633, 'simulac', 'as soon as', '', '2_9', 1), ('SVVS', 634,\n 'suus', 'his, her, its, their', '', '2_10', 2), ('AEDES', 635, 'aedes',\n 'temple; pl. dwelling, house', '', '2_10', 1), ('EO/1', 636, 'eo',\n 'to go ', '', '2_10', 1), ('IVCVNDVS', 637, 'iucundus',\n 'pleasant, nice', '', '2_10', 1), ('LABOR/1', 638, 'labor',\n 'labor, toil', '', '2_10', 1), ('LAEDO', 639, 'laedo', 'to harm', '',\n '2_10', 1), ('LIBER/2', 640, 'liber', 'free', '', '2_10', 1), ('LVCRVM',\n 641, 'lucrum', 'profit, gain', '', '2_10', 1), ('MARITIMVS', 642,\n 'maritimus', 'maritime', '', '2_10', 1), ('MODVS', 643, 'modus',\n 'way, method, manner', '', '2_10', 1), ('PAVLISPER', 644, 'paulisper',\n 'for a little while', '', '2_10', 1), ('PECVNIA', 645, 'pecunia',\n 'money', '', '2_10', 1), ('PLACIDVS', 646, 'placidus', 'peaceful, calm',\n '', '2_10', 1), ('POTIVS', 647, 'potius', 'rather', '', '2_10', 1), (\n 'PROSPER', 648, 'prosper', 'fortunate, prosperous', '', '2_10', 1), (\n 'REDDO', 649, 'reddo', 'to give back', '', '2_10', 1), ('SARCINA', 650,\n 'sarcina', 'burden, baggage', '', '2_10', 1), ('SCELESTVS', 651,\n 'scelestus', 'wicked', '', '2_10', 1), ('SEMEL', 652, 'semel', 'once',\n '', '2_10', 1), ('SERENVS', 653, 'serenus', 'calm, clear', '', '2_10', \n 1), ('PARVM/2', 654, 'parum', 'too little', '', '2_11', 2), ('ALTER', \n 655, 'alter', 'the other (of two)', '', '2_11', 1), ('GEMMA', 656,\n 'gemma', 'gem, precious stone', '', '2_11', 1), ('LEGATVS', 657,\n 'legatus', 'ambassador', '', '2_11', 1), ('MAGNIHABEO', 658, 'magni',\n 'to esteem a lot', '', '2_11', 1), ('MINVS', 659, 'minus', 'less', '',\n '2_11', 2), ('NESCIO', 660, 'nescio', 'not to know', '', '2_11', 1), (\n 'NEVTER', 661, 'neuter', 'neither, none (of two)', '', '2_11', 1), (\n 'NVLLVS', 662, 'nullus', 'none', '', '2_11', 1), ('OPERAEPRETIVMEST', \n 663, 'operae', 'it is worthwhile', '', '2_11', 1), ('POPVLVS/1', 664,\n 'populus', 'a people, populace', '', '2_11', 1), ('QVOMODO/1', 665,\n 'quomodo', 'how', '', '2_11', 1), ('SALVTO', 666, 'saluto', 'to greet ',\n '', '2_11', 1), ('SERVVS/1', 667, 'servus', 'slave, servant', '',\n '2_11', 1), ('SOLVS', 668, 'solus', 'alone, only', '', '2_11', 1), (\n 'SPECTO', 669, 'specto', 'to watch', '', '2_11', 1), ('TACEO', 670,\n 'taceo', 'to be silent, keep quiet', '', '2_11', 1), ('TOTVS', 671,\n 'totus', 'whole, entire', '', '2_11', 1), ('TVRPIS', 672, 'turpis',\n 'shameful, disgraceful', '', '2_11', 1), ('VTER/4', 673, 'uter',\n 'who, which (of two)?', '', '2_11', 1), ('VTOR', 674, 'utor',\n 'to use (w/ abl.)', '', '2_11', 1), ('CVM/3', 675, 'cum', 'when, after',\n '', '2_12', 2), ('INQVIO', 
676, 'inquam',\n    'to say (used with direct speech)', '', '2_12', 3), ('TAMEN', 677,\n    'tamen', 'however', '', '2_12', 2), ('CARVS', 678, 'carus', 'dear', '',\n    '2_12', 1), ('INSVLA', 679, 'insula', 'island', '', '2_12', 2), (\n    'MORIOR', 680, 'morior', 'to die', '', '2_12', 1), ('NIMIS', 681,\n    'nimis', 'too much', '', '2_12', 1), ('NISI', 682, 'nisi',\n    'if not, unless', '', '2_12', 1), ('OFFICIVM', 683, 'officium', 'duty',\n    '', '2_12', 1), ('ORBIS', 684, 'orbis', 'circle', '', '2_12', 1), (\n    'ORBISTERRARVM', 685, 'orbis', 'the earth, the world', '', '2_12', 1),\n    ('PROBO', 686, 'probo', 'to approve ', '', '2_12', 1), ('QVAMQVAM/2', \n    687, 'quamquam', 'although', '', '2_12', 1), ('QVAMVIS/1', 688,\n    'quamvis', 'although', '', '2_12', 1), ('QVIA', 689, 'quia', 'because',\n    '', '2_12', 1), ('QVIDEM', 690, 'quidem', 'indeed', '', '2_12', 1), (\n    'QVOD/1', 691, 'quod', 'because', '', '2_12', 1), ('SENTENTIA', 692,\n    'sententia', 'opinion, point of view', '', '2_12', 1), ('SORS', 693,\n    'sors', 'lot', '', '2_12', 1), ('SPERO', 694, 'spero', 'to hope', '',\n    '2_12', 1), ('SPES', 695, 'spes', 'hope', '', '2_12', 1), ('ATQVE/1', \n    696, 'atque', 'as', '', '2_13', 2), ('ABSENS', 697, 'absens',\n    'away, absent', '', '2_13', 1), ('ABSVM/1', 698, 'absum', 'to be away',\n    '', '2_13', 1), ('BENEVOLENTIA', 699, 'benevolentia', 'good will', '',\n    '2_13', 1), ('DECLARO', 700, 'declaro',\n    'to demonstrate, show, make known, reveal', '', '2_13', 1), ('IDEM', \n    701, 'idem', 'the same', '', '2_13', 1), ('IPSE', 702, 'ipse', 'self',\n    '', '2_13', 1), ('IRASCOR', 703, 'irascor', 'to be angry at (w/ dat.)',\n    '', '2_13', 1), ('ISTE', 704, 'iste', 'that (of yours)', '', '2_13', 1),\n    ('MIROR', 705, 'miror', 'to marvel, be surprised at', '', '2_13', 1), (\n    'MVLTITVDO', 706, 'multitudo', 'crowd, throng', '', '2_13', 1), ('NEGO',\n    707, 'nego', 'to deny ', '', '2_13', 1), ('NVMERO/1', 708, 'numero',\n    'to number, count among', '', '2_13', 1), ('OFFENDO', 709, 'offendo',\n    'to happen upon, offend', '', '2_13', 1), ('REDEO/1', 710, 'redeo',\n    'to go back, return', '', '2_13', 1), ('REFERO', 711, 'refero',\n    'to carry back, report', '', '2_13', 1), ('SOCIVS/1', 712, 'socius',\n    'associate, partner, ally', '', '2_13', 1), ('TALIS', 713, 'talis',\n    'such a', '', '2_13', 1), ('TVRRIS', 714, 'turris', 'tower', '', '2_13',\n    1), ('VENIA', 715, 'venia', 'pardon, indulgence, forgiveness', '',\n    '2_13', 1), ('VERSOR', 716, 'versor',\n    'to be situated in, be occupied in ', '', '2_13', 1), ('VIRGA', 717,\n    'virga', 'twig, stick', '', '2_13', 1), ('VOLVNTAS', 718, 'voluntas',\n    'will', '', '2_13', 1), ('AFFIRMO', 719, 'affirmo',\n    'to assert, maintain', '', '2_14', 1), ('CIRCVMEO/1', 720, 'circumeo',\n    'to go around', '', '2_14', 1), ('CONTINEO', 721, 'contineo',\n    'to hold, keep together, contain', '', '2_14', 1), ('COTIDIANVS', 722,\n    'cottidianus', 'of every day, daily', '', '2_14', 1), ('ELEMENTVM', 723,\n    'elementum', 'element', '', '2_14', 1), ('ERGO/2', 724, 'ergo',\n    'therefore', '', '2_14', 1), ('GRAVITAS', 725, 'gravitas',\n    'weight, gravity', '', '2_14', 1), ('IMMENSVS', 726, 'immensus',\n    'immeasurable, immense, endless', '', '2_14', 1), ('INFINITVS', 727,\n    'infinitus', 'boundless, unlimited', '', '2_14', 1), ('MAXIME', 728,\n    'maxime', 'most', '', '2_14', 1), ('MEDIVS', 729, 'medius', 'middle',\n    '', '2_14', 1), ('MOTVS', 730, 'motus', 'motion, movement', '', '2_14',\n    1), ('MVLTO/2', 731, 'multo', 'by much', '', '2_14', 1), ('NATVRA', 732,\n    'natura', 'nature', '', '2_14', 1), ('NECESSARIO', 733, 
'necessario',\n 'necessarily', '', '2_14', 1), ('PERPERAM', 734, 'perperam',\n 'wrongly, incorrectly', '', '2_14', 1), ('PONDVS', 735, 'pondus',\n 'weight', '', '2_14', 1), ('PRAESERTIM', 736, 'praesertim',\n 'especially', '', '2_14', 1), ('QVIES', 737, 'quies', 'rest, repose',\n '', '2_14', 1), ('VNDIQVE', 738, 'undique',\n 'from all parts, from everywhere', '', '2_14', 1), ('VOLVO', 739,\n 'volvo', 'to turn round', '', '2_14', 1), ('VT/4', 740, 'ut',\n 'that, to, in order to, so that', '', '2_14', 4), ('ANIMADVERTO', 741,\n 'animadverto', 'to notice', '', '2_15', 1), ('APPROPINQVO', 742,\n 'appropinquo', 'to approach (w/ dat or ad + acc.)', '', '2_15', 1), (\n 'CERNO', 743, 'cerno', 'to see, distinguish with the eyes', '', '2_15',\n 1), ('CIRCA/2', 744, 'circa', 'around (w/ acc.)', '', '2_15', 1), (\n 'CLAMO', 745, 'clamo', 'to shout, scream', '', '2_15', 1), ('FINGO', \n 746, 'fingo', 'to imagine, form in the mind', '', '2_15', 1), (\n 'IMPINGO', 747, 'impingo', 'to push, strike, inflict', '', '2_15', 1),\n ('INFLIGO', 748, 'infligo', 'to strike on or against, inflict', '',\n '2_15', 1), ('ITERVM', 749, 'iterum', 'again', '', '2_15', 1), (\n 'OPPIDVM', 750, 'oppidum', 'town', '', '2_15', 1), ('PERCVTIO', 751,\n 'percutio', 'to strike through ', '', '2_15', 1), ('PRAEDITVS', 752,\n 'praeditus', 'endowed with, possessed of (w/ abl.)', '', '2_15', 1), (\n 'REPELLO', 753, 'repello', 'to push back, thrust back', '', '2_15', 1),\n ('RIDEO', 754, 'rideo', 'to laugh', '', '2_15', 1), ('RVMPO', 755,\n 'rumpo', 'to break, tear', '', '2_15', 1), ('SEDES', 756, 'sedes',\n 'seat, abode', '', '2_15', 1), ('SIC', 757, 'sic', 'in such a way', '',\n '2_15', 1), ('SIDVS', 758, 'sidus', 'constellation', '', '2_15', 1), (\n 'TELVM', 759, 'telum', 'spear, javelin', '', '2_15', 1), ('VEHO', 760,\n 'veho', 'to drive, carry', '', '2_15', 1)]\nsection_list = {'1.1': 'start', '1.2': '1.1', '1.3': '1.2', '1.4': '1.3',\n '1.5': '1.4', '1.6': '1.5', '1.7': '1.6', '1.8': '1.7', '1.9': '1.8',\n '1.10': '1.9', '1.11': '1.10', '1.12': '1.11', '1.13': '1.12', '1.14':\n '1.13', '1.15': '1.14', '1.16': '1.15', '1.17': '1.16', '1.18': '1.17',\n '1.19': '1.18', '1.20': '1.19', '1.21': '1.20', '2.1': '1.21', '2.2':\n '2.1', '2.3': '2.2', '2.4': '2.3', '2.5': '2.4', '2.6': '2.5', '2.7':\n '2.6', '2.8': '2.7', '2.9': '2.8', '2.10': '2.9', '2.11': '2.10',\n '2.12': '2.11', '2.13': '2.12', '2.14': '2.13', '2.15': '2.14', 'end':\n '2.15', 'start': 'start'}\ntitle = 'Latin for the New Millennium Vols 1 and 2 (Tunberg-Minkova)'\nsection_level = 2\nlanguage = 'Latin'\nbook = text.Text(title, section_words, the_text, section_list,\n section_level, language, True, False)\n",
"step-3": "import text\nnan = ''\nsection_words = {'start': -1, '1.1': 17, '1.2': 38, '1.3': 55, '1.4': 76,\n '1.5': 95, '1.6': 114, '1.7': 133, '1.8': 151, '1.9': 170, '1.10': 190,\n '1.11': 209, '1.12': 233, '1.13': 257, '1.14': 277, '1.15': 299, '1.16':\n 320, '1.17': 341, '1.18': 364, '1.19': 385, '1.20': 405, '1.21': 428,\n '2.1': 451, '2.2': 474, '2.3': 495, '2.4': 521, '2.5': 542, '2.6': 564,\n '2.7': 587, '2.8': 611, '2.9': 633, '2.10': 653, '2.11': 674, '2.12': \n 695, '2.13': 718, '2.14': 740, '2.15': 760, 'end': -2}\nthe_text = [('AGRICOLA', 0, 'agricola', 'farmer', '', '1_1', 1), ('AMBVLO',\n 1, 'ambulo', 'to walk', '', '1_1', 2), ('AMO', 2, 'amo', 'to love', '',\n '1_1', 2), ('AQVA', 3, 'aqua', 'water', '', '1_1', 1), ('ATHLETA', 4,\n 'athleta', 'athlete', '', '1_1', 1), ('BENE', 5, 'bene', 'well', '',\n '1_1', 1), ('CVRO', 6, 'curo', 'to take care for/of', '', '1_1', 2), (\n 'ET/2', 7, 'et', 'and', '', '1_1', 1), ('FILIA', 8, 'filia', 'daughter',\n '', '1_1', 1), ('ITAQVE', 9, 'itaque', 'and so', '', '1_1', 1), ('LVPA',\n 10, 'lupa', 'she–wolf', '', '1_1', 1), ('NAVTA', 11, 'nauta', 'sailor',\n '', '1_1', 1), ('POETA', 12, 'poeta', 'poet', '', '1_1', 1), ('POSTEA',\n 13, 'postea', 'afterwards', '', '1_1', 1), ('PVELLA', 14, 'puella',\n 'girl', '', '1_1', 1), ('ROMA/N', 15, 'Roma', 'Rome', '', '1_1', 1), (\n 'SVM/1', 16, 'sum', 'to be', '', '1_1', 3), ('TERRA', 17, 'terra',\n 'land', '', '1_1', 1), ('AMBVLO', 18, 'ambulo', 'to walk', '', '1_2', 2\n ), ('AMO', 19, 'amo', 'to love', '', '1_2', 2), ('CVRO', 20, 'curo',\n 'to take care for/of', '', '1_2', 2), ('SVM/1', 21, 'sum', 'to be', '',\n '1_2', 3), ('DEBEO', 22, 'debeo', 'ought, must, should; to owe', '',\n '1_2', 1), ('DIV', 23, 'diu', 'for a long time', '', '1_2', 1), ('EGO',\n 24, 'ego', 'I', '', '1_2', 3), ('EXSPECTO', 25, 'exspecto',\n 'to wait for, await, expect', '', '1_2', 1), ('FABVLA/1', 26, 'fabula',\n 'story', '', '1_2', 1), ('FORMA', 27, 'forma', 'form,appearance', '',\n '1_2', 1), ('HABEO', 28, 'habeo', 'to have', '', '1_2', 1), ('HABITO', \n 29, 'habito', 'to live, dwell', '', '1_2', 1), ('NARRO', 30, 'narro',\n 'to tell', '', '1_2', 1), ('NON', 31, 'non', 'not', '', '1_2', 1), (\n 'NVNC', 32, 'nunc', 'now', '', '1_2', 1), ('PARO/2', 33, 'paro',\n 'to prepare, get ready, design', '', '1_2', 2), ('PATRIA', 34, 'patria',\n 'fatherland', '', '1_2', 1), ('TENEO', 35, 'teneo', 'to hold', '',\n '1_2', 1), ('TV', 36, 'tu', 'you', '', '1_2', 3), ('VIDEO', 37, 'video',\n 'to see', '', '1_2', 1), ('VOCO', 38, 'voco', 'to call', '', '1_2', 1),\n ('EGO', 39, 'ego', 'I', '', '1_3', 3), ('TV', 40, 'tu', 'you', '',\n '1_3', 3), ('AGER', 41, 'ager', 'field', '', '1_3', 1), ('AMICVS/1', 42,\n 'amicus', 'friend', '', '1_3', 1), ('ANIMVS', 43, 'animus',\n 'spirit, soul, mind', '', '1_3', 1), ('CASA', 44, 'casa',\n 'little house, cottage', '', '1_3', 1), ('CVM/2', 45, 'cum',\n 'with (w/ abl.)', '', '1_3', 1), ('DEINDE', 46, 'deinde', 'then', '',\n '1_3', 1), ('DOMVS', 47, 'domus', 'home', '', '1_3', 2), ('FILIVS', 48,\n 'filius', 'son', '', '1_3', 1), ('IN', 49, 'in',\n 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_3', 2), ('PVER',\n 50, 'puer', 'boy', '', '1_3', 1), ('RIVVS', 51, 'rivus',\n 'brook, stream', '', '1_3', 1), ('TIMEO', 52, 'timeo',\n 'to fear, to be afraid', '', '1_3', 1), ('VALDE', 53, 'valde',\n 'very, exceedingly', '', '1_3', 1), ('VIA', 54, 'via', 'road', '',\n '1_3', 1), ('VIR', 55, 'vir', 'man', '', '1_3', 1), ('IN', 56, 'in',\n 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_4', 2), 
('AD/2',\n 57, 'ad', 'into, towards, to (w/ acc.)', '', '1_4', 1), ('ARMATVS/2', \n 58, 'armatus', 'armed', '', '1_4', 1), ('AVTEM', 59, 'autem', 'however',\n '', '1_4', 1), ('BELLVM', 60, 'bellum', 'war', '', '1_4', 1), ('BONVS',\n 61, 'bonus', 'good', '', '1_4', 1), ('CASTRA/2', 62, 'castra', 'camp',\n '', '1_4', 1), ('DO', 63, 'do', 'to give', '', '1_4', 1), ('DOLVS', 64,\n 'dolus', 'trickery, deception', '', '1_4', 1), ('EX', 65, 'e',\n 'from, out of (w/ abl.)', '', '1_4', 1), ('INTRO/2', 66, 'intro',\n 'to enter', '', '1_4', 1), ('IVBEO', 67, 'iubeo',\n 'to order somebody (acc.) to do something (inf.)', '', '1_4', 1), (\n 'IVSTVS', 68, 'iustus', 'legitimate, open, just', '', '1_4', 1), (\n 'MAGNVS', 69, 'magnus', 'large, great, important', '', '1_4', 1), (\n 'MALVS/3', 70, 'malus', 'bad', '', '1_4', 1), ('PRAECLARVS', 71,\n 'praeclarus', 'famous, distinguished', '', '1_4', 1), ('PRAEMIVM', 72,\n 'praemium', 'reward', '', '1_4', 1), ('ROMANVS/A', 73, 'Romanus',\n 'Roman; the Romans (pl.)', '', '1_4', 1), ('SED', 74, 'sed', 'but', '',\n '1_4', 1), ('VENENVM', 75, 'venenum', 'poison', '', '1_4', 1), (\n 'VINCVLVM', 76, 'vinculum', 'chain, fetter', '', '1_4', 1), ('PARO/2', \n 77, 'paro', 'to prepare, get ready, design', '', '1_5', 2), ('AB', 78,\n 'a', 'by, from (w/ abl.)', '', '1_5', 1), ('AVXILIVM', 79, 'auxilium',\n 'help', '', '1_5', 1), ('COGITO', 80, 'cogito', 'to think', '', '1_5', \n 1), ('CONSILIVM', 81, 'consilium', 'plan, advice', '', '1_5', 2), ('DE',\n 82, 'de', 'about, concerning, down from (w/ abl.)', '', '1_5', 1), (\n 'DOLEO', 83, 'doleo', 'to feel pain, to be hurt', '', '1_5', 1), (\n 'EPISTOLA', 84, 'epistula', 'letter', '', '1_5', 1), ('FAMILIA', 85,\n 'familia', 'family, household', '', '1_5', 1), ('GAVDIVM', 86,\n 'gaudium', 'joy', '', '1_5', 1), ('LACRIMA', 87, 'lacrima', 'tear', '',\n '1_5', 1), ('LONGE', 88, 'longe', 'far', '', '1_5', 1), ('LONGVS', 89,\n 'longus', 'long', '', '1_5', 1), ('MISER', 90, 'miser',\n 'wretched, sad, miserable', '', '1_5', 1), ('NAM', 91, 'nam',\n 'for, in fact', '', '1_5', 1), ('NONSOLVM', 92, 'non',\n 'not only…, but also…', '', '1_5', 1), ('PVLCHER', 93, 'pulcher',\n 'beautiful, nice', '', '1_5', 1), ('SEMPER', 94, 'semper', 'always', '',\n '1_5', 1), ('TAMEN', 95, 'tamen', 'however', '', '1_5', 2), ('SVM/1', \n 96, 'sum', 'to be', '', '1_6', 3), ('DOCEO', 97, 'doceo', 'to teach',\n '', '1_6', 1), ('DVM/2', 98, 'dum', 'while', '', '1_6', 1), ('EXEMPLVM',\n 99, 'exemplum', 'example', '', '1_6', 1), ('FIRMO', 100, 'firmo',\n 'to strengthen', '', '1_6', 1), ('IACEO', 101, 'iaceo',\n 'to lie down, to be inert', '', '1_6', 1), ('IVDICO', 102, 'iudico',\n 'to judge', '', '1_6', 1), ('LIBER/1', 103, 'liber', 'book', '', '1_6',\n 1), ('LITTERA', 104, 'littera',\n 'letter of the alphabet (sing.); literature, letter (pl.)', '', '1_6', \n 1), ('MANEO', 105, 'maneo', 'to remain', '', '1_6', 1), ('MEMORIA', 106,\n 'memoria', 'memory', '', '1_6', 1), ('MVLTVS', 107, 'multus',\n 'much, many', '', '1_6', 1), ('POSSVM/1', 108, 'possum',\n 'to be able, can', '', '1_6', 1), ('PROPTER/2', 109, 'propter',\n 'because of, on account of (w/ acc.)', '', '1_6', 1), ('SAEPE', 110,\n 'saepe', 'often', '', '1_6', 1), ('SERVO', 111, 'servo',\n 'to save, preserve', '', '1_6', 1), ('SOLEO', 112, 'soleo',\n 'to be accustomed (w/ inf.)', '', '1_6', 1), ('TENEBRAE', 113,\n 'tenebrae', 'shadows, darkness (pl.)', '', '1_6', 1), ('VITA', 114,\n 'vita', 'life', '', '1_6', 1), ('AESTIMO', 115, 'aestimo',\n 'to regard, esteem', '', '1_7', 1), ('AESTIMOVNIVSASSIS', 116,\n 
'aestimo', 'I do not care a bit ', '', '1_7', 1), ('AMOR', 117, 'amor',\n 'love', '', '1_7', 1), ('DELICIA/1', 118, 'delicia', 'delight, pet', '',\n '1_7', 1), ('DIGITVS', 119, 'digitus', 'finger', '', '1_7', 1), (\n 'DOMINA', 120, 'domina', 'mistress', '', '1_7', 1), ('GREMIVM', 121,\n 'gremium', 'lap', '', '1_7', 1), ('INVIDEO', 122, 'invideo', 'to envy',\n '', '1_7', 1), ('MEVS', 123, 'meus', 'my', '', '1_7', 1), ('OCVLVS', \n 124, 'oculus', 'eye', '', '1_7', 1), ('PASSER', 125, 'passer',\n 'sparrow', '', '1_7', 1), ('PAX', 126, 'pax', 'peace; favor', '', '1_7',\n 1), ('PVTO', 127, 'puto', 'to think', '', '1_7', 1), ('SENEX/1', 128,\n 'senex', 'old man', '', '1_7', 1), ('SESESE', 129, 'se',\n 'him/her/itself', '', '1_7', 1), ('SEVERVS', 130, 'severus', 'serious',\n '', '1_7', 1), ('SOROR', 131, 'soror', 'sister', '', '1_7', 1), (\n 'SVI/1', 132, 'sui', 'him–/her–/itself', '', '1_7', 1), ('VERBVM', 133,\n 'verbum', 'word', '', '1_7', 1), ('CONTRA/2', 134, 'contra',\n 'against (w/ acc.)', '', '1_8', 1), ('DECERNO', 135, 'decerno',\n 'to decide, determine (often w/ inf.)', '', '1_8', 1), ('DICO/2', 136,\n 'dico', 'to say', '', '1_8', 1), ('DVX', 137, 'dux', 'leader, general',\n '', '1_8', 1), ('FORTITVDO', 138, 'fortitudo', 'courage', '', '1_8', 1),\n ('HOMO', 139, 'homo', 'man, human being; people (pl.)', '', '1_8', 1),\n ('INTELLIGO', 140, 'intellego', 'to understand', '', '1_8', 1), (\n 'LIBERO', 141, 'libero', 'to free someone (acc.) from something (abl.)',\n '', '1_8', 1), ('MILES', 142, 'miles', 'soldier', '', '1_8', 1), (\n 'NAVIGO', 143, 'navigo', 'to sail, navigate', '', '1_8', 1), (\n 'ORACVLVM', 144, 'oraculum', 'oracle, prophecy', '', '1_8', 1), ('PETO',\n 145, 'peto', 'to seek', '', '1_8', 1), ('REX', 146, 'rex', 'king', '',\n '1_8', 1), ('TANDEM', 147, 'tandem', 'finally', '', '1_8', 1), (\n 'TEMPLVM', 148, 'templum', 'temple', '', '1_8', 1), ('TIMOR', 149,\n 'timor', 'fear', '', '1_8', 1), ('TVM', 150, 'tum',\n 'then, at that time', '', '1_8', 2), ('VINCO', 151, 'vinco',\n 'to conquer', '', '1_8', 1), ('ANIMAL', 152, 'animal', 'animal', '',\n '1_9', 1), ('ARMA', 153, 'arma', 'weapons (pl.)', '', '1_9', 1), (\n 'AVDIO', 154, 'audio', 'to hear, listen', '', '1_9', 1), ('CAPVT', 155,\n 'caput', 'head', '', '1_9', 1), ('CIVIS', 156, 'civis', 'citizen', '',\n '1_9', 1), ('CONSVL', 157, 'consul', 'consul', '', '1_9', 1), ('CORPVS',\n 158, 'corpus', 'body', '', '1_9', 1), ('CREDO', 159, 'credo',\n 'to believe somebody (w/ dat.)', '', '1_9', 1), ('EXEMPLAR', 160,\n 'exemplar', 'example', '', '1_9', 1), ('GERO', 161, 'gero',\n 'to carry; to behave (w/ se)', '', '1_9', 2), ('MARE', 162, 'mare',\n 'sea', '', '1_9', 1), ('MORS', 163, 'mors', 'death', '', '1_9', 1), (\n 'MVLIER', 164, 'mulier', 'woman', '', '1_9', 1), ('ORATIO', 165,\n 'oratio', 'oration, speech', '', '1_9', 1), ('SCIO', 166, 'scio',\n 'to know', '', '1_9', 1), ('SENTIO', 167, 'sentio', 'to perceive', '',\n '1_9', 1), ('TEMPVS/1', 168, 'tempus', 'time', '', '1_9', 1), ('VENIO',\n 169, 'venio', 'to come', '', '1_9', 1), ('VRBS', 170, 'urbs', 'city',\n '', '1_9', 1), ('ACER/2', 171, 'acer', 'keen, fierce', '', '1_10', 1),\n ('AEDIFICO', 172, 'aedifico', 'to build', '', '1_10', 1), ('CAPIO/2', \n 173, 'capio', 'to take, adopt, capture', '', '1_10', 1), ('CELEBER', \n 174, 'celeber', 'renowned, well–known, crowded', '', '1_10', 1), (\n 'CVPIO', 175, 'cupio', 'to desire, want', '', '1_10', 1), ('DELEO', 176,\n 'deleo', 'to destroy', '', '1_10', 1), ('DEVS', 177, 'deus', 'god', '',\n '1_10', 1), ('DONVM', 178, 'donum', 'gift', 
'', '1_10', 1), ('EQVVS', \n    179, 'equus', 'horse', '', '1_10', 1), ('FELIX', 180, 'felix',\n    'fortunate, happy', '', '1_10', 1), ('FLAMMA', 181, 'flamma', 'flame',\n    '', '1_10', 1), ('FORTIS', 182, 'fortis', 'brave, strong', '', '1_10', \n    1), ('FVGIO', 183, 'fugio', 'to flee, run away', '', '1_10', 1), (\n    'HOSTIS', 184, 'hostis', 'enemy', '', '1_10', 1), ('MOVEO', 185,\n    'moveo', 'to move', '', '1_10', 1), ('NEC/2', 186, 'nec',\n    'nor; and not', '', '1_10', 2), ('NOX', 187, 'nox', 'night', '', '1_10',\n    1), ('PAVCI', 188, 'paucus', 'few', '', '1_10', 1), ('PERICVLVM', 189,\n    'periculum', 'danger', '', '1_10', 1), ('PVGNO', 190, 'pugno',\n    'to fight', '', '1_10', 1), ('AGO', 191, 'ago',\n    'to drive, lead, do, behave', '', '1_11', 1), ('ARDEO', 192, 'ardeo',\n    'to burn, be on fire', '', '1_11', 1), ('CONSPICIO', 193, 'conspicio',\n    'to look at, observe', '', '1_11', 1), ('CRVDELIS', 194, 'crudelis',\n    'cruel', '', '1_11', 1), ('DOLOR', 195, 'dolor', 'grief, pain', '',\n    '1_11', 1), ('ITA', 196, 'ita', 'yes', '', '1_11', 2), ('MINIME', 197,\n    'minime', 'No', '', '1_11', 1), ('MITTO', 198, 'mitto', 'to send', '',\n    '1_11', 1), ('NE/2', 199, 'ne',\n    '(added to the first word of a question)', '', '1_11', 1), ('NOVVS', 200,\n    'novus', 'new', '', '1_11', 1), ('PARVM/2', 201, 'parum', 'too little',\n    '', '1_11', 2), ('QVE', 202, 'que', 'and', '', '1_11', 1), ('QVOQVE', \n    203, 'quoque', 'also', '', '1_11', 1), ('REGINA', 204, 'regina',\n    'queen', '', '1_11', 1), ('RELINQVO', 205, 'relinquo', 'to abandon', '',\n    '1_11', 1), ('SILVA', 206, 'silva', 'forest', '', '1_11', 1), (\n    'SPELVNCA', 207, 'spelunca', 'cave', '', '1_11', 1), ('TEMPESTAS', 208,\n    'tempestas', 'season', '', '1_11', 1), ('VNA', 209, 'una', 'together',\n    '', '1_11', 1), ('BELLVMGERO', 210, 'bellum', 'to wage war', '', '1_12',\n    1), ('CONSVMO', 211, 'consumo', 'to consume', '', '1_12', 1), (\n    'DEXTERA', 212, 'dextera', 'right hand', '', '1_12', 1), ('FACIO', 213,\n    'facio', 'to do, make', '', '1_12', 1), ('IBI', 214, 'ibi', 'there', '',\n    '1_12', 1), ('IGNIS', 215, 'ignis', 'fire', '', '1_12', 1), ('INQVIO', \n    216, 'inquam', 'to say (used with direct speech)', '', '1_12', 3), (\n    'IRA', 217, 'ira', 'anger', '', '1_12', 1), ('IS', 218, 'is',\n    's/he/it, this, that', '', '1_12', 1), ('NOMEN', 219, 'nomen', 'name',\n    '', '1_12', 1), ('NOS', 220, 'nos', 'we; us', '', '1_12', 1), ('NOSTER',\n    221, 'noster', 'our, ours', '', '1_12', 1), ('OCCIDO/2', 222, 'occido',\n    'to strike down, knock down', '', '1_12', 1), ('OSTENDO', 223,\n    'ostendo', 'to show', '', '1_12', 1), ('PONO', 224, 'pono', 'to place',\n    '', '1_12', 1), ('PROPE/2', 225, 'prope', 'near', '', '1_12', 2), (\n    'PROVIRIBVS', 226, 'pro', 'with all one’s might', '', '1_12', 1), (\n    'SIMILIS', 227, 'similis', 'similar', '', '1_12', 1), ('STATIM', 228,\n    'statim', 'immediately', '', '1_12', 1), ('TANTVS', 229, 'tantus',\n    'so much', '', '1_12', 1), ('TVVS', 230, 'tuus', 'your', '', '1_12', 1),\n    ('VESTER', 231, 'vester', 'your', '', '1_12', 1), ('VIS', 232, 'vis',\n    'force', '', '1_12', 1), ('VOS', 233, 'vos', 'you', '', '1_12', 1), (\n    'EGO', 234, 'ego', 'I', '', '1_13', 3), ('TV', 235, 'tu', 'you', '',\n    '1_13', 3), ('TVM', 236, 'tum', 'then, at that time', '', '1_13', 2), (\n    'ALIVS', 237, 'alius', 'another, other', '', '1_13', 1), ('APVD', 238,\n    'apud', 'at the house of (w/ acc.)', '', '1_13', 1), ('ATQVE/1', 239,\n    'atque', 'as', '', '1_13', 2), ('DISCEDO/1', 240, 'discedo',\n    'to leave, withdraw, go away', '', '1_13', 1), ('DIVES', 241, 'dives',\n    'rich', '', '1_13', 1), 
('DOCTVS', 242, 'doctus', 'learned', '', '1_13',\n    1), ('DVCO', 243, 'duco', 'to lead, take', '', '1_13', 2), ('ENIM/2', \n    244, 'enim', 'for, in fact', '', '1_13', 1), ('IVDEX', 245, 'iudex',\n    'judge', '', '1_13', 1), ('LICET/1', 246, 'licet',\n    'it is allowed, permitted (for someone) (to do something) (w/ dat. and inf.) '\n    , '', '1_13', 1), ('NIHIL', 247, 'nihil', 'nothing', '', '1_13', 1), (\n    'NOLO', 248, 'nolo', 'not to want, to be unwilling', '', '1_13', 2), (\n    'OMNIS', 249, 'omnis', 'each, every, all', '', '1_13', 1), ('PRO/1', \n    250, 'pro', 'for, on behalf of (w/ abl.)', '', '1_13', 1), ('QVID', 251,\n    'quid', 'what; why', '', '1_13', 1), ('RESPONDEO', 252, 'respondeo',\n    'to answer', '', '1_13', 1), ('ROGO', 253, 'rogo', 'to ask', '', '1_13',\n    1), ('SVVS', 254, 'suus', 'his, her, its, their', '', '1_13', 2), (\n    'TANTVM/2', 255, 'tantum', 'only', '', '1_13', 1), ('VALE', 256, 'vale',\n    'greetings! farewell!', '', '1_13', 1), ('VALEO', 257, 'valeo',\n    'to be able (w/ inf.); to be in good health', '', '1_13', 3), ('ALBVS',\n    258, 'albus', 'white', '', '1_14', 1), ('ARBOR', 259, 'arbor', 'tree',\n    '', '1_14', 1), ('CADO', 260, 'cado', 'to fall', '', '1_14', 1), (\n    'COMEDO/2', 261, 'comedo', 'to eat', '', '1_14', 1), ('CONVENIO', 262,\n    'convenio', 'to meet', '', '1_14', 1), ('FLVO', 263, 'fluo', 'to flow',\n    '', '1_14', 1), ('GLADIVS', 264, 'gladius', 'sword', '', '1_14', 1), (\n    'IAM', 265, 'iam', 'already, yet', '', '1_14', 1), ('MOX', 266, 'mox',\n    'soon', '', '1_14', 1), ('ODIVM', 267, 'odium', 'hatred', '', '1_14', 2\n    ), ('OS/1', 268, 'os', 'mouth', '', '1_14', 1), ('PARENS/1', 269,\n    'parens', 'parent', '', '1_14', 1), ('PECTVS', 270, 'pectus', 'chest',\n    '', '1_14', 1), ('PER', 271, 'per', 'through (w/ acc.)', '', '1_14', 1),\n    ('PRIMVS', 272, 'primus', 'first', '', '1_14', 1), ('QVI/1', 273, 'qui',\n    'who, which (rel. pronoun); what? which? (inter. adj.) ', '', '1_14', 2\n    ), ('RVBER', 274, 'ruber', 'red', '', '1_14', 1), ('SANGVIS', 275,\n    'sanguis', 'blood', '', '1_14', 1), ('SEPARO/2', 276, 'separo',\n    'to separate, divide', '', '1_14', 1), ('TANGO', 277, 'tango',\n    'to touch', '', '1_14', 1), ('INQVIO', 278, 'inquam',\n    'to say (used with direct speech)', '', '1_15', 3), ('QVI/1', 279,\n    'qui', 'who, which (rel. pronoun); what? which? (inter. adj.) ', '',\n    '1_15', 2), ('ANTE/2', 280, 'ante', 'in front of (w/ acc.)', '', '1_15',\n    1), ('ARGVMENTVM', 281, 'argumentum', 'proof, indication, argument', '',\n    '1_15', 1), ('CVR/1', 282, 'cur', 'why', '', '1_15', 1), ('DIFFICILIS',\n    283, 'difficilis', 'difficult', '', '1_15', 1), ('ECCE', 284, 'ecce',\n    'look here!', '', '1_15', 1), ('ETIAM', 285, 'etiam', 'even, also', '',\n    '1_15', 1), ('FORSITAN', 286, 'forsan', 'perhaps', '', '1_15', 1), (\n    'NEGLIGO', 287, 'neglego', 'to neglect', '', '1_15', 1), ('PARVVS/2', \n    288, 'parvus', 'small', '', '1_15', 1), ('QVIS/1', 289, 'quis',\n    'who? which? (inter. 
pronoun)', '', '1_15', 1), ('RVSTICVS/2', 290,\n    'rusticus', 'rural, rustic', '', '1_15', 1), ('SAXVM', 291, 'saxum',\n    'stone, rock', '', '1_15', 1), ('SENECTVS/1', 292, 'senectus',\n    'old age', '', '1_15', 1), ('SICVT/1', 293, 'sicut', 'just as', '',\n    '1_15', 1), ('STO', 294, 'sto', 'stand', '', '1_15', 1), ('VBIQVE', 295,\n    'ubique', 'everywhere', '', '1_15', 1), ('VERVS', 296, 'verus',\n    'real, true', '', '1_15', 1), ('VETVSTVS', 297, 'vetustus', 'old', '',\n    '1_15', 1), ('VILLA', 298, 'villa', 'estate', '', '1_15', 1), ('VMQVAM',\n    299, 'umquam', 'ever', '', '1_15', 1), ('AVVNCVLVS', 300, 'avunculus',\n    'uncle', '', '1_16', 1), ('CAELVM/1', 301, 'caelum',\n    'sky, heaven, weather', '', '1_16', 1), ('CAVSA', 302, 'causa',\n    'cause, reason', '', '1_16', 1), ('CINIS', 303, 'cinis', 'ash', '',\n    '1_16', 1), ('CLADES', 304, 'clades', 'disaster', '', '1_16', 1), (\n    'CLASSIS', 305, 'classis', 'fleet, class (of people)', '', '1_16', 1),\n    ('FEMINA', 306, 'femina', 'woman', '', '1_16', 1), ('FVMVS', 307,\n    'fumus', 'smoke', '', '1_16', 1), ('FVNESTVS', 308, 'funestus',\n    'deadly', '', '1_16', 1), ('IGITVR', 309, 'igitur', 'therefore', '',\n    '1_16', 1), ('INCENDIVM', 310, 'incendium', 'conflagration, eruption',\n    '', '1_16', 1), ('LEGO/2', 311, 'lego', 'to read, choose', '', '1_16', \n    1), ('LITVS/2', 312, 'litus', 'shore', '', '1_16', 1), ('MATER', 313,\n    'mater', 'mother', '', '1_16', 1), ('MONS', 314, 'mons', 'mountain', '',\n    '1_16', 1), ('NAVIS', 315, 'navis', 'ship', '', '1_16', 1), ('NVBES', \n    316, 'nubes', 'cloud', '', '1_16', 1), ('NVMQVAM', 317, 'numquam',\n    'never', '', '1_16', 1), ('OPPRIMO', 318, 'opprimo',\n    'to overwhelm, suppress', '', '1_16', 1), ('PARS', 319, 'pars', 'part',\n    '', '1_16', 1), ('STVDEO', 320, 'studeo',\n    'to study, be eager for, be interested in (w/ dat.)', '', '1_16', 1), (\n    'DOMVS', 321, 'domus', 'home', '', '1_17', 2), ('ALO', 322, 'alo',\n    'to feed, nourish', '', '1_17', 1), ('AMITTO', 323, 'amitto', 'to lose',\n    '', '1_17', 1), ('CORNV', 324, 'cornu', 'horn', '', '1_17', 1), (\n    'CORRIPIO', 325, 'corripio', 'to seize, occupy, engulf', '', '1_17', 1),\n    ('CVRRO', 326, 'curro', 'to run', '', '1_17', 1), ('DEVASTO', 327,\n    'devasto', 'to lay waste', '', '1_17', 1), ('EXSTINGVO', 328,\n    'exstinguo', 'to extinguish', '', '1_17', 1), ('FACILE', 329, 'facile',\n    'easily', '', '1_17', 1), ('IACIO', 330, 'iacio', 'to throw', '',\n    '1_17', 1), ('IMPERATOR', 331, 'imperator', 'general, emperor', '',\n    '1_17', 1), ('IMPETVS', 332, 'impetus', 'impetus, force, attack', '',\n    '1_17', 1), ('INITIVM', 333, 'initium', 'beginning', '', '1_17', 1), (\n    'IVSSVS', 334, 'iussus', 'order', '', '1_17', 1), ('LOCVS', 335,\n    'locus',\n    'place (sing.); passages of a book (m. pl.); geographical places (n. 
pl.)',\n '', '1_17', 1), ('MANVS/1', 336, 'manus', 'hand', '', '1_17', 1), (\n 'MVRVS', 337, 'murus', 'wall', '', '1_17', 1), ('SINE', 338, 'sine',\n 'without (w/ abl.)', '', '1_17', 1), ('TEMPTO', 339, 'tempto', 'to try',\n '', '1_17', 1), ('TVMVLTVS', 340, 'tumultus', 'confusion', '', '1_17', \n 1), ('VENTVS', 341, 'ventus', 'wind', '', '1_17', 1), ('DVCO', 342,\n 'duco', 'to lead, take', '', '1_18', 2), ('ITA', 343, 'ita', 'yes', '',\n '1_18', 2), ('COLO/2', 344, 'colo', 'to worship, cultivate', '', '1_18',\n 1), ('CVM/3', 345, 'cum', 'when, after', '', '1_18', 2), ('DEA', 346,\n 'dea', 'goddess', '', '1_18', 1), ('DIES', 347, 'dies', 'day', '',\n '1_18', 1), ('DORMIO', 348, 'dormio', 'to sleep', '', '1_18', 1), (\n 'EXCITO/1', 349, 'excito', 'to awaken, rouse, stir up', '', '1_18', 1),\n ('EXCLAMO', 350, 'exclamo', 'to exclaim', '', '1_18', 1), ('FACIES', \n 351, 'facies', 'face', '', '1_18', 1), ('FATVM', 352, 'fatum',\n 'fate, destiny', '', '1_18', 1), ('MARITVS/1', 353, 'maritus',\n 'husband', '', '1_18', 1), ('MERIDIES', 354, 'meridies', 'midday', '',\n '1_18', 2), ('MVLTVM/2', 355, 'multum', 'much', '', '1_18', 1), (\n 'OCCVLTO', 356, 'occulto', 'to hide', '', '1_18', 1), ('PATER', 357,\n 'pater', 'father', '', '1_18', 2), ('POST/2', 358, 'post',\n 'after (w/ acc.)', '', '1_18', 1), ('QVAERO', 359, 'quaero',\n 'to look for, search', '', '1_18', 1), ('RES', 360, 'res',\n 'thing, matter', '', '1_18', 1), ('SI/2', 361, 'si', 'if', '', '1_18', \n 1), ('SOMNVS', 362, 'somnus', 'sleep', '', '1_18', 1), ('TAM', 363,\n 'tam', 'so ', '', '1_18', 1), ('VXOR', 364, 'uxor', 'wife', '', '1_18',\n 2), ('BARBA', 365, 'barba', 'beard', '', '1_19', 1), ('CARO/1', 366,\n 'caro', 'meat, flesh', '', '1_19', 1), ('CELERITER', 367, 'celeriter',\n 'swiftly', '', '1_19', 1), ('COQVO', 368, 'coquo', 'to cook', '',\n '1_19', 1), ('CRESCO', 369, 'cresco', 'to grow', '', '1_19', 1), (\n 'FEROX', 370, 'ferox', 'fierce, ferocious', '', '1_19', 1), ('FORIS/2',\n 371, 'foris', 'outside, in the open', '', '1_19', 1), ('HERBA', 372,\n 'herba', 'plant, vegetation', '', '1_19', 1), ('HIC/1', 373, 'hic',\n 'this', '', '1_19', 1), ('INTER', 374, 'inter',\n 'between, among (w/ acc.)', '', '1_19', 1), ('PELLIS', 375, 'pellis',\n 'skin, hide', '', '1_19', 1), ('POSTQVAM', 376, 'postquam', 'after', '',\n '1_19', 1), ('PROELIVM', 377, 'proelium', 'battle, combat', '', '1_19',\n 1), ('SANO', 378, 'sano', 'to heal', '', '1_19', 1), ('SEDEO', 379,\n 'sedeo', 'to sit', '', '1_19', 1), ('TERO', 380, 'tero',\n 'to wear out, rub', '', '1_19', 1), ('TERRIBILIS', 381, 'terribilis',\n 'terrifying', '', '1_19', 1), ('VESTIMENTVM', 382, 'vestimentum',\n 'garment, clothes (pl.)', '', '1_19', 1), ('VIVO', 383, 'vivo',\n 'to live', '', '1_19', 1), ('VVLNERO', 384, 'vulnero', 'to wound', '',\n '1_19', 1), ('VVLNVS', 385, 'vulnus', 'wound', '', '1_19', 1), (\n 'ABVNDO', 386, 'abundo', 'to abound with (w/ abl.)', '', '1_20', 1), (\n 'ADOLESCENS/2', 387, 'adulescens', 'young man, young lady', '', '1_20',\n 1), ('AEQVVS', 388, 'aequus', 'even', '', '1_20', 1), ('COR', 389,\n 'cor', 'heart', '', '1_20', 1), ('DELECTO', 390, 'delecto',\n 'to delight, please', '', '1_20', 1), ('DIVINVS/2', 391, 'divinus',\n 'divine', '', '1_20', 1), ('EGEO', 392, 'egeo',\n 'to lack something (abl.)', '', '1_20', 1), ('FVR', 393, 'fur', 'thief',\n '', '1_20', 1), ('FVRTVM', 394, 'furtum', 'theft', '', '1_20', 1), (\n 'HVMANVS', 395, 'humanus', 'human', '', '1_20', 1), ('ILLE', 396,\n 'ille', 'that', '', '1_20', 1), ('INIQVITAS', 397, 'iniquitas',\n 'injustice', 
'', '1_20', 1), ('LEX', 398, 'lex', 'law', '', '1_20', 1),\n ('LVDO', 399, 'ludo', 'to play', '', '1_20', 1), ('NOCTV', 400, 'noctu',\n 'during the night', '', '1_20', 1), ('PAENE', 401, 'paene', 'almost',\n '', '1_20', 1), ('PAVPER', 402, 'pauper', 'poor', '', '1_20', 1), (\n 'PLENVS', 403, 'plenus', 'full of (w/ gen. or abl.)', '', '1_20', 1), (\n 'POMVM', 404, 'pomum', 'fruit', '', '1_20', 1), ('PVNIO', 405, 'punio',\n 'to punish', '', '1_20', 1), ('ACCIPIO', 406, 'accipio',\n 'to accept, receive', '', '1_21', 1), ('ACCVSO', 407, 'accuso',\n 'to accuse someone (acc.) of something (gen.)', '', '1_21', 1), (\n 'ALIENVS/2', 408, 'alienus',\n 'foreign to, inconsistent with (w/ a/ab and abl.)', '', '1_21', 1), (\n 'AXIS', 409, 'axis', 'axle, axis', '', '1_21', 1), ('CIRCVM/2', 410,\n 'circum', 'around (w/ acc.)', '', '1_21', 1), ('CONSTANTIA', 411,\n 'constantia', 'constancy', '', '1_21', 1), ('DESCENDO', 412, 'descendo',\n 'to descend', '', '1_21', 1), ('DIVITIAE', 413, 'divitia',\n 'wealth, riches (pl.)', '', '1_21', 1), ('ERIPIO', 414, 'eripio',\n 'to snatch away', '', '1_21', 1), ('ERRO/2', 415, 'erro',\n 'to wander, make a mistake', '', '1_21', 1), ('EXTERNVS', 416,\n 'externus', 'outward, external', '', '1_21', 1), ('FORTVNA', 417,\n 'fortuna', 'fortune, the goddess Fortune', '', '1_21', 1), ('FVTVRVS', \n 418, 'futurus', 'about to be (from sum)', '', '1_21', 1), ('HONOR', 419,\n 'honor', 'honor, public office or distinction', '', '1_21', 1), (\n 'MVTO/2', 420, 'muto', 'to change', '', '1_21', 1), ('POSSIDEO', 421,\n 'possideo', 'to possess', '', '1_21', 1), ('PROCERTO', 422, 'pro',\n 'for certain, for sure', '', '1_21', 1), ('RECIPIO', 423, 'recipio',\n 'to take back', '', '1_21', 2), ('REPREHENDO', 424, 'reprehendo',\n 'to blame, rebuke', '', '1_21', 1), ('ROTA', 425, 'rota', 'wheel', '',\n '1_21', 1), ('TOLLO', 426, 'tollo', 'to lift up, raise; to destroy', '',\n '1_21', 1), ('VERSO', 427, 'verso', 'to turn', '', '1_21', 1), ('VLLVS',\n 428, 'ullus', 'any', '', '1_21', 1), ('CONSILIVM', 429, 'consilium',\n 'plan, advice', '', '2_1', 2), ('MERIDIES', 430, 'meridies', 'midday',\n '', '2_1', 2), ('PROPE/2', 431, 'prope', 'near', '', '2_1', 2), (\n 'ASPICIO', 432, 'aspicio', 'to look at, catch a glimpse of', '', '2_1',\n 1), ('ETET', 433, 'et', 'both…and…', '', '2_1', 1), ('GENS', 434,\n 'gens', 'tribe, population', '', '2_1', 1), ('GIGNO', 435, 'gigno',\n 'to give birth, produce', '', '2_1', 1), ('HODIE', 436, 'hodie',\n 'today', '', '2_1', 1), ('INCOLA', 437, 'incola', 'inhabitant', '',\n '2_1', 1), ('INSVLA', 438, 'insula', 'island', '', '2_1', 2), (\n 'INVENIO', 439, 'invenio', 'to come upon, find', '', '2_1', 1), ('MOS',\n 440, 'mos', 'custom, habit; morals (pl.)', '', '2_1', 1), ('MVNDVS/1', \n 441, 'mundus', 'world', '', '2_1', 1), ('NE/4', 442, 'ne',\n 'that not, not to, lest ', '', '2_1', 3), ('OCCVPO/2', 443, 'occupo',\n 'to occupy', '', '2_1', 1), ('ORTVS', 444, 'ortus',\n 'origin, beginning, raising', '', '2_1', 1), ('PISCIS', 445, 'piscis',\n 'a fish', '', '2_1', 1), ('PROCVL', 446, 'procul', 'far, far away', '',\n '2_1', 1), ('PROMITTO', 447, 'promitto', 'to promise', '', '2_1', 1), (\n 'SEPTENTRIONALIS', 448, 'septentrionalis', 'northern', '', '2_1', 1), (\n 'SITVS/2', 449, 'situs', 'located, situated', '', '2_1', 1), ('SOL', \n 450, 'sol', 'sun', '', '2_1', 1), ('VTINAM', 451, 'utinam', 'if only',\n '', '2_1', 2), ('GERO', 452, 'gero', 'to carry; to behave (w/ se)', '',\n '2_2', 2), ('ODIVM', 453, 'odium', 'hatred', '', '2_2', 2), ('VALEO', \n 454, 'valeo', 'to be able 
(w/ inf.); to be in good health', '', '2_2',\n    3), ('ALTVS', 455, 'altus', 'tall, deep', '', '2_2', 1), ('ANNVS', 456,\n    'annus', 'year', '', '2_2', 1), ('ARGENTVM', 457, 'argentum', 'silver',\n    '', '2_2', 1), ('AVRVM', 458, 'aurum', 'gold', '', '2_2', 1), ('BREVIS',\n    459, 'brevis', 'short', '', '2_2', 1), ('CLARVS', 460, 'clarus',\n    'clear, distinguished', '', '2_2', 1), ('CVSTOS', 461, 'custos',\n    'guard', '', '2_2', 1), ('EQVES', 462, 'eques', 'horseman', '', '2_2', \n    1), ('FINIS', 463, 'finis', 'end', '', '2_2', 1), ('GRAVIS', 464,\n    'gravis', 'serious, heavy', '', '2_2', 1), ('INTERDVM', 465, 'interdum',\n    'sometimes', '', '2_2', 1), ('LIS', 466, 'lis', 'dispute, quarrel', '',\n    '2_2', 1), ('MANE/2', 467, 'mane', 'in the morning', '', '2_2', 1), (\n    'ODIOHABEO', 468, 'odio', 'to hate somebody', '', '2_2', 1), ('SINO', \n    469, 'sino', 'to allow somebody (acc.) to do something (inf.)', '',\n    '2_2', 1), ('VEL/1', 470, 'vel', 'or', '', '2_2', 1), ('VESTIS', 471,\n    'vestis', 'clothes, attire', '', '2_2', 1), ('VOX', 472, 'vox', 'voice',\n    '', '2_2', 1), ('VT/4', 473, 'ut', 'that, to, in order to, so that', '',\n    '2_2', 4), ('VVLTVS', 474, 'vultus', 'face', '', '2_2', 1), ('VXOR', \n    475, 'uxor', 'wife', '', '2_3', 2), ('AT/2', 476, 'at', 'but', '',\n    '2_3', 1), ('CONIVX', 477, 'coniunx', 'spouse', '', '2_3', 1), (\n    'DISCIPVLA', 478, 'discipula', 'student', '', '2_3', 1), ('DISCO', 479,\n    'disco', 'to learn', '', '2_3', 1), ('DOMINVS', 480, 'dominus',\n    'master, lord', '', '2_3', 1), ('FAMA', 481, 'fama',\n    'fame, name, reputation', '', '2_3', 1), ('FRATER', 482, 'frater',\n    'brother', '', '2_3', 1), ('IMPROBVS', 483, 'improbus', 'wicked, bad',\n    '', '2_3', 1), ('IVNGO', 484, 'iungo', 'to join', '', '2_3', 1), (\n    'MAGISTER', 485, 'magister', 'teacher', '', '2_3', 1), ('MATRIMONIVM', \n    486, 'matrimonium', 'marriage', '', '2_3', 1), ('NE/4', 487, 'ne',\n    'that not, not to, lest ', '', '2_3', 3), ('NVSQVAM', 488, 'nusquam',\n    'nowhere', '', '2_3', 1), ('PARIO/2', 489, 'pario', 'to give birth to',\n    '', '2_3', 1), ('PERDO', 490, 'perdo', 'to lose, waste', '', '2_3', 1),\n    ('SALVS', 491, 'salus', 'health, welfare', '', '2_3', 1), (\n    'SALVTEMDICERE', 492, 'salutem',\n    'to greet (customary opening to letter) ', '', '2_3', 1), ('SCRIBO', \n    493, 'scribo', 'to write', '', '2_3', 1), ('VT/4', 494, 'ut',\n    'that, to, in order to, so that', '', '2_3', 4), ('VXOREMDEDVCERE', 495,\n    'uxorem', 'to marry a woman, to take as a wife', '', '2_3', 1), (\n    'NEC/2', 496, 'nec', 'nor; and not', '', '2_4', 2), ('RECIPIO', 497,\n    'recipio', 'to take back', '', '2_4', 2), ('AGMEN', 498, 'agmen',\n    'marching column', '', '2_4', 1), ('APERIO', 499, 'aperio', 'to open',\n    '', '2_4', 1), ('COEPIO', 500, 'coepi', 'to begin (w/ inf.)', '', '2_4',\n    1), ('DEFENDO', 501, 'defendo', 'to defend', '', '2_4', 1), ('EDO/1', \n    502, 'edo', 'to produce, give forth', '', '2_4', 1), ('EXTRA/2', 503,\n    'extra', 'outside of (w/ acc.)', '', '2_4', 1), ('FVRO', 504, 'furo',\n    'to rage, be insane', '', '2_4', 1), ('INGENS', 505, 'ingens', 'huge',\n    '', '2_4', 1), ('INVADO/2', 506, 'invado', 'to burst in', '', '2_4', 1),\n    ('LIGNEVS', 507, 'ligneus', 'made of wood', '', '2_4', 1), ('NEQVENEC',\n    508, 'neque', 'neither…nor…', '', '2_4', 1), ('PARCO', 509, 'parco',\n    'to spare somebody/thing (w/ dat.)', '', '2_4', 1), ('PONS', 510,\n    'pons', 'bridge', '', '2_4', 1), ('PORTA', 511, 'porta', 'gate', '',\n    '2_4', 1), ('PRIMO', 512, 'primo', 'at first', '', '2_4', 1), ('QVAM/1',\n    513, 'quam', 'than (w/ comp. 
words)', '', '2_4', 2), ('QVANTVS/1', 514,\n 'quantus', 'how great, how much (inter. or rel. adj.)', '', '2_4', 1),\n ('RESISTO', 515, 'resisto', 'to resist (w/ dat.)', '', '2_4', 1), (\n 'SIMVL/1', 516, 'simul', 'at the same time', '', '2_4', 1), ('TVTVS', \n 517, 'tutus', 'safe', '', '2_4', 1), ('VACVVS', 518, 'vacuus',\n 'empty of (w/ abl.)', '', '2_4', 1), ('VALEO', 519, 'valeo',\n 'to be able (w/ inf.); to be in good health', '', '2_4', 3), ('VICTOR',\n 520, 'victor', 'victor', '', '2_4', 1), ('VTINAM', 521, 'utinam',\n 'if only', '', '2_4', 2), ('BIBO/2', 522, 'bibo', 'to drink', '', '2_5',\n 1), ('CARMEN/1', 523, 'carmen', 'song, poem', '', '2_5', 1), ('CIBVS', \n 524, 'cibus', 'food', '', '2_5', 1), ('DVLCIS', 525, 'dulcis', 'sweet',\n '', '2_5', 1), ('FLVMEN', 526, 'flumen', 'river', '', '2_5', 1), (\n 'IMMEMOR', 527, 'immemor', 'forgetful of (w/ gen.)', '', '2_5', 1), (\n 'IOCVS', 528, 'iocus', 'joke', '', '2_5', 1), ('IVVENTVS', 529,\n 'iuventus', 'youth', '', '2_5', 1), ('LEVIS/1', 530, 'levis', 'light',\n '', '2_5', 1), ('MENS', 531, 'mens', 'mind, spirit', '', '2_5', 1), (\n 'NE/4', 532, 'ne', 'that not, not to, lest ', '', '2_5', 3), ('ORO', \n 533, 'oro', 'to ask, entreat', '', '2_5', 1), ('PLACEO', 534, 'placeo',\n 'to please, be agreeable to somebody', '', '2_5', 1), ('PROXIMVS/2', \n 535, 'proximus', 'nearest', '', '2_5', 1), ('TAMQVAM/2', 536, 'tam',\n 'so…as…', '', '2_5', 1), ('VEHEMENS', 537, 'vehemens',\n 'violent, vehement', '', '2_5', 1), ('VETVS', 538, 'vetus', 'old', '',\n '2_5', 1), ('VINVM', 539, 'vinum', 'wine', '', '2_5', 1), ('VIRTVS', \n 540, 'virtus', 'courage, virtue', '', '2_5', 1), ('VITIVM', 541,\n 'vitium', 'vice', '', '2_5', 1), ('VT/4', 542, 'ut',\n 'that, to, in order to, so that', '', '2_5', 4), ('PATER', 543, 'pater',\n 'father', '', '2_6', 2), ('DECIPIO', 544, 'decipio', 'to deceive', '',\n '2_6', 1), ('DILIGO/3', 545, 'diligo', 'to love, esteem highly', '',\n '2_6', 1), ('DVO', 546, 'duo', 'two', '', '2_6', 1), ('EXERCITVS/1', \n 547, 'exercitus', 'army', '', '2_6', 1), ('FIDELIS/2', 548, 'fidelis',\n 'faithful, loyal', '', '2_6', 1), ('HERES', 549, 'heres', 'heir', '',\n '2_6', 1), ('IMPERIVM', 550, 'imperium', 'rule, empire, power', '',\n '2_6', 1), ('INOPIA', 551, 'inopia', 'helplessness, want', '', '2_6', 1\n ), ('LAVDO', 552, 'laudo', 'to praise', '', '2_6', 1), ('NECESSEEST', \n 553, 'necesse',\n 'it is necessary for someone (dat.) to do something (inf.)', '', '2_6',\n 1), ('NEMO', 554, 'nemo', 'no one', '', '2_6', 1), ('PAVLO', 555,\n 'paulo', 'a little bit, to a small extent', '', '2_6', 1), ('QVAM/1', \n 556, 'quam', 'than (w/ comp. 
words)', '', '2_6', 2), ('QVANTVM/3', 557,\n 'quantum', 'to what extent, how much', '', '2_6', 1), ('RESTITVO', 558,\n 'restituo', 'to restore', '', '2_6', 1), ('SATIS/2', 559, 'satis',\n 'enough, sufficiently', '', '2_6', 1), ('SECVNDVS/1', 560, 'secundus',\n 'second', '', '2_6', 1), ('TERTIVS', 561, 'tertius', 'third', '', '2_6',\n 1), ('TRES', 562, 'tres', 'three', '', '2_6', 1), ('TRISTIS', 563,\n 'tristis', 'sad', '', '2_6', 1), ('VEHEMENTER', 564, 'vehementer',\n 'strongly, vehemently', '', '2_6', 1), ('NOLO', 565, 'nolo',\n 'not to want, to be unwilling', '', '2_7', 2), ('AETAS', 566, 'aetas',\n 'age', '', '2_7', 1), ('FIDES/2', 567, 'fides', 'faith', '', '2_7', 1),\n ('FVNDO/2', 568, 'fundo', 'to pour', '', '2_7', 1), ('GLORIA', 569,\n 'gloria', 'glory', '', '2_7', 1), ('LIBERTAS', 570, 'libertas',\n 'freedom', '', '2_7', 1), ('LVMEN', 571, 'lumen', 'light', '', '2_7', 1\n ), ('MALO', 572, 'malo', 'to prefer', '', '2_7', 1), ('ORNATVS/1', 573,\n 'ornatus', 'adorned, ornate, elaborate', '', '2_7', 1), ('OTIVM', 574,\n 'otium', 'leisure, free time', '', '2_7', 1), ('POTENS', 575, 'potens',\n 'powerful', '', '2_7', 1), ('PVBLICVS/2', 576, 'publicus', 'common', '',\n '2_7', 1), ('QVALIS/1', 577, 'qualis', 'what sort of? (inter. adj.)',\n '', '2_7', 1), ('RESPVBLICA', 578, 'res', 'state', '', '2_7', 1), (\n 'STVDIOSVS', 579, 'studiosus', 'fond of (w/ gen.)', '', '2_7', 1), (\n 'TAMQVAM/1', 580, 'tamquam', 'as', '', '2_7', 1), ('TOT', 581, 'tot',\n 'so many', '', '2_7', 1), ('TRAHO', 582, 'traho', 'to drag, draw', '',\n '2_7', 1), ('VBI/1', 583, 'ubi', 'where? (inter. adv)', '', '2_7', 1),\n ('VIX', 584, 'vix', 'hardly', '', '2_7', 1), ('VNVS', 585, 'unus',\n 'one', '', '2_7', 1), ('VOLO/3', 586, 'volo', 'to want', '', '2_7', 1),\n ('VTILIS', 587, 'utilis', 'useful', '', '2_7', 1), ('ADHVC', 588,\n 'adhuc', 'still, up to this time', '', '2_8', 1), ('ANTIQVVS', 589,\n 'antiquus', 'ancient', '', '2_8', 1), ('ARS', 590, 'ars',\n 'science, art, skill', '', '2_8', 1), ('DOMINOR', 591, 'dominor',\n 'to dominate, rule', '', '2_8', 1), ('HORTOR', 592, 'hortor',\n 'to exhort, urge', '', '2_8', 1), ('LATINE', 593, 'Latine', 'in Latin',\n '', '2_8', 1), ('LATINVS/A', 594, 'Latinus',\n 'Latin, pertaining to Latin', '', '2_8', 1), ('LINGVA', 595, 'lingua',\n 'language; tongue', '', '2_8', 1), ('LOQVOR', 596, 'loquor', 'to speak',\n '', '2_8', 1), ('MAGIS/2', 597, 'magis', 'more', '', '2_8', 1), (\n 'MAIOR', 598, 'maior', 'bigger; greater', '', '2_8', 1), ('MAXIMVS', \n 599, 'maximus', 'greatest', '', '2_8', 1), ('MELIOR', 600, 'melior',\n 'better', '', '2_8', 1), ('MINIMVS', 601, 'minimus', 'smallest', '',\n '2_8', 1), ('MINVS', 602, 'minus', 'less', '', '2_8', 2), ('OPTIMVS', \n 603, 'optimus', 'best', '', '2_8', 1), ('PARTIOR', 604, 'partior',\n 'to divide, distribute', '', '2_8', 1), ('PATIOR', 605, 'patior',\n 'to endure, tolerate, suffer', '', '2_8', 1), ('PEIOR', 606, 'peior',\n 'worse', '', '2_8', 1), ('PESSIMVS', 607, 'pessimus', 'worst', '',\n '2_8', 1), ('PLVRIMVS', 608, 'plurimus', 'most', '', '2_8', 1), ('PLVS',\n 609, 'plus', 'more', '', '2_8', 1), ('SEQVOR', 610, 'sequor',\n 'to follow', '', '2_8', 1), ('VEREOR', 611, 'vereor',\n 'to fear, respect', '', '2_8', 1), ('ADDO', 612, 'addo', 'to add', '',\n '2_9', 1), ('AVRIS', 613, 'auris', 'ear', '', '2_9', 1), ('CONOR', 614,\n 'conor', 'to try', '', '2_9', 1), ('DEMITTO', 615, 'demitto',\n 'to send down', '', '2_9', 1), ('DISSIMILIS', 616, 'dissimilis',\n 'dissimilar ', '', '2_9', 1), ('FACILIS', 617, 'facilis', 'easy', '',\n '2_9', 1), 
('FERO', 618, 'fero', 'to carry, bear', '', '2_9', 1), (\n 'FIO', 619, 'fio', 'to be made, become; (impersonally) to happen', '',\n '2_9', 1), ('FRIGVS', 620, 'frigus', 'cold', '', '2_9', 1), ('GENVS/1',\n 621, 'genus', 'kind', '', '2_9', 1), ('GLACIES', 622, 'glacies', 'ice',\n '', '2_9', 1), ('GRACILIS', 623, 'gracilis', 'slender', '', '2_9', 1),\n ('HVMILIS', 624, 'humilis', 'low, humble', '', '2_9', 1), ('ITER', 625,\n 'iter', 'road, trip ', '', '2_9', 1), ('LABOR/2', 626, 'labor',\n 'to slide, slip, glide down', '', '2_9', 1), ('MODEROR', 627, 'moderor',\n 'to manage, direct, guide', '', '2_9', 1), ('NIX', 628, 'nix', 'snow',\n '', '2_9', 1), ('ONVS', 629, 'onus', 'weight, burden', '', '2_9', 1), (\n 'PERVENIO', 630, 'pervenio', 'to arrive', '', '2_9', 1), ('PROGREDIOR',\n 631, 'progredior', 'to go forward, proceed', '', '2_9', 1), (\n 'QVOTIENS/2', 632, 'quotiens', 'as often as', '', '2_9', 1), (\n 'SIMVLAC/2', 633, 'simulac', 'as soon as', '', '2_9', 1), ('SVVS', 634,\n 'suus', 'his, her, its, their', '', '2_10', 2), ('AEDES', 635, 'aedes',\n 'temple; pl. dwelling, house', '', '2_10', 1), ('EO/1', 636, 'eo',\n 'to go ', '', '2_10', 1), ('IVCVNDVS', 637, 'iucundus',\n 'pleasant, nice', '', '2_10', 1), ('LABOR/1', 638, 'labor',\n 'labor, toil', '', '2_10', 1), ('LAEDO', 639, 'laedo', 'to harm', '',\n '2_10', 1), ('LIBER/2', 640, 'liber', 'free', '', '2_10', 1), ('LVCRVM',\n 641, 'lucrum', 'profit, gain', '', '2_10', 1), ('MARITIMVS', 642,\n 'maritimus', 'maritime', '', '2_10', 1), ('MODVS', 643, 'modus',\n 'way, method, manner', '', '2_10', 1), ('PAVLISPER', 644, 'paulisper',\n 'for a little while', '', '2_10', 1), ('PECVNIA', 645, 'pecunia',\n 'money', '', '2_10', 1), ('PLACIDVS', 646, 'placidus', 'peaceful, calm',\n '', '2_10', 1), ('POTIVS', 647, 'potius', 'rather', '', '2_10', 1), (\n 'PROSPER', 648, 'prosper', 'fortunate, prosperous', '', '2_10', 1), (\n 'REDDO', 649, 'reddo', 'to give back', '', '2_10', 1), ('SARCINA', 650,\n 'sarcina', 'burden, baggage', '', '2_10', 1), ('SCELESTVS', 651,\n 'scelestus', 'wicked', '', '2_10', 1), ('SEMEL', 652, 'semel', 'once',\n '', '2_10', 1), ('SERENVS', 653, 'serenus', 'calm, clear', '', '2_10', \n 1), ('PARVM/2', 654, 'parum', 'too little', '', '2_11', 2), ('ALTER', \n 655, 'alter', 'the other (of two)', '', '2_11', 1), ('GEMMA', 656,\n 'gemma', 'gem, precious stone', '', '2_11', 1), ('LEGATVS', 657,\n 'legatus', 'ambassador', '', '2_11', 1), ('MAGNIHABEO', 658, 'magni',\n 'to esteem a lot', '', '2_11', 1), ('MINVS', 659, 'minus', 'less', '',\n '2_11', 2), ('NESCIO', 660, 'nescio', 'not to know', '', '2_11', 1), (\n 'NEVTER', 661, 'neuter', 'neither, none (of two)', '', '2_11', 1), (\n 'NVLLVS', 662, 'nullus', 'none', '', '2_11', 1), ('OPERAEPRETIVMEST', \n 663, 'operae', 'it is worthwhile', '', '2_11', 1), ('POPVLVS/1', 664,\n 'populus', 'a people, populace', '', '2_11', 1), ('QVOMODO/1', 665,\n 'quomodo', 'how', '', '2_11', 1), ('SALVTO', 666, 'saluto', 'to greet ',\n '', '2_11', 1), ('SERVVS/1', 667, 'servus', 'slave, servant', '',\n '2_11', 1), ('SOLVS', 668, 'solus', 'alone, only', '', '2_11', 1), (\n 'SPECTO', 669, 'specto', 'to watch', '', '2_11', 1), ('TACEO', 670,\n 'taceo', 'to be silent, keep quiet', '', '2_11', 1), ('TOTVS', 671,\n 'totus', 'whole, entire', '', '2_11', 1), ('TVRPIS', 672, 'turpis',\n 'shameful, disgraceful', '', '2_11', 1), ('VTER/4', 673, 'uter',\n 'who, which (of two)?', '', '2_11', 1), ('VTOR', 674, 'utor',\n 'to use (w/ abl.)', '', '2_11', 1), ('CVM/3', 675, 'cum', 'when, after',\n '', '2_12', 2), ('INQVIO', 
676, 'inquam',\n 'to say (used with direct speech)', '', '2_12', 3), ('TAMEN', 677,\n 'tamen', 'however', '', '2_12', 2), ('CARVS', 678, 'carus', 'dear', '',\n '2_12', 1), ('INSVLA', 679, 'insula', 'island', '', '2_12', 2), (\n 'MORIOR', 680, 'morior', 'to die', '', '2_12', 1), ('NIMIS', 681,\n 'nimis', 'too much', '', '2_12', 1), ('NISI', 682, 'nisi',\n 'if not, unless', '', '2_12', 1), ('OFFICIVM', 683, 'officium', 'duty',\n '', '2_12', 1), ('ORBIS', 684, 'orbis', 'circle', '', '2_12', 1), (\n 'ORBISTERRARVM', 685, 'orbis', 'the earth, the world', '', '2_12', 1),\n ('PROBO', 686, 'probo', 'to approve ', '', '2_12', 1), ('QVAMQVAM/2', \n 687, 'quamquam', 'although', '', '2_12', 1), ('QVAMVIS/1', 688,\n 'quamvis', 'although', '', '2_12', 1), ('QVIA', 689, 'quia', 'because',\n '', '2_12', 1), ('QVIDEM', 690, 'quidem', 'indeed', '', '2_12', 1), (\n 'QVOD/1', 691, 'quod', 'because', '', '2_12', 1), ('SENTENTIA', 692,\n 'sententia', 'opinion, point of view', '', '2_12', 1), ('SORS', 693,\n 'sors', 'lot', '', '2_12', 1), ('SPERO', 694, 'spero', 'to hope', '',\n '2_12', 1), ('SPES', 695, 'spes', 'hope', '', '2_12', 1), ('ATQVE/1', \n 696, 'atque', 'as', '', '2_13', 2), ('ABSENS', 697, 'absens',\n 'away, absent', '', '2_13', 1), ('ABSVM/1', 698, 'absum', 'to be away',\n '', '2_13', 1), ('BENEVOLENTIA', 699, 'benevolentia', 'good will', '',\n '2_13', 1), ('DECLARO', 700, 'declaro',\n 'to demonstrate, show, make known, reveal', '', '2_13', 1), ('IDEM', \n 701, 'idem', 'the same', '', '2_13', 1), ('IPSE', 702, 'ipse', 'self',\n '', '2_13', 1), ('IRASCOR', 703, 'irascor', 'to be angry at (w/ dat.)',\n '', '2_13', 1), ('ISTE', 704, 'iste', 'that (of yours)', '', '2_13', 1),\n ('MIROR', 705, 'miror', 'to marvel, be surprised at', '', '2_13', 1), (\n 'MVLTITVDO', 706, 'multitudo', 'crowd, throng', '', '2_13', 1), ('NEGO',\n 707, 'nego', 'to deny ', '', '2_13', 1), ('NVMERO/1', 708, 'numero',\n 'to number, count among', '', '2_13', 1), ('OFFENDO', 709, 'offendo',\n ']to happen upon, offend', '', '2_13', 1), ('REDEO/1', 710, 'redeo',\n 'to go back, return', '', '2_13', 1), ('REFERO', 711, 'refero',\n 'to carry back, report', '', '2_13', 1), ('SOCIVS/1', 712, 'socius',\n 'associate, partner, ally', '', '2_13', 1), ('TALIS', 713, 'talis',\n 'such a', '', '2_13', 1), ('TVRRIS', 714, 'turris', 'tower', '', '2_13',\n 1), ('VENIA', 715, 'venia', 'pardon, indulgence, forgiveness', '',\n '2_13', 1), ('VERSOR', 716, 'versor',\n 'to be situated in, be occupied in ', '', '2_13', 1), ('VIRGA', 717,\n 'virga', 'twig, stick', '', '2_13', 1), ('VOLVNTAS', 718, 'voluntas',\n 'will', '', '2_13', 1), ('AFFIRMO', 719, 'affirmo',\n 'to assert, maintain', '', '2_14', 1), ('CIRCVMEO/1', 720, 'circumeo',\n 'to go around', '', '2_14', 1), ('CONTINEO', 721, 'contineo',\n 'to hold, keep together, contain', '', '2_14', 1), ('COTIDIANVS', 722,\n 'cottidianus', 'of every day, daily', '', '2_14', 1), ('ELEMENTVM', 723,\n 'elementum', 'element', '', '2_14', 1), ('ERGO/2', 724, 'ergo',\n 'therefore', '', '2_14', 1), ('GRAVITAS', 725, 'gravitas',\n 'weight, gravity', '', '2_14', 1), ('IMMENSVS', 726, 'immensus',\n 'immeasurable, immense, endless', '', '2_14', 1), ('INFINITVS', 727,\n 'infinitus', 'boundless, unlimited', '', '2_14', 1), ('MAXIME', 728,\n 'maxime', 'most', '', '2_14', 1), ('MEDIVS', 729, 'medius', 'middle',\n '', '2_14', 1), ('MOTVS', 730, 'motus', 'motion, movement', '', '2_14',\n 1), ('MVLTO/2', 731, 'multo', 'by much', '', '2_14', 1), ('NATVRA', 732,\n 'natura', 'nature', '', '2_14', 1), ('NECESSARIO', 733, 
'necessario',\n 'necessarily', '', '2_14', 1), ('PERPERAM', 734, 'perperam',\n 'wrongly, incorrectly', '', '2_14', 1), ('PONDVS', 735, 'pondus',\n 'weight', '', '2_14', 1), ('PRAESERTIM', 736, 'praesertim',\n 'especially', '', '2_14', 1), ('QVIES', 737, 'quies', 'rest, repose',\n '', '2_14', 1), ('VNDIQVE', 738, 'undique',\n 'from all parts, from everywhere', '', '2_14', 1), ('VOLVO', 739,\n 'volvo', 'to turn round', '', '2_14', 1), ('VT/4', 740, 'ut',\n 'that, to, in order to, so that', '', '2_14', 4), ('ANIMADVERTO', 741,\n 'animadverto', 'to notice', '', '2_15', 1), ('APPROPINQVO', 742,\n 'appropinquo', 'to approach (w/ dat or ad + acc.)', '', '2_15', 1), (\n 'CERNO', 743, 'cerno', 'to see, distinguish with the eyes', '', '2_15',\n 1), ('CIRCA/2', 744, 'circa', 'around (w/ acc.)', '', '2_15', 1), (\n 'CLAMO', 745, 'clamo', 'to shout, scream', '', '2_15', 1), ('FINGO', \n 746, 'fingo', 'to imagine, form in the mind', '', '2_15', 1), (\n 'IMPINGO', 747, 'impingo', 'to push, strike, inflict', '', '2_15', 1),\n ('INFLIGO', 748, 'infligo', 'to strike on or against, inflict', '',\n '2_15', 1), ('ITERVM', 749, 'iterum', 'again', '', '2_15', 1), (\n 'OPPIDVM', 750, 'oppidum', 'town', '', '2_15', 1), ('PERCVTIO', 751,\n 'percutio', 'to strike through ', '', '2_15', 1), ('PRAEDITVS', 752,\n 'praeditus', 'endowed with, possessed of (w/ abl.)', '', '2_15', 1), (\n 'REPELLO', 753, 'repello', 'to push back, thrust back', '', '2_15', 1),\n ('RIDEO', 754, 'rideo', 'to laugh', '', '2_15', 1), ('RVMPO', 755,\n 'rumpo', 'to break, tear', '', '2_15', 1), ('SEDES', 756, 'sedes',\n 'seat, abode', '', '2_15', 1), ('SIC', 757, 'sic', 'in such a way', '',\n '2_15', 1), ('SIDVS', 758, 'sidus', 'constellation', '', '2_15', 1), (\n 'TELVM', 759, 'telum', 'spear, javelin', '', '2_15', 1), ('VEHO', 760,\n 'veho', 'to drive, carry', '', '2_15', 1)]\nsection_list = {'1.1': 'start', '1.2': '1.1', '1.3': '1.2', '1.4': '1.3',\n '1.5': '1.4', '1.6': '1.5', '1.7': '1.6', '1.8': '1.7', '1.9': '1.8',\n '1.10': '1.9', '1.11': '1.10', '1.12': '1.11', '1.13': '1.12', '1.14':\n '1.13', '1.15': '1.14', '1.16': '1.15', '1.17': '1.16', '1.18': '1.17',\n '1.19': '1.18', '1.20': '1.19', '1.21': '1.20', '2.1': '1.21', '2.2':\n '2.1', '2.3': '2.2', '2.4': '2.3', '2.5': '2.4', '2.6': '2.5', '2.7':\n '2.6', '2.8': '2.7', '2.9': '2.8', '2.10': '2.9', '2.11': '2.10',\n '2.12': '2.11', '2.13': '2.12', '2.14': '2.13', '2.15': '2.14', 'end':\n '2.15', 'start': 'start'}\ntitle = 'Latin for the New Millennium Vols 1 and 2 (Tunberg-Minkova)'\nsection_level = 2\nlanguage = 'Latin'\nbook = text.Text(title, section_words, the_text, section_list,\n section_level, language, True, False)\n",
"step-4": "import text\nnan=\"\"\nsection_words = {'start': -1, '1.1': 17, '1.2': 38, '1.3': 55, '1.4': 76, '1.5': 95, '1.6': 114, '1.7': 133, '1.8': 151, '1.9': 170, '1.10': 190, '1.11': 209, '1.12': 233, '1.13': 257, '1.14': 277, '1.15': 299, '1.16': 320, '1.17': 341, '1.18': 364, '1.19': 385, '1.20': 405, '1.21': 428, '2.1': 451, '2.2': 474, '2.3': 495, '2.4': 521, '2.5': 542, '2.6': 564, '2.7': 587, '2.8': 611, '2.9': 633, '2.10': 653, '2.11': 674, '2.12': 695, '2.13': 718, '2.14': 740, '2.15': 760, 'end': -2}\nthe_text = [('AGRICOLA', 0, 'agricola', 'farmer', '', '1_1', 1), ('AMBVLO', 1, 'ambulo', 'to walk', '', '1_1', 2), ('AMO', 2, 'amo', 'to love', '', '1_1', 2), ('AQVA', 3, 'aqua', 'water', '', '1_1', 1), ('ATHLETA', 4, 'athleta', 'athlete', '', '1_1', 1), ('BENE', 5, 'bene', 'well', '', '1_1', 1), ('CVRO', 6, 'curo', 'to take care for/of', '', '1_1', 2), ('ET/2', 7, 'et', 'and', '', '1_1', 1), ('FILIA', 8, 'filia', 'daughter', '', '1_1', 1), ('ITAQVE', 9, 'itaque', 'and so', '', '1_1', 1), ('LVPA', 10, 'lupa', 'she–wolf', '', '1_1', 1), ('NAVTA', 11, 'nauta', 'sailor', '', '1_1', 1), ('POETA', 12, 'poeta', 'poet', '', '1_1', 1), ('POSTEA', 13, 'postea', 'afterwards', '', '1_1', 1), ('PVELLA', 14, 'puella', 'girl', '', '1_1', 1), ('ROMA/N', 15, 'Roma', 'Rome', '', '1_1', 1), ('SVM/1', 16, 'sum', 'to be', '', '1_1', 3), ('TERRA', 17, 'terra', 'land', '', '1_1', 1), ('AMBVLO', 18, 'ambulo', 'to walk', '', '1_2', 2), ('AMO', 19, 'amo', 'to love', '', '1_2', 2), ('CVRO', 20, 'curo', 'to take care for/of', '', '1_2', 2), ('SVM/1', 21, 'sum', 'to be', '', '1_2', 3), ('DEBEO', 22, 'debeo', 'ought, must, should; to owe', '', '1_2', 1), ('DIV', 23, 'diu', 'for a long time', '', '1_2', 1), ('EGO', 24, 'ego', 'I', '', '1_2', 3), ('EXSPECTO', 25, 'exspecto', 'to wait for, await, expect', '', '1_2', 1), ('FABVLA/1', 26, 'fabula', 'story', '', '1_2', 1), ('FORMA', 27, 'forma', 'form,appearance', '', '1_2', 1), ('HABEO', 28, 'habeo', 'to have', '', '1_2', 1), ('HABITO', 29, 'habito', 'to live, dwell', '', '1_2', 1), ('NARRO', 30, 'narro', 'to tell', '', '1_2', 1), ('NON', 31, 'non', 'not', '', '1_2', 1), ('NVNC', 32, 'nunc', 'now', '', '1_2', 1), ('PARO/2', 33, 'paro', 'to prepare, get ready, design', '', '1_2', 2), ('PATRIA', 34, 'patria', 'fatherland', '', '1_2', 1), ('TENEO', 35, 'teneo', 'to hold', '', '1_2', 1), ('TV', 36, 'tu', 'you', '', '1_2', 3), ('VIDEO', 37, 'video', 'to see', '', '1_2', 1), ('VOCO', 38, 'voco', 'to call', '', '1_2', 1), ('EGO', 39, 'ego', 'I', '', '1_3', 3), ('TV', 40, 'tu', 'you', '', '1_3', 3), ('AGER', 41, 'ager', 'field', '', '1_3', 1), ('AMICVS/1', 42, 'amicus', 'friend', '', '1_3', 1), ('ANIMVS', 43, 'animus', 'spirit, soul, mind', '', '1_3', 1), ('CASA', 44, 'casa', 'little house, cottage', '', '1_3', 1), ('CVM/2', 45, 'cum', 'with (w/ abl.)', '', '1_3', 1), ('DEINDE', 46, 'deinde', 'then', '', '1_3', 1), ('DOMVS', 47, 'domus', 'home', '', '1_3', 2), ('FILIVS', 48, 'filius', 'son', '', '1_3', 1), ('IN', 49, 'in', 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_3', 2), ('PVER', 50, 'puer', 'boy', '', '1_3', 1), ('RIVVS', 51, 'rivus', 'brook, stream', '', '1_3', 1), ('TIMEO', 52, 'timeo', 'to fear, to be afraid', '', '1_3', 1), ('VALDE', 53, 'valde', 'very, exceedingly', '', '1_3', 1), ('VIA', 54, 'via', 'road', '', '1_3', 1), ('VIR', 55, 'vir', 'man', '', '1_3', 1), ('IN', 56, 'in', 'in, on (w/ abl.); into, to, against (w/ acc.)', '', '1_4', 2), ('AD/2', 57, 'ad', 'into, towards, to (w/ acc.)', '', '1_4', 1), ('ARMATVS/2', 58, 'armatus', 'armed', '', 
'1_4', 1), ('AVTEM', 59, 'autem', 'however', '', '1_4', 1), ('BELLVM', 60, 'bellum', 'war', '', '1_4', 1), ('BONVS', 61, 'bonus', 'good', '', '1_4', 1), ('CASTRA/2', 62, 'castra', 'camp', '', '1_4', 1), ('DO', 63, 'do', 'to give', '', '1_4', 1), ('DOLVS', 64, 'dolus', 'trickery, deception', '', '1_4', 1), ('EX', 65, 'e', 'from, out of (w/ abl.)', '', '1_4', 1), ('INTRO/2', 66, 'intro', 'to enter', '', '1_4', 1), ('IVBEO', 67, 'iubeo', 'to order somebody (acc.) to do something (inf.)', '', '1_4', 1), ('IVSTVS', 68, 'iustus', 'legitimate, open, just', '', '1_4', 1), ('MAGNVS', 69, 'magnus', 'large, great, important', '', '1_4', 1), ('MALVS/3', 70, 'malus', 'bad', '', '1_4', 1), ('PRAECLARVS', 71, 'praeclarus', 'famous, distinguished', '', '1_4', 1), ('PRAEMIVM', 72, 'praemium', 'reward', '', '1_4', 1), ('ROMANVS/A', 73, 'Romanus', 'Roman; the Romans (pl.)', '', '1_4', 1), ('SED', 74, 'sed', 'but', '', '1_4', 1), ('VENENVM', 75, 'venenum', 'poison', '', '1_4', 1), ('VINCVLVM', 76, 'vinculum', 'chain, fetter', '', '1_4', 1), ('PARO/2', 77, 'paro', 'to prepare, get ready, design', '', '1_5', 2), ('AB', 78, 'a', 'by, from (w/ abl.)', '', '1_5', 1), ('AVXILIVM', 79, 'auxilium', 'help', '', '1_5', 1), ('COGITO', 80, 'cogito', 'to think', '', '1_5', 1), ('CONSILIVM', 81, 'consilium', 'plan, advice', '', '1_5', 2), ('DE', 82, 'de', 'about, concerning, down from (w/ abl.)', '', '1_5', 1), ('DOLEO', 83, 'doleo', 'to feel pain, to be hurt', '', '1_5', 1), ('EPISTOLA', 84, 'epistula', 'letter', '', '1_5', 1), ('FAMILIA', 85, 'familia', 'family, household', '', '1_5', 1), ('GAVDIVM', 86, 'gaudium', 'joy', '', '1_5', 1), ('LACRIMA', 87, 'lacrima', 'tear', '', '1_5', 1), ('LONGE', 88, 'longe', 'far', '', '1_5', 1), ('LONGVS', 89, 'longus', 'long', '', '1_5', 1), ('MISER', 90, 'miser', 'wretched, sad, miserable', '', '1_5', 1), ('NAM', 91, 'nam', 'for, in fact', '', '1_5', 1), ('NONSOLVM', 92, 'non', 'not only…, but also…', '', '1_5', 1), ('PVLCHER', 93, 'pulcher', 'beautiful, nice', '', '1_5', 1), ('SEMPER', 94, 'semper', 'always', '', '1_5', 1), ('TAMEN', 95, 'tamen', 'however', '', '1_5', 2), ('SVM/1', 96, 'sum', 'to be', '', '1_6', 3), ('DOCEO', 97, 'doceo', 'to teach', '', '1_6', 1), ('DVM/2', 98, 'dum', 'while', '', '1_6', 1), ('EXEMPLVM', 99, 'exemplum', 'example', '', '1_6', 1), ('FIRMO', 100, 'firmo', 'to strengthen', '', '1_6', 1), ('IACEO', 101, 'iaceo', 'to lie down, to be inert', '', '1_6', 1), ('IVDICO', 102, 'iudico', 'to judge', '', '1_6', 1), ('LIBER/1', 103, 'liber', 'book', '', '1_6', 1), ('LITTERA', 104, 'littera', 'letter of the alphabet (sing.); literature, letter (pl.)', '', '1_6', 1), ('MANEO', 105, 'maneo', 'to remain', '', '1_6', 1), ('MEMORIA', 106, 'memoria', 'memory', '', '1_6', 1), ('MVLTVS', 107, 'multus', 'much, many', '', '1_6', 1), ('POSSVM/1', 108, 'possum', 'to be able, can', '', '1_6', 1), ('PROPTER/2', 109, 'propter', 'because of, on account of (w/ acc.)', '', '1_6', 1), ('SAEPE', 110, 'saepe', 'often', '', '1_6', 1), ('SERVO', 111, 'servo', 'to save, preserve', '', '1_6', 1), ('SOLEO', 112, 'soleo', 'to be accustomed (w/ inf.)', '', '1_6', 1), ('TENEBRAE', 113, 'tenebrae', 'shadows, darkness (pl.)', '', '1_6', 1), ('VITA', 114, 'vita', 'life', '', '1_6', 1), ('AESTIMO', 115, 'aestimo', 'to regard, esteem', '', '1_7', 1), ('AESTIMOVNIVSASSIS', 116, 'aestimo', 'I do not care a bit ', '', '1_7', 1), ('AMOR', 117, 'amor', 'love', '', '1_7', 1), ('DELICIA/1', 118, 'delicia', 'delight, pet', '', '1_7', 1), ('DIGITVS', 119, 'digitus', 'finger', '', '1_7', 1), ('DOMINA', 120, 
'domina', 'mistress', '', '1_7', 1), ('GREMIVM', 121, 'gremium', 'lap', '', '1_7', 1), ('INVIDEO', 122, 'invideo', 'to envy', '', '1_7', 1), ('MEVS', 123, 'meus', 'my', '', '1_7', 1), ('OCVLVS', 124, 'oculus', 'eye', '', '1_7', 1), ('PASSER', 125, 'passer', 'sparrow', '', '1_7', 1), ('PAX', 126, 'pax', 'peace; favor', '', '1_7', 1), ('PVTO', 127, 'puto', 'to think', '', '1_7', 1), ('SENEX/1', 128, 'senex', 'old man', '', '1_7', 1), ('SESESE', 129, 'se', 'him/her/itself', '', '1_7', 1), ('SEVERVS', 130, 'severus', 'serious', '', '1_7', 1), ('SOROR', 131, 'soror', 'sister', '', '1_7', 1), ('SVI/1', 132, 'sui', 'him–/her–/itself', '', '1_7', 1), ('VERBVM', 133, 'verbum', 'word', '', '1_7', 1), ('CONTRA/2', 134, 'contra', 'against (w/ acc.)', '', '1_8', 1), ('DECERNO', 135, 'decerno', 'to decide, determine (often w/ inf.)', '', '1_8', 1), ('DICO/2', 136, 'dico', 'to say', '', '1_8', 1), ('DVX', 137, 'dux', 'leader, general', '', '1_8', 1), ('FORTITVDO', 138, 'fortitudo', 'courage', '', '1_8', 1), ('HOMO', 139, 'homo', 'man, human being; people (pl.)', '', '1_8', 1), ('INTELLIGO', 140, 'intellego', 'to understand', '', '1_8', 1), ('LIBERO', 141, 'libero', 'to free someone (acc.) from something (abl.)', '', '1_8', 1), ('MILES', 142, 'miles', 'soldier', '', '1_8', 1), ('NAVIGO', 143, 'navigo', 'to sail, navigate', '', '1_8', 1), ('ORACVLVM', 144, 'oraculum', 'oracle, prophecy', '', '1_8', 1), ('PETO', 145, 'peto', 'to seek', '', '1_8', 1), ('REX', 146, 'rex', 'king', '', '1_8', 1), ('TANDEM', 147, 'tandem', 'finally', '', '1_8', 1), ('TEMPLVM', 148, 'templum', 'temple', '', '1_8', 1), ('TIMOR', 149, 'timor', 'fear', '', '1_8', 1), ('TVM', 150, 'tum', 'then, at that time', '', '1_8', 2), ('VINCO', 151, 'vinco', 'to conquer', '', '1_8', 1), ('ANIMAL', 152, 'animal', 'animal', '', '1_9', 1), ('ARMA', 153, 'arma', 'weapons (pl.)', '', '1_9', 1), ('AVDIO', 154, 'audio', 'to hear, listen', '', '1_9', 1), ('CAPVT', 155, 'caput', 'head', '', '1_9', 1), ('CIVIS', 156, 'civis', 'citizen', '', '1_9', 1), ('CONSVL', 157, 'consul', 'consul', '', '1_9', 1), ('CORPVS', 158, 'corpus', 'body', '', '1_9', 1), ('CREDO', 159, 'credo', 'to believe somebody (w/ dat.)', '', '1_9', 1), ('EXEMPLAR', 160, 'exemplar', 'example', '', '1_9', 1), ('GERO', 161, 'gero', 'to carry; to behave (w/ se)', '', '1_9', 2), ('MARE', 162, 'mare', 'sea', '', '1_9', 1), ('MORS', 163, 'mors', 'death', '', '1_9', 1), ('MVLIER', 164, 'mulier', 'woman', '', '1_9', 1), ('ORATIO', 165, 'oratio', 'oration, speech', '', '1_9', 1), ('SCIO', 166, 'scio', 'to know', '', '1_9', 1), ('SENTIO', 167, 'sentio', 'to perceive', '', '1_9', 1), ('TEMPVS/1', 168, 'tempus', 'time', '', '1_9', 1), ('VENIO', 169, 'venio', 'to come', '', '1_9', 1), ('VRBS', 170, 'urbs', 'city', '', '1_9', 1), ('ACER/2', 171, 'acer', 'keen, fierce', '', '1_10', 1), ('AEDIFICO', 172, 'aedifico', 'to build', '', '1_10', 1), ('CAPIO/2', 173, 'capio', 'to take, adopt, capture', '', '1_10', 1), ('CELEBER', 174, 'celeber', 'renowned, well–known, crowded', '', '1_10', 1), ('CVPIO', 175, 'cupio', 'to desire, want', '', '1_10', 1), ('DELEO', 176, 'deleo', 'to destroy', '', '1_10', 1), ('DEVS', 177, 'deus', 'god', '', '1_10', 1), ('DONVM', 178, 'donum', 'gift', '', '1_10', 1), ('EQVVS', 179, 'equus', 'horse', '', '1_10', 1), ('FELIX', 180, 'felix', 'fortunate, happy', '', '1_10', 1), ('FLAMMA', 181, 'flamma', 'flame', '', '1_10', 1), ('FORTIS', 182, 'fortis', 'brave, strong', '', '1_10', 1), ('FVGIO', 183, 'fugio', 'to flee, run away', '', '1_10', 1), ('HOSTIS', 184, 'hostis', 'enemy', '', 
'1_10', 1), ('MOVEO', 185, 'moveo', 'to move', '', '1_10', 1), ('NEC/2', 186, 'nec', 'nor; and not', '', '1_10', 2), ('NOX', 187, 'nox', 'night', '', '1_10', 1), ('PAVCI', 188, 'paucus', 'few', '', '1_10', 1), ('PERICVLVM', 189, 'periculum', 'danger', '', '1_10', 1), ('PVGNO', 190, 'pugno', 'to fight', '', '1_10', 1), ('AGO', 191, 'ago', 'to drive, lead, do, behave', '', '1_11', 1), ('ARDEO', 192, 'ardeo', 'to burn, be on fire', '', '1_11', 1), ('CONSPICIO', 193, 'conspicio', 'to look at, observe', '', '1_11', 1), ('CRVDELIS', 194, 'crudelis', 'cruel', '', '1_11', 1), ('DOLOR', 195, 'dolor', 'grief, pain', '', '1_11', 1), ('ITA', 196, 'ita', 'yes', '', '1_11', 2), ('MINIME', 197, 'minime', 'No', '', '1_11', 1), ('MITTO', 198, 'mitto', 'to send', '', '1_11', 1), ('NE/2', 199, 'ne', '(added to the first word of a question', '', '1_11', 1), ('NOVVS', 200, 'novus', 'new', '', '1_11', 1), ('PARVM/2', 201, 'parum', 'too little', '', '1_11', 2), ('QVE', 202, 'que', 'and', '', '1_11', 1), ('QVOQVE', 203, 'quoque', 'also', '', '1_11', 1), ('REGINA', 204, 'regina', 'queen', '', '1_11', 1), ('RELINQVO', 205, 'relinquo', 'to abandon', '', '1_11', 1), ('SILVA', 206, 'silva', 'forest', '', '1_11', 1), ('SPELVNCA', 207, 'spelunca', 'cave', '', '1_11', 1), ('TEMPESTAS', 208, 'tempestas', 'season', '', '1_11', 1), ('VNA', 209, 'una', 'together', '', '1_11', 1), ('BELLVMGERO', 210, 'bellum', 'to wage war', '', '1_12', 1), ('CONSVMO', 211, 'consumo', 'to consume', '', '1_12', 1), ('DEXTERA', 212, 'dextera', 'right hand', '', '1_12', 1), ('FACIO', 213, 'facio', 'to do, make', '', '1_12', 1), ('IBI', 214, 'ibi', 'there', '', '1_12', 1), ('IGNIS', 215, 'ignis', 'fire', '', '1_12', 1), ('INQVIO', 216, 'inquam', 'to say (used with direct speech)', '', '1_12', 3), ('IRA', 217, 'ira', 'anger', '', '1_12', 1), ('IS', 218, 'is', 's/he/it, this, that', '', '1_12', 1), ('NOMEN', 219, 'nomen', 'name', '', '1_12', 1), ('NOS', 220, 'nos', 'we; us', '', '1_12', 1), ('NOSTER', 221, 'noster', 'our, ours', '', '1_12', 1), ('OCCIDO/2', 222, 'occido', 'to strike down, knock down', '', '1_12', 1), ('OSTENDO', 223, 'ostendo', 'to show', '', '1_12', 1), ('PONO', 224, 'pono', 'to place', '', '1_12', 1), ('PROPE/2', 225, 'prope', 'near', '', '1_12', 2), ('PROVIRIBVS', 226, 'pro', 'with all one’s might', '', '1_12', 1), ('SIMILIS', 227, 'similis', 'similar', '', '1_12', 1), ('STATIM', 228, 'statim', 'immediately', '', '1_12', 1), ('TANTVS', 229, 'tantus', 'so much', '', '1_12', 1), ('TVVS', 230, 'tuus', 'your', '', '1_12', 1), ('VESTER', 231, 'vester', 'your', '', '1_12', 1), ('VIS', 232, 'vis', 'force', '', '1_12', 1), ('VOS', 233, 'vos', 'you', '', '1_12', 1), ('EGO', 234, 'ego', 'I', '', '1_13', 3), ('TV', 235, 'tu', 'you', '', '1_13', 3), ('TVM', 236, 'tum', 'then, at that time', '', '1_13', 2), ('ALIVS', 237, 'alius', 'another, other', '', '1_13', 1), ('APVD', 238, 'apud', 'at the house of (w/ acc.)', '', '1_13', 1), ('ATQVE/1', 239, 'atque', 'as', '', '1_13', 2), ('DISCEDO/1', 240, 'discedo', 'to leave, withdraw, go away', '', '1_13', 1), ('DIVES', 241, 'dives', 'rich', '', '1_13', 1), ('DOCTVS', 242, 'doctus', 'learned', '', '1_13', 1), ('DVCO', 243, 'duco', 'to lead, take', '', '1_13', 2), ('ENIM/2', 244, 'enim', 'for, in fact', '', '1_13', 1), ('IVDEX', 245, 'iudex', 'judge', '', '1_13', 1), ('LICET/1', 246, 'licet', 'it is allowed, permitted (for someone)(to do something)(w/ dat. and inf.) 
', '', '1_13', 1), ('NIHIL', 247, 'nihil', 'nothing', '', '1_13', 1), ('NOLO', 248, 'nolo', 'not to want, to be unwilling', '', '1_13', 2), ('OMNIS', 249, 'omnis', 'each, every, all', '', '1_13', 1), ('PRO/1', 250, 'pro', 'for, on behalf of (w/ abl.)', '', '1_13', 1), ('QVID', 251, 'quid', 'what; why', '', '1_13', 1), ('RESPONDEO', 252, 'respondeo', 'to answer', '', '1_13', 1), ('ROGO', 253, 'rogo', 'to ask', '', '1_13', 1), ('SVVS', 254, 'suus', 'his, her, its, their', '', '1_13', 2), ('TANTVM/2', 255, 'tantum', 'only', '', '1_13', 1), ('VALE', 256, 'vale', 'to greetings! farewell!', '', '1_13', 1), ('VALEO', 257, 'valeo', 'to be able (w/ inf.); to be in good health', '', '1_13', 3), ('ALBVS', 258, 'albus', 'white', '', '1_14', 1), ('ARBOR', 259, 'arbor', 'tree', '', '1_14', 1), ('CADO', 260, 'cado', 'to fall', '', '1_14', 1), ('COMEDO/2', 261, 'comedo', 'to eat', '', '1_14', 1), ('CONVENIO', 262, 'convenio', 'to meet', '', '1_14', 1), ('FLVO', 263, 'fluo', 'to flow', '', '1_14', 1), ('GLADIVS', 264, 'gladius', 'sword', '', '1_14', 1), ('IAM', 265, 'iam', 'already, yet', '', '1_14', 1), ('MOX', 266, 'mox', 'soon', '', '1_14', 1), ('ODIVM', 267, 'odium', 'hatred', '', '1_14', 2), ('OS/1', 268, 'os', 'mouth', '', '1_14', 1), ('PARENS/1', 269, 'parens', 'parent', '', '1_14', 1), ('PECTVS', 270, 'pectus', 'chest', '', '1_14', 1), ('PER', 271, 'per', 'through (w/ acc.)', '', '1_14', 1), ('PRIMVS', 272, 'primus', 'first', '', '1_14', 1), ('QVI/1', 273, 'qui', 'who, which (rel. pronoun); what? which? (inter. adj.) ', '', '1_14', 2), ('RVBER', 274, 'ruber', 'red', '', '1_14', 1), ('SANGVIS', 275, 'sanguis', 'blood', '', '1_14', 1), ('SEPARO/2', 276, 'separo', 'to separate, divide', '', '1_14', 1), ('TANGO', 277, 'tango', 'to touch', '', '1_14', 1), ('INQVIO', 278, 'inquam', 'to say (used with direct speech)', '', '1_15', 3), ('QVI/1', 279, 'qui', 'who, which (rel. pronoun); what? which? (inter. adj.) ', '', '1_15', 2), ('ANTE/2', 280, 'ante', 'in front of (w/ acc.)', '', '1_15', 1), ('ARGVMENTVM', 281, 'argumentum', 'proof, indication, argument', '', '1_15', 1), ('CVR/1', 282, 'cur', 'why', '', '1_15', 1), ('DIFFICILIS', 283, 'difficilis', 'difficult', '', '1_15', 1), ('ECCE', 284, 'ecce', 'look here!', '', '1_15', 1), ('ETIAM', 285, 'etiam', 'even, also', '', '1_15', 1), ('FORSITAN', 286, 'forsan', 'perhaps', '', '1_15', 1), ('NEGLIGO', 287, 'neglego', 'to neglect', '', '1_15', 1), ('PARVVS/2', 288, 'parvus', 'small', '', '1_15', 1), ('QVIS/1', 289, 'quis', 'who? which? (inter. 
pronoun)', '', '1_15', 1), ('RVSTICVS/2', 290, 'rusticus', 'rural, rustic', '', '1_15', 1), ('SAXVM', 291, 'saxum', 'stone, rock', '', '1_15', 1), ('SENECTVS/1', 292, 'senectus', 'old age', '', '1_15', 1), ('SICVT/1', 293, 'sicut', 'just as', '', '1_15', 1), ('STO', 294, 'sto', 'stand', '', '1_15', 1), ('VBIQVE', 295, 'ubique', 'everywhere', '', '1_15', 1), ('VERVS', 296, 'verus', 'real, true', '', '1_15', 1), ('VETVSTVS', 297, 'vetustus', 'old', '', '1_15', 1), ('VILLA', 298, 'villa', 'estate', '', '1_15', 1), ('VMQVAM', 299, 'umquam', 'ever', '', '1_15', 1), ('AVVNCVLVS', 300, 'avunculus', 'uncle', '', '1_16', 1), ('CAELVM/1', 301, 'caelum', 'sky, heaven, weather', '', '1_16', 1), ('CAVSA', 302, 'causa', 'cause, reason', '', '1_16', 1), ('CINIS', 303, 'cinis', 'ash', '', '1_16', 1), ('CLADES', 304, 'clades', 'disaster', '', '1_16', 1), ('CLASSIS', 305, 'classis', 'fleet, class (of people)', '', '1_16', 1), ('FEMINA', 306, 'femina', 'woman', '', '1_16', 1), ('FVMVS', 307, 'fumus', 'smoke', '', '1_16', 1), ('FVNESTVS', 308, 'funestus', 'deadly', '', '1_16', 1), ('IGITVR', 309, 'igitur', 'therefore', '', '1_16', 1), ('INCENDIVM', 310, 'incendium', 'conflagration, eruption', '', '1_16', 1), ('LEGO/2', 311, 'lego', 'to read, choose', '', '1_16', 1), ('LITVS/2', 312, 'litus', 'shore', '', '1_16', 1), ('MATER', 313, 'mater', 'mother', '', '1_16', 1), ('MONS', 314, 'mons', 'mountain', '', '1_16', 1), ('NAVIS', 315, 'navis', 'ship', '', '1_16', 1), ('NVBES', 316, 'nubes', 'cloud', '', '1_16', 1), ('NVMQVAM', 317, 'numquam', 'never', '', '1_16', 1), ('OPPRIMO', 318, 'opprimo', 'to overwhelm, suppress', '', '1_16', 1), ('PARS', 319, 'pars', 'part', '', '1_16', 1), ('STVDEO', 320, 'studeo', 'to study, be eager for, be interested in (w/ dat.)', '', '1_16', 1), ('DOMVS', 321, 'domus', 'home', '', '1_17', 2), ('ALO', 322, 'alo', 'to feed, nourish', '', '1_17', 1), ('AMITTO', 323, 'amitto', 'to lose', '', '1_17', 1), ('CORNV', 324, 'cornu', 'horn', '', '1_17', 1), ('CORRIPIO', 325, 'corripio', 'to seize, occupy, engulf', '', '1_17', 1), ('CVRRO', 326, 'curro', 'to run', '', '1_17', 1), ('DEVASTO', 327, 'devasto', 'to lay waste', '', '1_17', 1), ('EXSTINGVO', 328, 'exstinguo', 'to extinguish', '', '1_17', 1), ('FACILE', 329, 'facile', 'easliy', '', '1_17', 1), ('IACIO', 330, 'iacio', 'to throw', '', '1_17', 1), ('IMPERATOR', 331, 'imperator', 'general, emperor', '', '1_17', 1), ('IMPETVS', 332, 'impetus', 'impetus, force, attack', '', '1_17', 1), ('INITIVM', 333, 'initium', 'beginning', '', '1_17', 1), ('IVSSVS', 334, 'iussus', 'order', '', '1_17', 1), ('LOCVS', 335, 'locus', 'place (sing.); passages of a book (m. pl.); geographical places(n. 
pl.)', '', '1_17', 1), ('MANVS/1', 336, 'manus', 'hand', '', '1_17', 1), ('MVRVS', 337, 'murus', 'wall', '', '1_17', 1), ('SINE', 338, 'sine', 'without (w/ abl.)', '', '1_17', 1), ('TEMPTO', 339, 'tempto', 'to try', '', '1_17', 1), ('TVMVLTVS', 340, 'tumultus', 'confusion', '', '1_17', 1), ('VENTVS', 341, 'ventus', 'wind', '', '1_17', 1), ('DVCO', 342, 'duco', 'to lead, take', '', '1_18', 2), ('ITA', 343, 'ita', 'yes', '', '1_18', 2), ('COLO/2', 344, 'colo', 'to worship, cultivate', '', '1_18', 1), ('CVM/3', 345, 'cum', 'when, after', '', '1_18', 2), ('DEA', 346, 'dea', 'goddess', '', '1_18', 1), ('DIES', 347, 'dies', 'day', '', '1_18', 1), ('DORMIO', 348, 'dormio', 'to sleep', '', '1_18', 1), ('EXCITO/1', 349, 'excito', 'to awaken, rouse, stir up', '', '1_18', 1), ('EXCLAMO', 350, 'exclamo', 'to exclaim', '', '1_18', 1), ('FACIES', 351, 'facies', 'face', '', '1_18', 1), ('FATVM', 352, 'fatum', 'fate, destiny', '', '1_18', 1), ('MARITVS/1', 353, 'maritus', 'husband', '', '1_18', 1), ('MERIDIES', 354, 'meridies', 'midday', '', '1_18', 2), ('MVLTVM/2', 355, 'multum', 'much', '', '1_18', 1), ('OCCVLTO', 356, 'occulto', 'to hide', '', '1_18', 1), ('PATER', 357, 'pater', 'father', '', '1_18', 2), ('POST/2', 358, 'post', 'after (w/ acc.)', '', '1_18', 1), ('QVAERO', 359, 'quaero', 'to look for, search', '', '1_18', 1), ('RES', 360, 'res', 'thing, matter', '', '1_18', 1), ('SI/2', 361, 'si', 'if', '', '1_18', 1), ('SOMNVS', 362, 'somnus', 'sleep', '', '1_18', 1), ('TAM', 363, 'tam', 'so ', '', '1_18', 1), ('VXOR', 364, 'uxor', 'wife', '', '1_18', 2), ('BARBA', 365, 'barba', 'beard', '', '1_19', 1), ('CARO/1', 366, 'caro', 'meat, flesh', '', '1_19', 1), ('CELERITER', 367, 'celeriter', 'swiftly', '', '1_19', 1), ('COQVO', 368, 'coquo', 'to cook', '', '1_19', 1), ('CRESCO', 369, 'cresco', 'to grow', '', '1_19', 1), ('FEROX', 370, 'ferox', 'fierce, ferocious', '', '1_19', 1), ('FORIS/2', 371, 'foris', 'outside, in the open', '', '1_19', 1), ('HERBA', 372, 'herba', 'plant, vegetation', '', '1_19', 1), ('HIC/1', 373, 'hic', 'this', '', '1_19', 1), ('INTER', 374, 'inter', 'between, among (w/ acc.)', '', '1_19', 1), ('PELLIS', 375, 'pellis', 'skin, hide', '', '1_19', 1), ('POSTQVAM', 376, 'postquam', 'after', '', '1_19', 1), ('PROELIVM', 377, 'proelium', 'battle, combat', '', '1_19', 1), ('SANO', 378, 'sano', 'to heal', '', '1_19', 1), ('SEDEO', 379, 'sedeo', 'to sit', '', '1_19', 1), ('TERO', 380, 'tero', 'to wear out, rub', '', '1_19', 1), ('TERRIBILIS', 381, 'terribilis', 'terrifying', '', '1_19', 1), ('VESTIMENTVM', 382, 'vestimentum', 'garment, clothes (pl.)', '', '1_19', 1), ('VIVO', 383, 'vivo', 'to live', '', '1_19', 1), ('VVLNERO', 384, 'vulnero', 'to wound', '', '1_19', 1), ('VVLNVS', 385, 'vulnus', 'wound', '', '1_19', 1), ('ABVNDO', 386, 'abundo', 'to abound with (w/ abl.)', '', '1_20', 1), ('ADOLESCENS/2', 387, 'adulescens', 'young man, young lady', '', '1_20', 1), ('AEQVVS', 388, 'aequus', 'even', '', '1_20', 1), ('COR', 389, 'cor', 'heart', '', '1_20', 1), ('DELECTO', 390, 'delecto', 'to delight, please', '', '1_20', 1), ('DIVINVS/2', 391, 'divinus', 'divine', '', '1_20', 1), ('EGEO', 392, 'egeo', 'to lack something (abl.)', '', '1_20', 1), ('FVR', 393, 'fur', 'thief', '', '1_20', 1), ('FVRTVM', 394, 'furtum', 'theft', '', '1_20', 1), ('HVMANVS', 395, 'humanus', 'human', '', '1_20', 1), ('ILLE', 396, 'ille', 'that', '', '1_20', 1), ('INIQVITAS', 397, 'iniquitas', 'injustice', '', '1_20', 1), ('LEX', 398, 'lex', 'law', '', '1_20', 1), ('LVDO', 399, 'ludo', 'to play', '', '1_20', 1), 
('NOCTV', 400, 'noctu', 'during the night', '', '1_20', 1), ('PAENE', 401, 'paene', 'almost', '', '1_20', 1), ('PAVPER', 402, 'pauper', 'poor', '', '1_20', 1), ('PLENVS', 403, 'plenus', 'full of (w/ gen. or abl.)', '', '1_20', 1), ('POMVM', 404, 'pomum', 'fruit', '', '1_20', 1), ('PVNIO', 405, 'punio', 'to punish', '', '1_20', 1), ('ACCIPIO', 406, 'accipio', 'to accept, receive', '', '1_21', 1), ('ACCVSO', 407, 'accuso', 'to accuse someone (acc.) of something (gen.)', '', '1_21', 1), ('ALIENVS/2', 408, 'alienus', 'foreign to, inconsistent with (w/ a/ab and abl.)', '', '1_21', 1), ('AXIS', 409, 'axis', 'axle, axis', '', '1_21', 1), ('CIRCVM/2', 410, 'circum', 'around (w/ acc.)', '', '1_21', 1), ('CONSTANTIA', 411, 'constantia', 'constancy', '', '1_21', 1), ('DESCENDO', 412, 'descendo', 'to descend', '', '1_21', 1), ('DIVITIAE', 413, 'divitia', 'wealth, riches (pl.)', '', '1_21', 1), ('ERIPIO', 414, 'eripio', 'to snatch away', '', '1_21', 1), ('ERRO/2', 415, 'erro', 'to wander, make a mistake', '', '1_21', 1), ('EXTERNVS', 416, 'externus', 'outward, external', '', '1_21', 1), ('FORTVNA', 417, 'fortuna', 'fortune, the goddess Fortune', '', '1_21', 1), ('FVTVRVS', 418, 'futurus', 'about to be (from sum)', '', '1_21', 1), ('HONOR', 419, 'honor', 'honor, public office or distinction', '', '1_21', 1), ('MVTO/2', 420, 'muto', 'to change', '', '1_21', 1), ('POSSIDEO', 421, 'possideo', 'to possess', '', '1_21', 1), ('PROCERTO', 422, 'pro', 'for certain, for sure', '', '1_21', 1), ('RECIPIO', 423, 'recipio', 'to take back', '', '1_21', 2), ('REPREHENDO', 424, 'reprehendo', 'to blame, rebuke', '', '1_21', 1), ('ROTA', 425, 'rota', 'wheel', '', '1_21', 1), ('TOLLO', 426, 'tollo', 'to lift up, raise; to destroy', '', '1_21', 1), ('VERSO', 427, 'verso', 'to turn', '', '1_21', 1), ('VLLVS', 428, 'ullus', 'any', '', '1_21', 1), ('CONSILIVM', 429, 'consilium', 'plan, advice', '', '2_1', 2), ('MERIDIES', 430, 'meridies', 'midday', '', '2_1', 2), ('PROPE/2', 431, 'prope', 'near', '', '2_1', 2), ('ASPICIO', 432, 'aspicio', 'to look at, catch a glimpse of', '', '2_1', 1), ('ETET', 433, 'et', 'both…and…', '', '2_1', 1), ('GENS', 434, 'gens', 'tribe, population', '', '2_1', 1), ('GIGNO', 435, 'gigno', 'to give birth, produce', '', '2_1', 1), ('HODIE', 436, 'hodie', 'today', '', '2_1', 1), ('INCOLA', 437, 'incola', 'inhabitant', '', '2_1', 1), ('INSVLA', 438, 'insula', 'island', '', '2_1', 2), ('INVENIO', 439, 'invenio', 'to come upon, find', '', '2_1', 1), ('MOS', 440, 'mos', 'custom, habit; morals (pl.)', '', '2_1', 1), ('MVNDVS/1', 441, 'mundus', 'world', '', '2_1', 1), ('NE/4', 442, 'ne', 'that not, not to, lest ', '', '2_1', 3), ('OCCVPO/2', 443, 'occupo', 'to occupy', '', '2_1', 1), ('ORTVS', 444, 'ortus', 'origin, beginning, raising', '', '2_1', 1), ('PISCIS', 445, 'piscis', 'a fish', '', '2_1', 1), ('PROCVL', 446, 'procul', 'far, far away', '', '2_1', 1), ('PROMITTO', 447, 'promitto', 'to promise', '', '2_1', 1), ('SEPTENTRIONALIS', 448, 'septentrionalis', 'northern', '', '2_1', 1), ('SITVS/2', 449, 'situs', 'located, situated', '', '2_1', 1), ('SOL', 450, 'sol', 'sun', '', '2_1', 1), ('VTINAM', 451, 'utinam', 'if only', '', '2_1', 2), ('GERO', 452, 'gero', 'to carry; to behave (w/ se)', '', '2_2', 2), ('ODIVM', 453, 'odium', 'hatred', '', '2_2', 2), ('VALEO', 454, 'valeo', 'to be able (w/ inf.); to be in good health', '', '2_2', 3), ('ALTVS', 455, 'altus', 'tall, deep', '', '2_2', 1), ('ANNVS', 456, 'annus', 'year', '', '2_2', 1), ('ARGENTVM', 457, 'argentum', 'silver', '', '2_2', 1), ('AVRVM', 458, 
'aurum', 'gold', '', '2_2', 1), ('BREVIS', 459, 'brevis', 'short', '', '2_2', 1), ('CLARVS', 460, 'clarus', 'clear, distinguished', '', '2_2', 1), ('CVSTOS', 461, 'custos', 'guard', '', '2_2', 1), ('EQVES', 462, 'eques', 'horseman', '', '2_2', 1), ('FINIS', 463, 'finis', 'end', '', '2_2', 1), ('GRAVIS', 464, 'gravis', 'serious, heavy', '', '2_2', 1), ('INTERDVM', 465, 'interdum', 'sometimes', '', '2_2', 1), ('LIS', 466, 'lis', 'dispute, quarrel', '', '2_2', 1), ('MANE/2', 467, 'mane', 'in the morning', '', '2_2', 1), ('ODIOHABEO', 468, 'odio', 'to hate somebody', '', '2_2', 1), ('SINO', 469, 'sino', 'to allow somebody (acc.) to do something (inf.)', '', '2_2', 1), ('VEL/1', 470, 'vel', 'or', '', '2_2', 1), ('VESTIS', 471, 'vestis', 'clothes, attire', '', '2_2', 1), ('VOX', 472, 'vox', 'voice', '', '2_2', 1), ('VT/4', 473, 'ut', 'that, to, in order to, so that', '', '2_2', 4), ('VVLTVS', 474, 'vultus', 'face', '', '2_2', 1), ('VXOR', 475, 'uxor', 'wife', '', '2_3', 2), ('AT/2', 476, 'at', 'but', '', '2_3', 1), ('CONIVX', 477, 'coniunx', 'spouse', '', '2_3', 1), ('DISCIPVLA', 478, 'discipula', 'student', '', '2_3', 1), ('DISCO', 479, 'disco', 'to learn', '', '2_3', 1), ('DOMINVS', 480, 'dominus', 'master, lord', '', '2_3', 1), ('FAMA', 481, 'fama', 'fame, name, reputation', '', '2_3', 1), ('FRATER', 482, 'frater', 'brother', '', '2_3', 1), ('IMPROBVS', 483, 'improbus', 'wicked, bad', '', '2_3', 1), ('IVNGO', 484, 'iungo', 'to join', '', '2_3', 1), ('MAGISTER', 485, 'magister', 'teacher', '', '2_3', 1), ('MATRIMONIVM', 486, 'matrimonium', 'marriage', '', '2_3', 1), ('NE/4', 487, 'ne', 'that not, not to, lest ', '', '2_3', 3), ('NVSQVAM', 488, 'nusquam', 'nowhere', '', '2_3', 1), ('PARIO/2', 489, 'pario', 'to give birth to', '', '2_3', 1), ('PERDO', 490, 'perdo', 'to lose, waste', '', '2_3', 1), ('SALVS', 491, 'salus', 'health, welfare', '', '2_3', 1), ('SALVTEMDICERE', 492, 'salutem', 'to greet (customary opening to letter) ', '', '2_3', 1), ('SCRIBO', 493, 'scribo', 'to write', '', '2_3', 1), ('VT/4', 494, 'ut', 'that, to, in order to, so that', '', '2_3', 4), ('VXOREMDEDVCERE', 495, 'uxorem', 'to marry a woman, to take as a wife', '', '2_3', 1), ('NEC/2', 496, 'nec', 'nor; and not', '', '2_4', 2), ('RECIPIO', 497, 'recipio', 'to take back', '', '2_4', 2), ('AGMEN', 498, 'agmen', 'marching column', '', '2_4', 1), ('APERIO', 499, 'aperio', 'to open', '', '2_4', 1), ('COEPIO', 500, 'coepi', 'to begin (w/ inf.)', '', '2_4', 1), ('DEFENDO', 501, 'defendo', 'to defend', '', '2_4', 1), ('EDO/1', 502, 'edo', 'to produce, give forth', '', '2_4', 1), ('EXTRA/2', 503, 'extra', 'outside of (w/ acc.)', '', '2_4', 1), ('FVRO', 504, 'furo', 'to rage, be insane', '', '2_4', 1), ('INGENS', 505, 'ingens', 'huge', '', '2_4', 1), ('INVADO/2', 506, 'invado', 'to burst in', '', '2_4', 1), ('LIGNEVS', 507, 'ligneus', 'made of wood', '', '2_4', 1), ('NEQVENEC', 508, 'neque', 'neither..nor…', '', '2_4', 1), ('PARCO', 509, 'parco', 'to spare somebody/thing (w/ dat.)', '', '2_4', 1), ('PONS', 510, 'pons', 'bridge', '', '2_4', 1), ('PORTA', 511, 'porta', 'gate', '', '2_4', 1), ('PRIMO', 512, 'primo', 'at first', '', '2_4', 1), ('QVAM/1', 513, 'quam', 'than (w/ comp. words)', '', '2_4', 2), ('QVANTVS/1', 514, 'quantus', 'how great, how much (inter. or rel. 
adj.)', '', '2_4', 1), ('RESISTO', 515, 'resisto', 'to resist (w/ dat.)', '', '2_4', 1), ('SIMVL/1', 516, 'simul', 'at the same time', '', '2_4', 1), ('TVTVS', 517, 'tutus', 'safe', '', '2_4', 1), ('VACVVS', 518, 'vacuus', 'empty of (w/ abl.)', '', '2_4', 1), ('VALEO', 519, 'valeo', 'to be able (w/ inf.); to be in good health', '', '2_4', 3), ('VICTOR', 520, 'victor', 'victor', '', '2_4', 1), ('VTINAM', 521, 'utinam', 'if only', '', '2_4', 2), ('BIBO/2', 522, 'bibo', 'to drink', '', '2_5', 1), ('CARMEN/1', 523, 'carmen', 'song, poem', '', '2_5', 1), ('CIBVS', 524, 'cibus', 'food', '', '2_5', 1), ('DVLCIS', 525, 'dulcis', 'sweet', '', '2_5', 1), ('FLVMEN', 526, 'flumen', 'river', '', '2_5', 1), ('IMMEMOR', 527, 'immemor', 'forgetful of (w/ gen.)', '', '2_5', 1), ('IOCVS', 528, 'iocus', 'joke', '', '2_5', 1), ('IVVENTVS', 529, 'iuventus', 'youth', '', '2_5', 1), ('LEVIS/1', 530, 'levis', 'light', '', '2_5', 1), ('MENS', 531, 'mens', 'mind, spirit', '', '2_5', 1), ('NE/4', 532, 'ne', 'that not, not to, lest ', '', '2_5', 3), ('ORO', 533, 'oro', 'to ask, entreat', '', '2_5', 1), ('PLACEO', 534, 'placeo', 'to please, be agreeable to somebody', '', '2_5', 1), ('PROXIMVS/2', 535, 'proximus', 'nearest', '', '2_5', 1), ('TAMQVAM/2', 536, 'tam', 'so…as…', '', '2_5', 1), ('VEHEMENS', 537, 'vehemens', 'violent, vehement', '', '2_5', 1), ('VETVS', 538, 'vetus', 'old', '', '2_5', 1), ('VINVM', 539, 'vinum', 'wine', '', '2_5', 1), ('VIRTVS', 540, 'virtus', 'courage, virtue', '', '2_5', 1), ('VITIVM', 541, 'vitium', 'vice', '', '2_5', 1), ('VT/4', 542, 'ut', 'that, to, in order to, so that', '', '2_5', 4), ('PATER', 543, 'pater', 'father', '', '2_6', 2), ('DECIPIO', 544, 'decipio', 'to deceive', '', '2_6', 1), ('DILIGO/3', 545, 'diligo', 'to love, esteem highly', '', '2_6', 1), ('DVO', 546, 'duo', 'two', '', '2_6', 1), ('EXERCITVS/1', 547, 'exercitus', 'army', '', '2_6', 1), ('FIDELIS/2', 548, 'fidelis', 'faithful, loyal', '', '2_6', 1), ('HERES', 549, 'heres', 'heir', '', '2_6', 1), ('IMPERIVM', 550, 'imperium', 'rule, empire, power', '', '2_6', 1), ('INOPIA', 551, 'inopia', 'helplessness, want', '', '2_6', 1), ('LAVDO', 552, 'laudo', 'to praise', '', '2_6', 1), ('NECESSEEST', 553, 'necesse', 'it is necessary for someone (dat.) to do something (inf.)', '', '2_6', 1), ('NEMO', 554, 'nemo', 'no one', '', '2_6', 1), ('PAVLO', 555, 'paulo', 'a little bit, to a small extent', '', '2_6', 1), ('QVAM/1', 556, 'quam', 'than (w/ comp. 
words)', '', '2_6', 2), ('QVANTVM/3', 557, 'quantum', 'to what extent, how much', '', '2_6', 1), ('RESTITVO', 558, 'restituo', 'to restore', '', '2_6', 1), ('SATIS/2', 559, 'satis', 'enough, sufficiently', '', '2_6', 1), ('SECVNDVS/1', 560, 'secundus', 'second', '', '2_6', 1), ('TERTIVS', 561, 'tertius', 'third', '', '2_6', 1), ('TRES', 562, 'tres', 'three', '', '2_6', 1), ('TRISTIS', 563, 'tristis', 'sad', '', '2_6', 1), ('VEHEMENTER', 564, 'vehementer', 'strongly, vehemently', '', '2_6', 1), ('NOLO', 565, 'nolo', 'not to want, to be unwilling', '', '2_7', 2), ('AETAS', 566, 'aetas', 'age', '', '2_7', 1), ('FIDES/2', 567, 'fides', 'faith', '', '2_7', 1), ('FVNDO/2', 568, 'fundo', 'to pour', '', '2_7', 1), ('GLORIA', 569, 'gloria', 'glory', '', '2_7', 1), ('LIBERTAS', 570, 'libertas', 'freedom', '', '2_7', 1), ('LVMEN', 571, 'lumen', 'light', '', '2_7', 1), ('MALO', 572, 'malo', 'to prefer', '', '2_7', 1), ('ORNATVS/1', 573, 'ornatus', 'adorned, ornate, elaborate', '', '2_7', 1), ('OTIVM', 574, 'otium', 'leisure, free time', '', '2_7', 1), ('POTENS', 575, 'potens', 'powerful', '', '2_7', 1), ('PVBLICVS/2', 576, 'publicus', 'common', '', '2_7', 1), ('QVALIS/1', 577, 'qualis', 'what sort of? (inter. adj.)', '', '2_7', 1), ('RESPVBLICA', 578, 'res', 'state', '', '2_7', 1), ('STVDIOSVS', 579, 'studiosus', 'fond of (w/ gen.)', '', '2_7', 1), ('TAMQVAM/1', 580, 'tamquam', 'as', '', '2_7', 1), ('TOT', 581, 'tot', 'so many', '', '2_7', 1), ('TRAHO', 582, 'traho', 'to drag, draw', '', '2_7', 1), ('VBI/1', 583, 'ubi', 'where? (inter. adv)', '', '2_7', 1), ('VIX', 584, 'vix', 'hardly', '', '2_7', 1), ('VNVS', 585, 'unus', 'one', '', '2_7', 1), ('VOLO/3', 586, 'volo', 'to want', '', '2_7', 1), ('VTILIS', 587, 'utilis', 'useful', '', '2_7', 1), ('ADHVC', 588, 'adhuc', 'still, up to this time', '', '2_8', 1), ('ANTIQVVS', 589, 'antiquus', 'ancient', '', '2_8', 1), ('ARS', 590, 'ars', 'science, art, skill', '', '2_8', 1), ('DOMINOR', 591, 'dominor', 'to dominate, rule', '', '2_8', 1), ('HORTOR', 592, 'hortor', 'to exhort, urge', '', '2_8', 1), ('LATINE', 593, 'Latine', 'in Latin', '', '2_8', 1), ('LATINVS/A', 594, 'Latinus', 'Latin, pertaining to Latin', '', '2_8', 1), ('LINGVA', 595, 'lingua', 'language; tongue', '', '2_8', 1), ('LOQVOR', 596, 'loquor', 'to speak', '', '2_8', 1), ('MAGIS/2', 597, 'magis', 'more', '', '2_8', 1), ('MAIOR', 598, 'maior', 'bigger; greater', '', '2_8', 1), ('MAXIMVS', 599, 'maximus', 'greatest', '', '2_8', 1), ('MELIOR', 600, 'melior', 'better', '', '2_8', 1), ('MINIMVS', 601, 'minimus', 'smallest', '', '2_8', 1), ('MINVS', 602, 'minus', 'less', '', '2_8', 2), ('OPTIMVS', 603, 'optimus', 'best', '', '2_8', 1), ('PARTIOR', 604, 'partior', 'to divide, distribute', '', '2_8', 1), ('PATIOR', 605, 'patior', 'to endure, tolerate, suffer', '', '2_8', 1), ('PEIOR', 606, 'peior', 'worse', '', '2_8', 1), ('PESSIMVS', 607, 'pessimus', 'worst', '', '2_8', 1), ('PLVRIMVS', 608, 'plurimus', 'most', '', '2_8', 1), ('PLVS', 609, 'plus', 'more', '', '2_8', 1), ('SEQVOR', 610, 'sequor', 'to follow', '', '2_8', 1), ('VEREOR', 611, 'vereor', 'to fear, respect', '', '2_8', 1), ('ADDO', 612, 'addo', 'to add', '', '2_9', 1), ('AVRIS', 613, 'auris', 'ear', '', '2_9', 1), ('CONOR', 614, 'conor', 'to try', '', '2_9', 1), ('DEMITTO', 615, 'demitto', 'to send down', '', '2_9', 1), ('DISSIMILIS', 616, 'dissimilis', 'dissimilar ', '', '2_9', 1), ('FACILIS', 617, 'facilis', 'easy', '', '2_9', 1), ('FERO', 618, 'fero', 'to carry, bear', '', '2_9', 1), ('FIO', 619, 'fio', 'to be made, become; (impersonally) 
to happen', '', '2_9', 1), ('FRIGVS', 620, 'frigus', 'cold', '', '2_9', 1), ('GENVS/1', 621, 'genus', 'kind', '', '2_9', 1), ('GLACIES', 622, 'glacies', 'ice', '', '2_9', 1), ('GRACILIS', 623, 'gracilis', 'slender', '', '2_9', 1), ('HVMILIS', 624, 'humilis', 'low, humble', '', '2_9', 1), ('ITER', 625, 'iter', 'road, trip ', '', '2_9', 1), ('LABOR/2', 626, 'labor', 'to slide, slip, glide down', '', '2_9', 1), ('MODEROR', 627, 'moderor', 'to manage, direct, guide', '', '2_9', 1), ('NIX', 628, 'nix', 'snow', '', '2_9', 1), ('ONVS', 629, 'onus', 'weight, burden', '', '2_9', 1), ('PERVENIO', 630, 'pervenio', 'to arrive', '', '2_9', 1), ('PROGREDIOR', 631, 'progredior', 'to go forward, proceed', '', '2_9', 1), ('QVOTIENS/2', 632, 'quotiens', 'as often as', '', '2_9', 1), ('SIMVLAC/2', 633, 'simulac', 'as soon as', '', '2_9', 1), ('SVVS', 634, 'suus', 'his, her, its, their', '', '2_10', 2), ('AEDES', 635, 'aedes', 'temple; pl. dwelling, house', '', '2_10', 1), ('EO/1', 636, 'eo', 'to go ', '', '2_10', 1), ('IVCVNDVS', 637, 'iucundus', 'pleasant, nice', '', '2_10', 1), ('LABOR/1', 638, 'labor', 'labor, toil', '', '2_10', 1), ('LAEDO', 639, 'laedo', 'to harm', '', '2_10', 1), ('LIBER/2', 640, 'liber', 'free', '', '2_10', 1), ('LVCRVM', 641, 'lucrum', 'profit, gain', '', '2_10', 1), ('MARITIMVS', 642, 'maritimus', 'maritime', '', '2_10', 1), ('MODVS', 643, 'modus', 'way, method, manner', '', '2_10', 1), ('PAVLISPER', 644, 'paulisper', 'for a little while', '', '2_10', 1), ('PECVNIA', 645, 'pecunia', 'money', '', '2_10', 1), ('PLACIDVS', 646, 'placidus', 'peaceful, calm', '', '2_10', 1), ('POTIVS', 647, 'potius', 'rather', '', '2_10', 1), ('PROSPER', 648, 'prosper', 'fortunate, prosperous', '', '2_10', 1), ('REDDO', 649, 'reddo', 'to give back', '', '2_10', 1), ('SARCINA', 650, 'sarcina', 'burden, baggage', '', '2_10', 1), ('SCELESTVS', 651, 'scelestus', 'wicked', '', '2_10', 1), ('SEMEL', 652, 'semel', 'once', '', '2_10', 1), ('SERENVS', 653, 'serenus', 'calm, clear', '', '2_10', 1), ('PARVM/2', 654, 'parum', 'too little', '', '2_11', 2), ('ALTER', 655, 'alter', 'the other (of two)', '', '2_11', 1), ('GEMMA', 656, 'gemma', 'gem, precious stone', '', '2_11', 1), ('LEGATVS', 657, 'legatus', 'ambassador', '', '2_11', 1), ('MAGNIHABEO', 658, 'magni', 'to esteem a lot', '', '2_11', 1), ('MINVS', 659, 'minus', 'less', '', '2_11', 2), ('NESCIO', 660, 'nescio', 'not to know', '', '2_11', 1), ('NEVTER', 661, 'neuter', 'neither, none (of two)', '', '2_11', 1), ('NVLLVS', 662, 'nullus', 'none', '', '2_11', 1), ('OPERAEPRETIVMEST', 663, 'operae', 'it is worthwhile', '', '2_11', 1), ('POPVLVS/1', 664, 'populus', 'a people, populace', '', '2_11', 1), ('QVOMODO/1', 665, 'quomodo', 'how', '', '2_11', 1), ('SALVTO', 666, 'saluto', 'to greet ', '', '2_11', 1), ('SERVVS/1', 667, 'servus', 'slave, servant', '', '2_11', 1), ('SOLVS', 668, 'solus', 'alone, only', '', '2_11', 1), ('SPECTO', 669, 'specto', 'to watch', '', '2_11', 1), ('TACEO', 670, 'taceo', 'to be silent, keep quiet', '', '2_11', 1), ('TOTVS', 671, 'totus', 'whole, entire', '', '2_11', 1), ('TVRPIS', 672, 'turpis', 'shameful, disgraceful', '', '2_11', 1), ('VTER/4', 673, 'uter', 'who, which (of two)?', '', '2_11', 1), ('VTOR', 674, 'utor', 'to use (w/ abl.)', '', '2_11', 1), ('CVM/3', 675, 'cum', 'when, after', '', '2_12', 2), ('INQVIO', 676, 'inquam', 'to say (used with direct speech)', '', '2_12', 3), ('TAMEN', 677, 'tamen', 'however', '', '2_12', 2), ('CARVS', 678, 'carus', 'dear', '', '2_12', 1), ('INSVLA', 679, 'insula', 'island', '', '2_12', 2), 
('MORIOR', 680, 'morior', 'to die', '', '2_12', 1), ('NIMIS', 681, 'nimis', 'too much', '', '2_12', 1), ('NISI', 682, 'nisi', 'if not, unless', '', '2_12', 1), ('OFFICIVM', 683, 'officium', 'duty', '', '2_12', 1), ('ORBIS', 684, 'orbis', 'circle', '', '2_12', 1), ('ORBISTERRARVM', 685, 'orbis', 'the earth, the world', '', '2_12', 1), ('PROBO', 686, 'probo', 'to approve ', '', '2_12', 1), ('QVAMQVAM/2', 687, 'quamquam', 'although', '', '2_12', 1), ('QVAMVIS/1', 688, 'quamvis', 'although', '', '2_12', 1), ('QVIA', 689, 'quia', 'because', '', '2_12', 1), ('QVIDEM', 690, 'quidem', 'indeed', '', '2_12', 1), ('QVOD/1', 691, 'quod', 'because', '', '2_12', 1), ('SENTENTIA', 692, 'sententia', 'opinion, point of view', '', '2_12', 1), ('SORS', 693, 'sors', 'lot', '', '2_12', 1), ('SPERO', 694, 'spero', 'to hope', '', '2_12', 1), ('SPES', 695, 'spes', 'hope', '', '2_12', 1), ('ATQVE/1', 696, 'atque', 'as', '', '2_13', 2), ('ABSENS', 697, 'absens', 'away, absent', '', '2_13', 1), ('ABSVM/1', 698, 'absum', 'to be away', '', '2_13', 1), ('BENEVOLENTIA', 699, 'benevolentia', 'good will', '', '2_13', 1), ('DECLARO', 700, 'declaro', 'to demonstrate, show, make known, reveal', '', '2_13', 1), ('IDEM', 701, 'idem', 'the same', '', '2_13', 1), ('IPSE', 702, 'ipse', 'self', '', '2_13', 1), ('IRASCOR', 703, 'irascor', 'to be angry at (w/ dat.)', '', '2_13', 1), ('ISTE', 704, 'iste', 'that (of yours)', '', '2_13', 1), ('MIROR', 705, 'miror', 'to marvel, be surprised at', '', '2_13', 1), ('MVLTITVDO', 706, 'multitudo', 'crowd, throng', '', '2_13', 1), ('NEGO', 707, 'nego', 'to deny ', '', '2_13', 1), ('NVMERO/1', 708, 'numero', 'to number, count among', '', '2_13', 1), ('OFFENDO', 709, 'offendo', ']to happen upon, offend', '', '2_13', 1), ('REDEO/1', 710, 'redeo', 'to go back, return', '', '2_13', 1), ('REFERO', 711, 'refero', 'to carry back, report', '', '2_13', 1), ('SOCIVS/1', 712, 'socius', 'associate, partner, ally', '', '2_13', 1), ('TALIS', 713, 'talis', 'such a', '', '2_13', 1), ('TVRRIS', 714, 'turris', 'tower', '', '2_13', 1), ('VENIA', 715, 'venia', 'pardon, indulgence, forgiveness', '', '2_13', 1), ('VERSOR', 716, 'versor', 'to be situated in, be occupied in ', '', '2_13', 1), ('VIRGA', 717, 'virga', 'twig, stick', '', '2_13', 1), ('VOLVNTAS', 718, 'voluntas', 'will', '', '2_13', 1), ('AFFIRMO', 719, 'affirmo', 'to assert, maintain', '', '2_14', 1), ('CIRCVMEO/1', 720, 'circumeo', 'to go around', '', '2_14', 1), ('CONTINEO', 721, 'contineo', 'to hold, keep together, contain', '', '2_14', 1), ('COTIDIANVS', 722, 'cottidianus', 'of every day, daily', '', '2_14', 1), ('ELEMENTVM', 723, 'elementum', 'element', '', '2_14', 1), ('ERGO/2', 724, 'ergo', 'therefore', '', '2_14', 1), ('GRAVITAS', 725, 'gravitas', 'weight, gravity', '', '2_14', 1), ('IMMENSVS', 726, 'immensus', 'immeasurable, immense, endless', '', '2_14', 1), ('INFINITVS', 727, 'infinitus', 'boundless, unlimited', '', '2_14', 1), ('MAXIME', 728, 'maxime', 'most', '', '2_14', 1), ('MEDIVS', 729, 'medius', 'middle', '', '2_14', 1), ('MOTVS', 730, 'motus', 'motion, movement', '', '2_14', 1), ('MVLTO/2', 731, 'multo', 'by much', '', '2_14', 1), ('NATVRA', 732, 'natura', 'nature', '', '2_14', 1), ('NECESSARIO', 733, 'necessario', 'necessarily', '', '2_14', 1), ('PERPERAM', 734, 'perperam', 'wrongly, incorrectly', '', '2_14', 1), ('PONDVS', 735, 'pondus', 'weight', '', '2_14', 1), ('PRAESERTIM', 736, 'praesertim', 'especially', '', '2_14', 1), ('QVIES', 737, 'quies', 'rest, repose', '', '2_14', 1), ('VNDIQVE', 738, 'undique', 'from all parts, from 
everywhere', '', '2_14', 1), ('VOLVO', 739, 'volvo', 'to turn round', '', '2_14', 1), ('VT/4', 740, 'ut', 'that, to, in order to, so that', '', '2_14', 4), ('ANIMADVERTO', 741, 'animadverto', 'to notice', '', '2_15', 1), ('APPROPINQVO', 742, 'appropinquo', 'to approach (w/ dat or ad + acc.)', '', '2_15', 1), ('CERNO', 743, 'cerno', 'to see, distinguish with the eyes', '', '2_15', 1), ('CIRCA/2', 744, 'circa', 'around (w/ acc.)', '', '2_15', 1), ('CLAMO', 745, 'clamo', 'to shout, scream', '', '2_15', 1), ('FINGO', 746, 'fingo', 'to imagine, form in the mind', '', '2_15', 1), ('IMPINGO', 747, 'impingo', 'to push, strike, inflict', '', '2_15', 1), ('INFLIGO', 748, 'infligo', 'to strike on or against, inflict', '', '2_15', 1), ('ITERVM', 749, 'iterum', 'again', '', '2_15', 1), ('OPPIDVM', 750, 'oppidum', 'town', '', '2_15', 1), ('PERCVTIO', 751, 'percutio', 'to strike through ', '', '2_15', 1), ('PRAEDITVS', 752, 'praeditus', 'endowed with, possessed of (w/ abl.)', '', '2_15', 1), ('REPELLO', 753, 'repello', 'to push back, thrust back', '', '2_15', 1), ('RIDEO', 754, 'rideo', 'to laugh', '', '2_15', 1), ('RVMPO', 755, 'rumpo', 'to break, tear', '', '2_15', 1), ('SEDES', 756, 'sedes', 'seat, abode', '', '2_15', 1), ('SIC', 757, 'sic', 'in such a way', '', '2_15', 1), ('SIDVS', 758, 'sidus', 'constellation', '', '2_15', 1), ('TELVM', 759, 'telum', 'spear, javelin', '', '2_15', 1), ('VEHO', 760, 'veho', 'to drive, carry', '', '2_15', 1)]\nsection_list ={'1.1': 'start', '1.2': '1.1', '1.3': '1.2', '1.4': '1.3', '1.5': '1.4', '1.6': '1.5', '1.7': '1.6', '1.8': '1.7', '1.9': '1.8', '1.10': '1.9', '1.11': '1.10', '1.12': '1.11', '1.13': '1.12', '1.14': '1.13', '1.15': '1.14', '1.16': '1.15', '1.17': '1.16', '1.18': '1.17', '1.19': '1.18', '1.20': '1.19', '1.21': '1.20', '2.1': '1.21', '2.2': '2.1', '2.3': '2.2', '2.4': '2.3', '2.5': '2.4', '2.6': '2.5', '2.7': '2.6', '2.8': '2.7', '2.9': '2.8', '2.10': '2.9', '2.11': '2.10', '2.12': '2.11', '2.13': '2.12', '2.14': '2.13', '2.15': '2.14', 'end': '2.15', 'start': 'start'}\ntitle = \"Latin for the New Millennium Vols 1 and 2 (Tunberg-Minkova)\"\nsection_level = 2\nlanguage = \"Latin\"\nbook = text.Text(title, section_words, the_text, section_list, section_level, language, True, False)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
"""TODO
"""
import logging
import numpy

import evo
import evo.gp.support
import evo.sr
import evo.utils.stats


class RegressionFitness(evo.Fitness):
LOG = logging.getLogger(__name__ + '.RegressionFitness')

    def __init__(self, train_inputs, train_output, error_fitness,
handled_errors, stats: evo.utils.stats.Stats=None,
store_bsfs: bool=True,
fitness_measure: evo.sr.ErrorMeasure=evo.sr.ErrorMeasure.R2):
super().__init__(store_bsfs)
self.train_inputs = train_inputs
        self.train_output = numpy.asarray(train_output)
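        # Total sum of squares of the target values; denominator of R^2.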
self.ssw = numpy.sum(
(self.train_output - self.train_output.mean()) ** 2)
self.error_fitness = error_fitness
self.errors = tuple([evo.UnevaluableError] + handled_errors)
self.stats = stats
self.fitness_measure = fitness_measure

    def evaluate_individual(self, individual: evo.gp.support.ForestIndividual,
context=None):
assert individual.genes_num == 1
        RegressionFitness.LOG.debug(
            'Evaluating individual %s in context %s', individual, context)
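        # Any handled evaluation error results in the penalty fitness.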
try:
output = self.get_eval(individual, self.train_inputs)
fitness = self.get_error(output, individual)
individual.set_fitness(fitness)
        except self.errors:
RegressionFitness.LOG.debug(
'Exception occurred during evaluation, assigning fitness %f',
self.error_fitness, exc_info=True)
fitness = self.error_fitness
individual.set_fitness(fitness)
return individual.get_fitness()

    def compare(self, i1: evo.gp.support.ForestIndividual,
i2: evo.gp.support.ForestIndividual, context=None):
f1 = i1.get_fitness()
f2 = i2.get_fitness()
if f1 is None and f2 is not None:
raise ValueError('First individual has no fitness.')
if f1 is not None and f2 is None:
raise ValueError('Second individual has no fitness.')
if f1 is None and f2 is None:
raise ValueError('Neither individual has fitness.')
return self.fitness_cmp(f1, f2)

    def get_eval(self, individual: evo.gp.support.ForestIndividual,
args):
return individual.genotype[0].eval(args=args)

    def get_error(self, output, individual: evo.gp.support.ForestIndividual):
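        # Residuals between observed targets and model predictions.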
e = self.train_output - output
ae = numpy.abs(e)
sse = e.dot(e)
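        # Coefficient of determination: 1 - SSE / total sum of squares.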
r2 = 1 - sse / self.ssw
        mse = sse / len(e)
        mae = numpy.sum(ae) / len(e)
worst_case_ae = ae.max()
individual.set_data('R2', r2)
individual.set_data('MSE', mse)
individual.set_data('MAE', mae)
individual.set_data('WORST_CASE_AE', worst_case_ae)
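        # Return whichever metric was selected as the fitness measure.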
if self.fitness_measure is evo.sr.ErrorMeasure.R2:
return r2
if self.fitness_measure is evo.sr.ErrorMeasure.MSE:
return mse
if self.fitness_measure is evo.sr.ErrorMeasure.MAE:
return mae
if self.fitness_measure is evo.sr.ErrorMeasure.WORST_CASE_AE:
return worst_case_ae
raise ValueError('Invalid value of fitness_measure.')

    def fitness_cmp(self, f1, f2):
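        # R^2 is maximized (higher is better); the error measures are minimized.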
if self.fitness_measure is evo.sr.ErrorMeasure.R2:
if f1 > f2:
return -1
if f1 < f2:
return 1
else:
if f1 < f2:
return -1
if f1 > f2:
return 1
return 0


def full_model_str(individual: evo.gp.support.ForestIndividual,
**kwargs) -> str:
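    """Render the model as the '+'-joined infix forms of its genes."""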
newline_genes = kwargs.get('newline_genes', False)
strs = []
for g in individual.genotype:
strs.append('{}'.format(g.infix(**kwargs)))
if newline_genes:
return '\n+ '.join(strs)
else:
return ' + '.join(strs)
|
normal
|
{
"blob_id": "e53d4bb853eb54e4dfedf7126480e2c3e1af1378",
"index": 2825,
"step-1": "<mask token>\n\n\nclass RegressionFitness(evo.Fitness):\n <mask token>\n\n def __init__(self, train_inputs, train_output, error_fitness,\n handled_errors, stats: evo.utils.stats.Stats=None, store_bsfs: bool\n =True, fitness_measure: evo.sr.ErrorMeasure=evo.sr.ErrorMeasure.R2):\n super().__init__(store_bsfs)\n self.train_inputs = train_inputs\n self.train_output = numpy.array(train_output, copy=False)\n self.ssw = numpy.sum((self.train_output - self.train_output.mean()) **\n 2)\n self.error_fitness = error_fitness\n self.errors = tuple([evo.UnevaluableError] + handled_errors)\n self.stats = stats\n self.fitness_measure = fitness_measure\n\n def evaluate_individual(self, individual: evo.gp.support.\n ForestIndividual, context=None):\n assert individual.genes_num == 1\n RegressionFitness.LOG.debug('Evaluating individual %s in context %s',\n individual.__str__(), str(context))\n try:\n output = self.get_eval(individual, self.train_inputs)\n fitness = self.get_error(output, individual)\n individual.set_fitness(fitness)\n except self.errors as _:\n RegressionFitness.LOG.debug(\n 'Exception occurred during evaluation, assigning fitness %f',\n self.error_fitness, exc_info=True)\n fitness = self.error_fitness\n individual.set_fitness(fitness)\n return individual.get_fitness()\n <mask token>\n\n def get_eval(self, individual: evo.gp.support.ForestIndividual, args):\n return individual.genotype[0].eval(args=args)\n\n def get_error(self, output, individual: evo.gp.support.ForestIndividual):\n e = self.train_output - output\n ae = numpy.abs(e)\n sse = e.dot(e)\n r2 = 1 - sse / self.ssw\n mse = sse / numpy.alen(e)\n mae = numpy.sum(ae) / numpy.alen(e)\n worst_case_ae = ae.max()\n individual.set_data('R2', r2)\n individual.set_data('MSE', mse)\n individual.set_data('MAE', mae)\n individual.set_data('WORST_CASE_AE', worst_case_ae)\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n return r2\n if self.fitness_measure is evo.sr.ErrorMeasure.MSE:\n return mse\n if self.fitness_measure is evo.sr.ErrorMeasure.MAE:\n return mae\n if self.fitness_measure is evo.sr.ErrorMeasure.WORST_CASE_AE:\n return worst_case_ae\n raise ValueError('Invalid value of fitness_measure.')\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass RegressionFitness(evo.Fitness):\n <mask token>\n\n def __init__(self, train_inputs, train_output, error_fitness,\n handled_errors, stats: evo.utils.stats.Stats=None, store_bsfs: bool\n =True, fitness_measure: evo.sr.ErrorMeasure=evo.sr.ErrorMeasure.R2):\n super().__init__(store_bsfs)\n self.train_inputs = train_inputs\n self.train_output = numpy.array(train_output, copy=False)\n self.ssw = numpy.sum((self.train_output - self.train_output.mean()) **\n 2)\n self.error_fitness = error_fitness\n self.errors = tuple([evo.UnevaluableError] + handled_errors)\n self.stats = stats\n self.fitness_measure = fitness_measure\n\n def evaluate_individual(self, individual: evo.gp.support.\n ForestIndividual, context=None):\n assert individual.genes_num == 1\n RegressionFitness.LOG.debug('Evaluating individual %s in context %s',\n individual.__str__(), str(context))\n try:\n output = self.get_eval(individual, self.train_inputs)\n fitness = self.get_error(output, individual)\n individual.set_fitness(fitness)\n except self.errors as _:\n RegressionFitness.LOG.debug(\n 'Exception occurred during evaluation, assigning fitness %f',\n self.error_fitness, exc_info=True)\n fitness = self.error_fitness\n individual.set_fitness(fitness)\n return individual.get_fitness()\n\n def compare(self, i1: evo.gp.support.ForestIndividual, i2: evo.gp.\n support.ForestIndividual, context=None):\n f1 = i1.get_fitness()\n f2 = i2.get_fitness()\n if f1 is None and f2 is not None:\n raise ValueError('First individual has no fitness.')\n if f1 is not None and f2 is None:\n raise ValueError('Second individual has no fitness.')\n if f1 is None and f2 is None:\n raise ValueError('Neither individual has fitness.')\n return self.fitness_cmp(f1, f2)\n\n def get_eval(self, individual: evo.gp.support.ForestIndividual, args):\n return individual.genotype[0].eval(args=args)\n\n def get_error(self, output, individual: evo.gp.support.ForestIndividual):\n e = self.train_output - output\n ae = numpy.abs(e)\n sse = e.dot(e)\n r2 = 1 - sse / self.ssw\n mse = sse / numpy.alen(e)\n mae = numpy.sum(ae) / numpy.alen(e)\n worst_case_ae = ae.max()\n individual.set_data('R2', r2)\n individual.set_data('MSE', mse)\n individual.set_data('MAE', mae)\n individual.set_data('WORST_CASE_AE', worst_case_ae)\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n return r2\n if self.fitness_measure is evo.sr.ErrorMeasure.MSE:\n return mse\n if self.fitness_measure is evo.sr.ErrorMeasure.MAE:\n return mae\n if self.fitness_measure is evo.sr.ErrorMeasure.WORST_CASE_AE:\n return worst_case_ae\n raise ValueError('Invalid value of fitness_measure.')\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass RegressionFitness(evo.Fitness):\n <mask token>\n\n def __init__(self, train_inputs, train_output, error_fitness,\n handled_errors, stats: evo.utils.stats.Stats=None, store_bsfs: bool\n =True, fitness_measure: evo.sr.ErrorMeasure=evo.sr.ErrorMeasure.R2):\n super().__init__(store_bsfs)\n self.train_inputs = train_inputs\n self.train_output = numpy.array(train_output, copy=False)\n self.ssw = numpy.sum((self.train_output - self.train_output.mean()) **\n 2)\n self.error_fitness = error_fitness\n self.errors = tuple([evo.UnevaluableError] + handled_errors)\n self.stats = stats\n self.fitness_measure = fitness_measure\n\n def evaluate_individual(self, individual: evo.gp.support.\n ForestIndividual, context=None):\n assert individual.genes_num == 1\n RegressionFitness.LOG.debug('Evaluating individual %s in context %s',\n individual.__str__(), str(context))\n try:\n output = self.get_eval(individual, self.train_inputs)\n fitness = self.get_error(output, individual)\n individual.set_fitness(fitness)\n except self.errors as _:\n RegressionFitness.LOG.debug(\n 'Exception occurred during evaluation, assigning fitness %f',\n self.error_fitness, exc_info=True)\n fitness = self.error_fitness\n individual.set_fitness(fitness)\n return individual.get_fitness()\n\n def compare(self, i1: evo.gp.support.ForestIndividual, i2: evo.gp.\n support.ForestIndividual, context=None):\n f1 = i1.get_fitness()\n f2 = i2.get_fitness()\n if f1 is None and f2 is not None:\n raise ValueError('First individual has no fitness.')\n if f1 is not None and f2 is None:\n raise ValueError('Second individual has no fitness.')\n if f1 is None and f2 is None:\n raise ValueError('Neither individual has fitness.')\n return self.fitness_cmp(f1, f2)\n\n def get_eval(self, individual: evo.gp.support.ForestIndividual, args):\n return individual.genotype[0].eval(args=args)\n\n def get_error(self, output, individual: evo.gp.support.ForestIndividual):\n e = self.train_output - output\n ae = numpy.abs(e)\n sse = e.dot(e)\n r2 = 1 - sse / self.ssw\n mse = sse / numpy.alen(e)\n mae = numpy.sum(ae) / numpy.alen(e)\n worst_case_ae = ae.max()\n individual.set_data('R2', r2)\n individual.set_data('MSE', mse)\n individual.set_data('MAE', mae)\n individual.set_data('WORST_CASE_AE', worst_case_ae)\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n return r2\n if self.fitness_measure is evo.sr.ErrorMeasure.MSE:\n return mse\n if self.fitness_measure is evo.sr.ErrorMeasure.MAE:\n return mae\n if self.fitness_measure is evo.sr.ErrorMeasure.WORST_CASE_AE:\n return worst_case_ae\n raise ValueError('Invalid value of fitness_measure.')\n\n def fitness_cmp(self, f1, f2):\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n if f1 > f2:\n return -1\n if f1 < f2:\n return 1\n else:\n if f1 < f2:\n return -1\n if f1 > f2:\n return 1\n return 0\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass RegressionFitness(evo.Fitness):\n LOG = logging.getLogger(__name__ + '.RegressionFitness')\n\n def __init__(self, train_inputs, train_output, error_fitness,\n handled_errors, stats: evo.utils.stats.Stats=None, store_bsfs: bool\n =True, fitness_measure: evo.sr.ErrorMeasure=evo.sr.ErrorMeasure.R2):\n super().__init__(store_bsfs)\n self.train_inputs = train_inputs\n self.train_output = numpy.array(train_output, copy=False)\n self.ssw = numpy.sum((self.train_output - self.train_output.mean()) **\n 2)\n self.error_fitness = error_fitness\n self.errors = tuple([evo.UnevaluableError] + handled_errors)\n self.stats = stats\n self.fitness_measure = fitness_measure\n\n def evaluate_individual(self, individual: evo.gp.support.\n ForestIndividual, context=None):\n assert individual.genes_num == 1\n RegressionFitness.LOG.debug('Evaluating individual %s in context %s',\n individual.__str__(), str(context))\n try:\n output = self.get_eval(individual, self.train_inputs)\n fitness = self.get_error(output, individual)\n individual.set_fitness(fitness)\n except self.errors as _:\n RegressionFitness.LOG.debug(\n 'Exception occurred during evaluation, assigning fitness %f',\n self.error_fitness, exc_info=True)\n fitness = self.error_fitness\n individual.set_fitness(fitness)\n return individual.get_fitness()\n\n def compare(self, i1: evo.gp.support.ForestIndividual, i2: evo.gp.\n support.ForestIndividual, context=None):\n f1 = i1.get_fitness()\n f2 = i2.get_fitness()\n if f1 is None and f2 is not None:\n raise ValueError('First individual has no fitness.')\n if f1 is not None and f2 is None:\n raise ValueError('Second individual has no fitness.')\n if f1 is None and f2 is None:\n raise ValueError('Neither individual has fitness.')\n return self.fitness_cmp(f1, f2)\n\n def get_eval(self, individual: evo.gp.support.ForestIndividual, args):\n return individual.genotype[0].eval(args=args)\n\n def get_error(self, output, individual: evo.gp.support.ForestIndividual):\n e = self.train_output - output\n ae = numpy.abs(e)\n sse = e.dot(e)\n r2 = 1 - sse / self.ssw\n mse = sse / numpy.alen(e)\n mae = numpy.sum(ae) / numpy.alen(e)\n worst_case_ae = ae.max()\n individual.set_data('R2', r2)\n individual.set_data('MSE', mse)\n individual.set_data('MAE', mae)\n individual.set_data('WORST_CASE_AE', worst_case_ae)\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n return r2\n if self.fitness_measure is evo.sr.ErrorMeasure.MSE:\n return mse\n if self.fitness_measure is evo.sr.ErrorMeasure.MAE:\n return mae\n if self.fitness_measure is evo.sr.ErrorMeasure.WORST_CASE_AE:\n return worst_case_ae\n raise ValueError('Invalid value of fitness_measure.')\n\n def fitness_cmp(self, f1, f2):\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n if f1 > f2:\n return -1\n if f1 < f2:\n return 1\n else:\n if f1 < f2:\n return -1\n if f1 > f2:\n return 1\n return 0\n\n\ndef full_model_str(individual: evo.gp.support.ForestIndividual, **kwargs\n ) ->str:\n newline_genes = kwargs.get('newline_genes', False)\n strs = []\n for g in individual.genotype:\n strs.append('{}'.format(g.infix(**kwargs)))\n if newline_genes:\n return '\\n+ '.join(strs)\n else:\n return ' + '.join(strs)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"TODO\n\"\"\"\n\nimport logging\n\nimport numpy\n\nimport evo.gp.support\nimport evo.sr\nimport evo.utils.stats\n\n\nclass RegressionFitness(evo.Fitness):\n LOG = logging.getLogger(__name__ + '.RegressionFitness')\n\n def __init__(self, train_inputs, train_output, error_fitness,\n handled_errors, stats: evo.utils.stats.Stats=None,\n store_bsfs: bool=True,\n fitness_measure: evo.sr.ErrorMeasure=evo.sr.ErrorMeasure.R2):\n super().__init__(store_bsfs)\n self.train_inputs = train_inputs\n self.train_output = numpy.array(train_output, copy=False)\n self.ssw = numpy.sum(\n (self.train_output - self.train_output.mean()) ** 2)\n self.error_fitness = error_fitness\n self.errors = tuple([evo.UnevaluableError] + handled_errors)\n self.stats = stats\n self.fitness_measure = fitness_measure\n\n def evaluate_individual(self, individual: evo.gp.support.ForestIndividual,\n context=None):\n assert individual.genes_num == 1\n RegressionFitness.LOG.debug(\n 'Evaluating individual %s in context %s', individual.__str__(),\n str(context))\n\n try:\n output = self.get_eval(individual, self.train_inputs)\n fitness = self.get_error(output, individual)\n individual.set_fitness(fitness)\n except self.errors as _:\n RegressionFitness.LOG.debug(\n 'Exception occurred during evaluation, assigning fitness %f',\n self.error_fitness, exc_info=True)\n fitness = self.error_fitness\n individual.set_fitness(fitness)\n return individual.get_fitness()\n\n def compare(self, i1: evo.gp.support.ForestIndividual,\n i2: evo.gp.support.ForestIndividual, context=None):\n f1 = i1.get_fitness()\n f2 = i2.get_fitness()\n if f1 is None and f2 is not None:\n raise ValueError('First individual has no fitness.')\n if f1 is not None and f2 is None:\n raise ValueError('Second individual has no fitness.')\n if f1 is None and f2 is None:\n raise ValueError('Neither individual has fitness.')\n\n return self.fitness_cmp(f1, f2)\n\n def get_eval(self, individual: evo.gp.support.ForestIndividual,\n args):\n return individual.genotype[0].eval(args=args)\n\n def get_error(self, output, individual: evo.gp.support.ForestIndividual):\n e = self.train_output - output\n ae = numpy.abs(e)\n sse = e.dot(e)\n r2 = 1 - sse / self.ssw\n mse = sse / numpy.alen(e)\n mae = numpy.sum(ae) / numpy.alen(e)\n worst_case_ae = ae.max()\n individual.set_data('R2', r2)\n individual.set_data('MSE', mse)\n individual.set_data('MAE', mae)\n individual.set_data('WORST_CASE_AE', worst_case_ae)\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n return r2\n if self.fitness_measure is evo.sr.ErrorMeasure.MSE:\n return mse\n if self.fitness_measure is evo.sr.ErrorMeasure.MAE:\n return mae\n if self.fitness_measure is evo.sr.ErrorMeasure.WORST_CASE_AE:\n return worst_case_ae\n raise ValueError('Invalid value of fitness_measure.')\n\n def fitness_cmp(self, f1, f2):\n if self.fitness_measure is evo.sr.ErrorMeasure.R2:\n if f1 > f2:\n return -1\n if f1 < f2:\n return 1\n else:\n if f1 < f2:\n return -1\n if f1 > f2:\n return 1\n return 0\n\n\ndef full_model_str(individual: evo.gp.support.ForestIndividual,\n **kwargs) -> str:\n newline_genes = kwargs.get('newline_genes', False)\n strs = []\n for g in individual.genotype:\n strs.append('{}'.format(g.infix(**kwargs)))\n if newline_genes:\n return '\\n+ '.join(strs)\n else:\n return ' + '.join(strs)\n",
"step-ids": [
5,
6,
7,
9,
11
]
}
|
[
5,
6,
7,
9,
11
] |
import time
import json
from threading import Thread
try:
    with open('file.json') as f:
        name = json.load(f)
except (FileNotFoundError, json.JSONDecodeError):
    # no usable store yet: start empty; file.json is (re)created on the first save
    name = {}
def create(k, v, t='0'):
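    # create key k with integer value v and optional TTL t (seconds); t='0' means no expiry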
if k in name:
print("ERROR:The data already exists")
else:
if k.isalpha():
if v.isnumeric() and t.isnumeric():
v = int(v)
t = int(t)
                if len(name) < (1024 * 1024 * 1024) and v <= (16 * 1024 * 1024):
if t == 0:
p = [v, t]
else:
p = [v, time.time() + t]
if len(k) <= 32:
name[k] = p
print("Key is created")
with open('file.json', 'w') as json_file:
json.dump(name, json_file)
else:
print("ERROR:Key length Exceeded")
else:
print("ERROR:MEMORY Exceeded!!!")
else:
print("ERROR:INVALID INPUT (NUMERIC ONLY)")
else:
print("ERROR:INVALID KEY INPUT (ALPHABETS ONLY)")
def read(k):
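    # print the value stored under k, rejecting keys whose TTL has already expired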
if k not in name:
print("ERROR:Key does not exists Enter a valid key!!")
else:
m = name[k]
if m[1] != 0:
if time.time() < m[1]:
print ( k + "-" + str(m[0]))
else:
print("ERROR: " + k + " Time expired")
else:
print(k + "-" + str(m[0]))
with open('file.json', 'w') as js:
json.dump(name, js)
def delete(k):
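    # remove k from the store (only while its TTL, if any, is still live) and persist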
if k not in name:
print("ERROR:Key does not exists Enter a valid key!!")
else:
m = name[k]
if m[1] != 0:
if time.time() < m[1]:
del name[k]
print("Key (" + k + ") is deleted")
with open('file.json', 'w') as js:
json.dump(name, js)
else:
print("ERROR:ERROR: " + k + " Time expired")
else:
del name[k]
print("Key (" + k + ") is deleted")
with open('file.json', 'w') as js:
json.dump(name, js)
|
normal
|
{
"blob_id": "430dff54da986df4e3a68018d930735c757d49d0",
"index": 6794,
"step-1": "<mask token>\n\n\ndef create(k, v, t='0'):\n if k in name:\n print('ERROR:The data already exists')\n elif k.isalpha():\n if v.isnumeric() and t.isnumeric():\n v = int(v)\n t = int(t)\n if len(name) < 1024 * 1020 * 1024 and v <= 16 * 1024 * 1024:\n if t == 0:\n p = [v, t]\n else:\n p = [v, time.time() + t]\n if len(k) <= 32:\n name[k] = p\n print('Key is created')\n with open('file.json', 'w') as json_file:\n json.dump(name, json_file)\n else:\n print('ERROR:Key length Exceeded')\n else:\n print('ERROR:MEMORY Exceeded!!!')\n else:\n print('ERROR:INVALID INPUT (NUMERIC ONLY)')\n else:\n print('ERROR:INVALID KEY INPUT (ALPHABETS ONLY)')\n\n\ndef read(k):\n if k not in name:\n print('ERROR:Key does not exists Enter a valid key!!')\n else:\n m = name[k]\n if m[1] != 0:\n if time.time() < m[1]:\n print(k + '-' + str(m[0]))\n else:\n print('ERROR: ' + k + ' Time expired')\n else:\n print(k + '-' + str(m[0]))\n with open('file.json', 'w') as js:\n json.dump(name, js)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create(k, v, t='0'):\n if k in name:\n print('ERROR:The data already exists')\n elif k.isalpha():\n if v.isnumeric() and t.isnumeric():\n v = int(v)\n t = int(t)\n if len(name) < 1024 * 1020 * 1024 and v <= 16 * 1024 * 1024:\n if t == 0:\n p = [v, t]\n else:\n p = [v, time.time() + t]\n if len(k) <= 32:\n name[k] = p\n print('Key is created')\n with open('file.json', 'w') as json_file:\n json.dump(name, json_file)\n else:\n print('ERROR:Key length Exceeded')\n else:\n print('ERROR:MEMORY Exceeded!!!')\n else:\n print('ERROR:INVALID INPUT (NUMERIC ONLY)')\n else:\n print('ERROR:INVALID KEY INPUT (ALPHABETS ONLY)')\n\n\ndef read(k):\n if k not in name:\n print('ERROR:Key does not exists Enter a valid key!!')\n else:\n m = name[k]\n if m[1] != 0:\n if time.time() < m[1]:\n print(k + '-' + str(m[0]))\n else:\n print('ERROR: ' + k + ' Time expired')\n else:\n print(k + '-' + str(m[0]))\n with open('file.json', 'w') as js:\n json.dump(name, js)\n\n\ndef delete(k):\n if k not in name:\n print('ERROR:Key does not exists Enter a valid key!!')\n else:\n m = name[k]\n if m[1] != 0:\n if time.time() < m[1]:\n del name[k]\n print('Key (' + k + ') is deleted')\n with open('file.json', 'w') as js:\n json.dump(name, js)\n else:\n print('ERROR:ERROR: ' + k + ' Time expired')\n else:\n del name[k]\n print('Key (' + k + ') is deleted')\n with open('file.json', 'w') as js:\n json.dump(name, js)\n",
"step-3": "<mask token>\ntry:\n with open('file.json') as f:\n name = json.load(f)\nexcept:\n f = open('file.json', 'w+')\n name = {}\n\n\ndef create(k, v, t='0'):\n if k in name:\n print('ERROR:The data already exists')\n elif k.isalpha():\n if v.isnumeric() and t.isnumeric():\n v = int(v)\n t = int(t)\n if len(name) < 1024 * 1020 * 1024 and v <= 16 * 1024 * 1024:\n if t == 0:\n p = [v, t]\n else:\n p = [v, time.time() + t]\n if len(k) <= 32:\n name[k] = p\n print('Key is created')\n with open('file.json', 'w') as json_file:\n json.dump(name, json_file)\n else:\n print('ERROR:Key length Exceeded')\n else:\n print('ERROR:MEMORY Exceeded!!!')\n else:\n print('ERROR:INVALID INPUT (NUMERIC ONLY)')\n else:\n print('ERROR:INVALID KEY INPUT (ALPHABETS ONLY)')\n\n\ndef read(k):\n if k not in name:\n print('ERROR:Key does not exists Enter a valid key!!')\n else:\n m = name[k]\n if m[1] != 0:\n if time.time() < m[1]:\n print(k + '-' + str(m[0]))\n else:\n print('ERROR: ' + k + ' Time expired')\n else:\n print(k + '-' + str(m[0]))\n with open('file.json', 'w') as js:\n json.dump(name, js)\n\n\ndef delete(k):\n if k not in name:\n print('ERROR:Key does not exists Enter a valid key!!')\n else:\n m = name[k]\n if m[1] != 0:\n if time.time() < m[1]:\n del name[k]\n print('Key (' + k + ') is deleted')\n with open('file.json', 'w') as js:\n json.dump(name, js)\n else:\n print('ERROR:ERROR: ' + k + ' Time expired')\n else:\n del name[k]\n print('Key (' + k + ') is deleted')\n with open('file.json', 'w') as js:\n json.dump(name, js)\n",
"step-4": "import time\nimport json\nfrom threading import Thread\ntry:\n with open('file.json') as f:\n name = json.load(f)\nexcept:\n f = open('file.json', 'w+')\n name = {}\n\n\ndef create(k, v, t='0'):\n if k in name:\n print('ERROR:The data already exists')\n elif k.isalpha():\n if v.isnumeric() and t.isnumeric():\n v = int(v)\n t = int(t)\n if len(name) < 1024 * 1020 * 1024 and v <= 16 * 1024 * 1024:\n if t == 0:\n p = [v, t]\n else:\n p = [v, time.time() + t]\n if len(k) <= 32:\n name[k] = p\n print('Key is created')\n with open('file.json', 'w') as json_file:\n json.dump(name, json_file)\n else:\n print('ERROR:Key length Exceeded')\n else:\n print('ERROR:MEMORY Exceeded!!!')\n else:\n print('ERROR:INVALID INPUT (NUMERIC ONLY)')\n else:\n print('ERROR:INVALID KEY INPUT (ALPHABETS ONLY)')\n\n\ndef read(k):\n if k not in name:\n print('ERROR:Key does not exists Enter a valid key!!')\n else:\n m = name[k]\n if m[1] != 0:\n if time.time() < m[1]:\n print(k + '-' + str(m[0]))\n else:\n print('ERROR: ' + k + ' Time expired')\n else:\n print(k + '-' + str(m[0]))\n with open('file.json', 'w') as js:\n json.dump(name, js)\n\n\ndef delete(k):\n if k not in name:\n print('ERROR:Key does not exists Enter a valid key!!')\n else:\n m = name[k]\n if m[1] != 0:\n if time.time() < m[1]:\n del name[k]\n print('Key (' + k + ') is deleted')\n with open('file.json', 'w') as js:\n json.dump(name, js)\n else:\n print('ERROR:ERROR: ' + k + ' Time expired')\n else:\n del name[k]\n print('Key (' + k + ') is deleted')\n with open('file.json', 'w') as js:\n json.dump(name, js)\n",
"step-5": "import time\r\nimport json\r\nfrom threading import Thread\r\n\r\n\r\ntry:\r\n with open('file.json') as f:\r\n name = json.load(f)\r\nexcept:\r\n f = open(\"file.json\", \"w+\")\r\n name = {}\r\n\r\n\r\ndef create(k, v, t='0'):\r\n if k in name:\r\n print(\"ERROR:The data already exists\")\r\n else:\r\n if k.isalpha():\r\n if v.isnumeric() and t.isnumeric():\r\n v = int(v)\r\n t = int(t)\r\n if len(name) < (1024 * 1020 * 1024) and v <= (16 * 1024 * 1024):\r\n if t == 0:\r\n p = [v, t]\r\n else:\r\n p = [v, time.time() + t]\r\n if len(k) <= 32:\r\n name[k] = p\r\n print(\"Key is created\")\r\n with open('file.json', 'w') as json_file:\r\n json.dump(name, json_file)\r\n else:\r\n print(\"ERROR:Key length Exceeded\")\r\n else:\r\n print(\"ERROR:MEMORY Exceeded!!!\")\r\n else:\r\n print(\"ERROR:INVALID INPUT (NUMERIC ONLY)\")\r\n else:\r\n print(\"ERROR:INVALID KEY INPUT (ALPHABETS ONLY)\")\r\n\r\n\r\ndef read(k):\r\n if k not in name:\r\n print(\"ERROR:Key does not exists Enter a valid key!!\")\r\n else:\r\n m = name[k]\r\n if m[1] != 0:\r\n if time.time() < m[1]:\r\n print ( k + \"-\" + str(m[0]))\r\n else:\r\n print(\"ERROR: \" + k + \" Time expired\")\r\n else:\r\n print(k + \"-\" + str(m[0]))\r\n with open('file.json', 'w') as js:\r\n json.dump(name, js)\r\n\r\n\r\ndef delete(k):\r\n if k not in name:\r\n print(\"ERROR:Key does not exists Enter a valid key!!\")\r\n else:\r\n m = name[k]\r\n if m[1] != 0:\r\n if time.time() < m[1]:\r\n del name[k]\r\n print(\"Key (\" + k + \") is deleted\")\r\n with open('file.json', 'w') as js:\r\n json.dump(name, js)\r\n else:\r\n print(\"ERROR:ERROR: \" + k + \" Time expired\")\r\n else:\r\n del name[k]\r\n print(\"Key (\" + k + \") is deleted\")\r\n with open('file.json', 'w') as js:\r\n json.dump(name, js)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.web_perf.metrics import timeline_based_metric
from telemetry.web_perf.metrics.trace_event_stats import TraceEventStats
from telemetry.web_perf.metrics.trace_event_stats import TraceEventStatsInput
class IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):
"""Metrics for IndexedDB operations.
"""
def __init__(self):
super(IndexedDBTimelineMetric, self).__init__()
self._stats = TraceEventStats()
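    # register one stats input per IndexedDB browser-process event so each gets its own metric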
self._stats.AddInput(TraceEventStatsInput(
event_category='IndexedDB',
event_name='IndexedDBDatabase::GetOperation',
metric_name='idb-gets',
metric_description='The duration of all "get" ops in IndexedDB',
units='ms',
process_name='Browser'))
self._stats.AddInput(TraceEventStatsInput(
event_category='IndexedDB',
event_name='IndexedDBDatabase::PutOperation',
metric_name='idb-puts',
metric_description='The duration of all "put" ops in IndexedDB',
units='ms',
process_name='Browser'))
self._stats.AddInput(TraceEventStatsInput(
event_category='IndexedDB',
event_name='IndexedDBFactoryImpl::Open',
metric_name='idb-opens',
metric_description='The duration of all "open" ops in IndexedDB',
units='ms',
process_name='Browser'))
self._stats.AddInput(TraceEventStatsInput(
event_category='IndexedDB',
event_name='IndexedDBTransaction::Commit',
metric_name='idb-transaction-commits',
metric_description=('The duration of all "commit" ops of ' +
'transactions in IndexedDB.'),
units='ms',
process_name='Browser'))
self._stats.AddInput(TraceEventStatsInput(
event_category='IndexedDB',
event_name='IndexedDBFactoryImpl::DeleteDatabase',
metric_name='idb-database-deletes',
metric_description=('The duration of all "delete" ops of ' +
'IndexedDB databases.'),
units='ms',
process_name='Browser'))
self._stats.AddInput(TraceEventStatsInput(
event_category='IndexedDB',
event_name='IndexedDBDatabase::OpenCursorOperation',
metric_name='idb-cursor-opens',
metric_description=('The duration of all "open" ops of ' +
'IndexedDB cursors.'),
units='ms',
process_name='Browser'))
self._stats.AddInput(TraceEventStatsInput(
event_category='IndexedDB',
event_name='IndexedDBCursor::CursorIterationOperation',
metric_name='idb-cursor-iterations',
metric_description=('The duration of all "iteration" ops of ' +
'IndexedDB cursors.'),
units='ms',
process_name='Browser'))
def AddResults(self, model, renderer_process, interactions, results):
self._stats.AddResults(model, renderer_process, interactions, results)
|
normal
|
{
"blob_id": "47f88bc3836490e08f464f71351096b54118420e",
"index": 5297,
"step-1": "<mask token>\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n <mask token>\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets', metric_description=\n 'The duration of all \"get\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts', metric_description=\n 'The duration of all \"put\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens', metric_description=\n 'The duration of all \"open\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits', metric_description=\n 'The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.', units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes', metric_description=\n 'The duration of all \"delete\" ops of ' + 'IndexedDB databases.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBDatabase::OpenCursorOperation', metric_name=\n 'idb-cursor-opens', metric_description=\n 'The duration of all \"open\" ops of ' + 'IndexedDB cursors.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBCursor::CursorIterationOperation', metric_name=\n 'idb-cursor-iterations', metric_description=\n 'The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.', units='ms', process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-3": "<mask token>\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n \"\"\"Metrics for IndexedDB operations.\n \"\"\"\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets', metric_description=\n 'The duration of all \"get\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts', metric_description=\n 'The duration of all \"put\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens', metric_description=\n 'The duration of all \"open\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits', metric_description=\n 'The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.', units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes', metric_description=\n 'The duration of all \"delete\" ops of ' + 'IndexedDB databases.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBDatabase::OpenCursorOperation', metric_name=\n 'idb-cursor-opens', metric_description=\n 'The duration of all \"open\" ops of ' + 'IndexedDB cursors.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBCursor::CursorIterationOperation', metric_name=\n 'idb-cursor-iterations', metric_description=\n 'The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.', units='ms', process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-4": "from telemetry.web_perf.metrics import timeline_based_metric\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStats\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStatsInput\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n \"\"\"Metrics for IndexedDB operations.\n \"\"\"\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets', metric_description=\n 'The duration of all \"get\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts', metric_description=\n 'The duration of all \"put\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens', metric_description=\n 'The duration of all \"open\" ops in IndexedDB', units='ms',\n process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits', metric_description=\n 'The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.', units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes', metric_description=\n 'The duration of all \"delete\" ops of ' + 'IndexedDB databases.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBDatabase::OpenCursorOperation', metric_name=\n 'idb-cursor-opens', metric_description=\n 'The duration of all \"open\" ops of ' + 'IndexedDB cursors.',\n units='ms', process_name='Browser'))\n self._stats.AddInput(TraceEventStatsInput(event_category=\n 'IndexedDB', event_name=\n 'IndexedDBCursor::CursorIterationOperation', metric_name=\n 'idb-cursor-iterations', metric_description=\n 'The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.', units='ms', process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-5": "# Copyright 2015 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\n\nfrom telemetry.web_perf.metrics import timeline_based_metric\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStats\nfrom telemetry.web_perf.metrics.trace_event_stats import TraceEventStatsInput\n\n\nclass IndexedDBTimelineMetric(timeline_based_metric.TimelineBasedMetric):\n \"\"\"Metrics for IndexedDB operations.\n \"\"\"\n\n def __init__(self):\n super(IndexedDBTimelineMetric, self).__init__()\n self._stats = TraceEventStats()\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBDatabase::GetOperation',\n metric_name='idb-gets',\n metric_description='The duration of all \"get\" ops in IndexedDB',\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBDatabase::PutOperation',\n metric_name='idb-puts',\n metric_description='The duration of all \"put\" ops in IndexedDB',\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBFactoryImpl::Open',\n metric_name='idb-opens',\n metric_description='The duration of all \"open\" ops in IndexedDB',\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBTransaction::Commit',\n metric_name='idb-transaction-commits',\n metric_description=('The duration of all \"commit\" ops of ' +\n 'transactions in IndexedDB.'),\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBFactoryImpl::DeleteDatabase',\n metric_name='idb-database-deletes',\n metric_description=('The duration of all \"delete\" ops of ' +\n 'IndexedDB databases.'),\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBDatabase::OpenCursorOperation',\n metric_name='idb-cursor-opens',\n metric_description=('The duration of all \"open\" ops of ' +\n 'IndexedDB cursors.'),\n units='ms',\n process_name='Browser'))\n\n self._stats.AddInput(TraceEventStatsInput(\n event_category='IndexedDB',\n event_name='IndexedDBCursor::CursorIterationOperation',\n metric_name='idb-cursor-iterations',\n metric_description=('The duration of all \"iteration\" ops of ' +\n 'IndexedDB cursors.'),\n units='ms',\n process_name='Browser'))\n\n def AddResults(self, model, renderer_process, interactions, results):\n self._stats.AddResults(model, renderer_process, interactions, results)\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
sentence = "Practice Problems to Drill List Comprehension in Your Head."
sentence = sentence.split()
sentence = [i.replace(".", "") for i in sentence]
[print(i) for i in sentence if len(i) < 5]
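# note: the list comprehension above runs print() purely for its side effect; the result list is discarded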
|
normal
|
{
"blob_id": "c0e349be45cd964e8e398baaed64eae792189dd1",
"index": 5723,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n[print(i) for i in sentence if len(i) < 5]\n",
"step-3": "sentence = 'Practice Problems to Drill List Comprehension in Your Head.'\nsentence = sentence.split()\nsentence = [i.replace('.', '') for i in sentence]\n[print(i) for i in sentence if len(i) < 5]\n",
"step-4": "sentence = \"Practice Problems to Drill List Comprehension in Your Head.\"\nsentence = sentence.split()\nsentence = [i.replace(\".\", \"\") for i in sentence]\n[print(i) for i in sentence if len(i)<5]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Generated by Django 2.2.6 on 2020-06-18 14:16
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('gestionadmin', '0133_auto_20200618_1339'),
]
operations = [
migrations.RemoveField(
model_name='comprasenc',
name='empleado',
),
]
|
normal
|
{
"blob_id": "f96a7bef48e7df2899343029a2fae9697125a5b2",
"index": 5203,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('gestionadmin', '0133_auto_20200618_1339')]\n operations = [migrations.RemoveField(model_name='comprasenc', name=\n 'empleado')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('gestionadmin', '0133_auto_20200618_1339')]\n operations = [migrations.RemoveField(model_name='comprasenc', name=\n 'empleado')]\n",
"step-5": "# Generated by Django 2.2.6 on 2020-06-18 14:16\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('gestionadmin', '0133_auto_20200618_1339'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='comprasenc',\n name='empleado',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2015] Michał Szczygieł, M4GiK Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
from django import template
register = template.Library()
@register.inclusion_tag('tags/fieldsetForm.html')
def show_fieldsetform(form):
"""
    Renders given form with required fields marked.
    @param form: the form to render
    @return: context for the fieldsetForm template
"""
return {'form': form, 'required_fields': True}
@register.inclusion_tag('tags/fieldsetForm.html')
def show_fieldsetform_nrf(form):
"""
    Renders given form without marking required fields (nrf = no required fields).
    @param form: the form to render
    @return: context for the fieldsetForm template
"""
return {'form': form, 'required_fields': False}
@register.inclusion_tag('tags/sendForm.html')
def show_sendform(form):
"""
    Renders given form with required fields marked.
    @param form: the form to render
    @return: context for the sendForm template
"""
return {'form': form, 'required_fields': True}
@register.inclusion_tag('tags/loginForm.html')
def show_loginform(form):
"""
    Renders given form with required fields marked.
    @param form: the form to render
    @return: context for the loginForm template
"""
return {'form': form, 'required_fields': True}
@register.inclusion_tag('tags/accountForm.html')
def show_accountform(form, is_superuser):
"""
Renders given form without marking required fields.
    @param form: the form to render
    @param is_superuser: whether the requesting user is a superuser
    @return: context for the accountForm template
"""
return {'form': form, 'required_fields': False, 'is_superuser': is_superuser}
|
normal
|
{
"blob_id": "9f2105d188ac32a9eef31b21065e9bda13a02995",
"index": 6735,
"step-1": "<mask token>\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform_nrf(form):\n \"\"\"\n Renders given form with required fields marked.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False}\n\n\[email protected]_tag('tags/sendForm.html')\ndef show_sendform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\n<mask token>\n\n\[email protected]_tag('tags/accountForm.html')\ndef show_accountform(form, is_superuser):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False, 'is_superuser':\n is_superuser}\n",
"step-2": "<mask token>\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform_nrf(form):\n \"\"\"\n Renders given form with required fields marked.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False}\n\n\[email protected]_tag('tags/sendForm.html')\ndef show_sendform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/loginForm.html')\ndef show_loginform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/accountForm.html')\ndef show_accountform(form, is_superuser):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False, 'is_superuser':\n is_superuser}\n",
"step-3": "<mask token>\nregister = template.Library()\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform_nrf(form):\n \"\"\"\n Renders given form with required fields marked.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False}\n\n\[email protected]_tag('tags/sendForm.html')\ndef show_sendform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/loginForm.html')\ndef show_loginform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/accountForm.html')\ndef show_accountform(form, is_superuser):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False, 'is_superuser':\n is_superuser}\n",
"step-4": "from django import template\nregister = template.Library()\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform_nrf(form):\n \"\"\"\n Renders given form with required fields marked.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False}\n\n\[email protected]_tag('tags/sendForm.html')\ndef show_sendform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/loginForm.html')\ndef show_loginform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/accountForm.html')\ndef show_accountform(form, is_superuser):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False, 'is_superuser':\n is_superuser}\n",
"step-5": "# -*- coding: utf-8 -*-\n# @COPYRIGHT_begin\n#\n# Copyright [2015] Michał Szczygieł, M4GiK Software\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# @COPYRIGHT_end\nfrom django import template\n\n\nregister = template.Library()\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/fieldsetForm.html')\ndef show_fieldsetform_nrf(form):\n \"\"\"\n Renders given form with required fields marked.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False}\n\n\[email protected]_tag('tags/sendForm.html')\ndef show_sendform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/loginForm.html')\ndef show_loginform(form):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': True}\n\n\[email protected]_tag('tags/accountForm.html')\ndef show_accountform(form, is_superuser):\n \"\"\"\n Renders given form without marking required fields.\n @param form:\n @return:\n \"\"\"\n return {'form': form, 'required_fields': False, 'is_superuser': is_superuser}\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#!/usr/bin/env python3
from nmigen import *
from nmigen.build import *
from nmigen_boards.icebreaker import ICEBreakerPlatform
class SSDigitDecoder(Elaboratable):
def __init__(self):
self.i_num = Signal(4)
self.o_disp = Signal(7)
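        # seven-segment encodings for digits 0-9 (bit order gfedcba)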
self.lut = {
0: 0b011_1111,
1: 0b000_0110,
2: 0b101_1011,
3: 0b100_1111,
4: 0b110_0110,
5: 0b110_1101,
6: 0b111_1101,
7: 0b000_0111,
8: 0b111_1111,
9: 0b110_0111,
}
def incr(self):
return self.i_num.eq(self.i_num+1)
def elaborate(self, platform):
m = Module()
with m.Switch(self.i_num):
for a, b in self.lut.items():
with m.Case(a):
m.d.comb += self.o_disp.eq(b)
return m
class Blinky(Elaboratable):
def __init__(self):
self.dd0 = SSDigitDecoder()
self.dd1 = SSDigitDecoder()
def elaborate(self, platform):
m = Module()
m.submodules.dd0 = self.dd0
m.submodules.dd1 = self.dd1
timer = Signal(20)
led = platform.request('led', 0)
btn = platform.request('button', 0)
btn1 = platform.request('button', 1)
dig_sel = platform.request('ss_dig_sel', 0)
disp = platform.request('ss_disp', 0)
# blinky led
m.d.sync += timer.eq(timer+1)
m.d.comb += led.o.eq(timer[-1] & ~btn)
# 7 seg
running = Signal(1)
"""
# naive btn
last_btn1 = Signal(1)
m.d.sync += last_btn1.eq(btn1.i)
with m.If(btn1.i & ~last_btn1):
m.d.sync += running.eq(~running)
"""
btn1_pipe1 = Signal(1)
btn1_pipe2 = Signal(1)
        btn1_db = Signal(range(0, 0x10000))  # must be wide enough to hold the 0xffff reload value
m.d.sync += [
btn1_pipe1.eq(btn1.i),
btn1_pipe2.eq(btn1_pipe1),
]
with m.If(btn1_pipe2):
m.d.sync += btn1_db.eq(0xffff)
with m.Else():
with m.If(btn1_db > 0):
m.d.sync += btn1_db.eq(btn1_db-1)
with m.If(btn1_pipe2 & (btn1_db == 0)):
m.d.sync += running.eq(~running)
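        # while running, count 00-99 in BCD: dd0 is the ones digit, dd1 the tens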
with m.If(running & (timer == 0)):
with m.If(self.dd0.i_num == 9):
m.d.sync += self.dd0.i_num.eq(0)
with m.If(self.dd1.i_num == 9):
m.d.sync += self.dd1.i_num.eq(0)
with m.Else():
m.d.sync += self.dd1.incr()
with m.Else():
m.d.sync += self.dd0.incr()
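        # time-multiplex the two digits onto the shared segment bus, using timer[8] as digit select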
with m.If(timer[8]):
m.d.comb += [
dig_sel.o.eq(0),
disp.o.eq(self.dd1.o_disp),
]
with m.Else():
m.d.comb += [
dig_sel.o.eq(1),
disp.o.eq(self.dd0.o_disp),
]
return m
if __name__ == '__main__':
p = ICEBreakerPlatform()
p.add_resources(p.break_off_pmod)
p.add_resources([
Resource('ss_dig_sel', 0,
Pins('10', dir='o', conn=('pmod', 0)),
Attrs(IO_STANDARD='SB_LVCMOS')),
Resource('ss_disp', 0,
PinsN('1 2 3 4 7 8 9', dir='o', conn=('pmod', 0)),
Attrs(IO_STANDARD='SB_LVCMOS')),
])
for r in p.resources:
print('r:', r)
p.build(Blinky(), do_program=False)
|
normal
|
{
"blob_id": "74bb511a9ec272020693db65a2e708f3db56931e",
"index": 9954,
"step-1": "<mask token>\n\n\nclass SSDigitDecoder(Elaboratable):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Blinky(Elaboratable):\n\n def __init__(self):\n self.dd0 = SSDigitDecoder()\n self.dd1 = SSDigitDecoder()\n\n def elaborate(self, platform):\n m = Module()\n m.submodules.dd0 = self.dd0\n m.submodules.dd1 = self.dd1\n timer = Signal(20)\n led = platform.request('led', 0)\n btn = platform.request('button', 0)\n btn1 = platform.request('button', 1)\n dig_sel = platform.request('ss_dig_sel', 0)\n disp = platform.request('ss_disp', 0)\n m.d.sync += timer.eq(timer + 1)\n m.d.comb += led.o.eq(timer[-1] & ~btn)\n running = Signal(1)\n \"\"\"\n # naive btn\n last_btn1 = Signal(1)\n m.d.sync += last_btn1.eq(btn1.i)\n with m.If(btn1.i & ~last_btn1):\n m.d.sync += running.eq(~running)\n \"\"\"\n btn1_pipe1 = Signal(1)\n btn1_pipe2 = Signal(1)\n btn1_db = Signal(range(0, 65535))\n m.d.sync += [btn1_pipe1.eq(btn1.i), btn1_pipe2.eq(btn1_pipe1)]\n with m.If(btn1_pipe2):\n m.d.sync += btn1_db.eq(65535)\n with m.Else():\n with m.If(btn1_db > 0):\n m.d.sync += btn1_db.eq(btn1_db - 1)\n with m.If(btn1_pipe2 & (btn1_db == 0)):\n m.d.sync += running.eq(~running)\n with m.If(running & (timer == 0)):\n with m.If(self.dd0.i_num == 9):\n m.d.sync += self.dd0.i_num.eq(0)\n with m.If(self.dd1.i_num == 9):\n m.d.sync += self.dd1.i_num.eq(0)\n with m.Else():\n m.d.sync += self.dd1.incr()\n with m.Else():\n m.d.sync += self.dd0.incr()\n with m.If(timer[8]):\n m.d.comb += [dig_sel.o.eq(0), disp.o.eq(self.dd1.o_disp)]\n with m.Else():\n m.d.comb += [dig_sel.o.eq(1), disp.o.eq(self.dd0.o_disp)]\n return m\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SSDigitDecoder(Elaboratable):\n\n def __init__(self):\n self.i_num = Signal(4)\n self.o_disp = Signal(7)\n self.lut = {(0): 63, (1): 6, (2): 91, (3): 79, (4): 102, (5): 109,\n (6): 125, (7): 7, (8): 127, (9): 103}\n\n def incr(self):\n return self.i_num.eq(self.i_num + 1)\n <mask token>\n\n\nclass Blinky(Elaboratable):\n\n def __init__(self):\n self.dd0 = SSDigitDecoder()\n self.dd1 = SSDigitDecoder()\n\n def elaborate(self, platform):\n m = Module()\n m.submodules.dd0 = self.dd0\n m.submodules.dd1 = self.dd1\n timer = Signal(20)\n led = platform.request('led', 0)\n btn = platform.request('button', 0)\n btn1 = platform.request('button', 1)\n dig_sel = platform.request('ss_dig_sel', 0)\n disp = platform.request('ss_disp', 0)\n m.d.sync += timer.eq(timer + 1)\n m.d.comb += led.o.eq(timer[-1] & ~btn)\n running = Signal(1)\n \"\"\"\n # naive btn\n last_btn1 = Signal(1)\n m.d.sync += last_btn1.eq(btn1.i)\n with m.If(btn1.i & ~last_btn1):\n m.d.sync += running.eq(~running)\n \"\"\"\n btn1_pipe1 = Signal(1)\n btn1_pipe2 = Signal(1)\n btn1_db = Signal(range(0, 65535))\n m.d.sync += [btn1_pipe1.eq(btn1.i), btn1_pipe2.eq(btn1_pipe1)]\n with m.If(btn1_pipe2):\n m.d.sync += btn1_db.eq(65535)\n with m.Else():\n with m.If(btn1_db > 0):\n m.d.sync += btn1_db.eq(btn1_db - 1)\n with m.If(btn1_pipe2 & (btn1_db == 0)):\n m.d.sync += running.eq(~running)\n with m.If(running & (timer == 0)):\n with m.If(self.dd0.i_num == 9):\n m.d.sync += self.dd0.i_num.eq(0)\n with m.If(self.dd1.i_num == 9):\n m.d.sync += self.dd1.i_num.eq(0)\n with m.Else():\n m.d.sync += self.dd1.incr()\n with m.Else():\n m.d.sync += self.dd0.incr()\n with m.If(timer[8]):\n m.d.comb += [dig_sel.o.eq(0), disp.o.eq(self.dd1.o_disp)]\n with m.Else():\n m.d.comb += [dig_sel.o.eq(1), disp.o.eq(self.dd0.o_disp)]\n return m\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SSDigitDecoder(Elaboratable):\n\n def __init__(self):\n self.i_num = Signal(4)\n self.o_disp = Signal(7)\n self.lut = {(0): 63, (1): 6, (2): 91, (3): 79, (4): 102, (5): 109,\n (6): 125, (7): 7, (8): 127, (9): 103}\n\n def incr(self):\n return self.i_num.eq(self.i_num + 1)\n\n def elaborate(self, platform):\n m = Module()\n with m.Switch(self.i_num):\n for a, b in self.lut.items():\n with m.Case(a):\n m.d.comb += self.o_disp.eq(b)\n return m\n\n\nclass Blinky(Elaboratable):\n\n def __init__(self):\n self.dd0 = SSDigitDecoder()\n self.dd1 = SSDigitDecoder()\n\n def elaborate(self, platform):\n m = Module()\n m.submodules.dd0 = self.dd0\n m.submodules.dd1 = self.dd1\n timer = Signal(20)\n led = platform.request('led', 0)\n btn = platform.request('button', 0)\n btn1 = platform.request('button', 1)\n dig_sel = platform.request('ss_dig_sel', 0)\n disp = platform.request('ss_disp', 0)\n m.d.sync += timer.eq(timer + 1)\n m.d.comb += led.o.eq(timer[-1] & ~btn)\n running = Signal(1)\n \"\"\"\n # naive btn\n last_btn1 = Signal(1)\n m.d.sync += last_btn1.eq(btn1.i)\n with m.If(btn1.i & ~last_btn1):\n m.d.sync += running.eq(~running)\n \"\"\"\n btn1_pipe1 = Signal(1)\n btn1_pipe2 = Signal(1)\n btn1_db = Signal(range(0, 65535))\n m.d.sync += [btn1_pipe1.eq(btn1.i), btn1_pipe2.eq(btn1_pipe1)]\n with m.If(btn1_pipe2):\n m.d.sync += btn1_db.eq(65535)\n with m.Else():\n with m.If(btn1_db > 0):\n m.d.sync += btn1_db.eq(btn1_db - 1)\n with m.If(btn1_pipe2 & (btn1_db == 0)):\n m.d.sync += running.eq(~running)\n with m.If(running & (timer == 0)):\n with m.If(self.dd0.i_num == 9):\n m.d.sync += self.dd0.i_num.eq(0)\n with m.If(self.dd1.i_num == 9):\n m.d.sync += self.dd1.i_num.eq(0)\n with m.Else():\n m.d.sync += self.dd1.incr()\n with m.Else():\n m.d.sync += self.dd0.incr()\n with m.If(timer[8]):\n m.d.comb += [dig_sel.o.eq(0), disp.o.eq(self.dd1.o_disp)]\n with m.Else():\n m.d.comb += [dig_sel.o.eq(1), disp.o.eq(self.dd0.o_disp)]\n return m\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass SSDigitDecoder(Elaboratable):\n\n def __init__(self):\n self.i_num = Signal(4)\n self.o_disp = Signal(7)\n self.lut = {(0): 63, (1): 6, (2): 91, (3): 79, (4): 102, (5): 109,\n (6): 125, (7): 7, (8): 127, (9): 103}\n\n def incr(self):\n return self.i_num.eq(self.i_num + 1)\n\n def elaborate(self, platform):\n m = Module()\n with m.Switch(self.i_num):\n for a, b in self.lut.items():\n with m.Case(a):\n m.d.comb += self.o_disp.eq(b)\n return m\n\n\nclass Blinky(Elaboratable):\n\n def __init__(self):\n self.dd0 = SSDigitDecoder()\n self.dd1 = SSDigitDecoder()\n\n def elaborate(self, platform):\n m = Module()\n m.submodules.dd0 = self.dd0\n m.submodules.dd1 = self.dd1\n timer = Signal(20)\n led = platform.request('led', 0)\n btn = platform.request('button', 0)\n btn1 = platform.request('button', 1)\n dig_sel = platform.request('ss_dig_sel', 0)\n disp = platform.request('ss_disp', 0)\n m.d.sync += timer.eq(timer + 1)\n m.d.comb += led.o.eq(timer[-1] & ~btn)\n running = Signal(1)\n \"\"\"\n # naive btn\n last_btn1 = Signal(1)\n m.d.sync += last_btn1.eq(btn1.i)\n with m.If(btn1.i & ~last_btn1):\n m.d.sync += running.eq(~running)\n \"\"\"\n btn1_pipe1 = Signal(1)\n btn1_pipe2 = Signal(1)\n btn1_db = Signal(range(0, 65535))\n m.d.sync += [btn1_pipe1.eq(btn1.i), btn1_pipe2.eq(btn1_pipe1)]\n with m.If(btn1_pipe2):\n m.d.sync += btn1_db.eq(65535)\n with m.Else():\n with m.If(btn1_db > 0):\n m.d.sync += btn1_db.eq(btn1_db - 1)\n with m.If(btn1_pipe2 & (btn1_db == 0)):\n m.d.sync += running.eq(~running)\n with m.If(running & (timer == 0)):\n with m.If(self.dd0.i_num == 9):\n m.d.sync += self.dd0.i_num.eq(0)\n with m.If(self.dd1.i_num == 9):\n m.d.sync += self.dd1.i_num.eq(0)\n with m.Else():\n m.d.sync += self.dd1.incr()\n with m.Else():\n m.d.sync += self.dd0.incr()\n with m.If(timer[8]):\n m.d.comb += [dig_sel.o.eq(0), disp.o.eq(self.dd1.o_disp)]\n with m.Else():\n m.d.comb += [dig_sel.o.eq(1), disp.o.eq(self.dd0.o_disp)]\n return m\n\n\nif __name__ == '__main__':\n p = ICEBreakerPlatform()\n p.add_resources(p.break_off_pmod)\n p.add_resources([Resource('ss_dig_sel', 0, Pins('10', dir='o', conn=(\n 'pmod', 0)), Attrs(IO_STANDARD='SB_LVCMOS')), Resource('ss_disp', 0,\n PinsN('1 2 3 4 7 8 9', dir='o', conn=('pmod', 0)), Attrs(\n IO_STANDARD='SB_LVCMOS'))])\n for r in p.resources:\n print('r:', r)\n p.build(Blinky(), do_program=False)\n",
"step-5": "#!/usr/bin/env python3\n\nfrom nmigen import *\nfrom nmigen.build import *\nfrom nmigen_boards.icebreaker import ICEBreakerPlatform\n\nclass SSDigitDecoder(Elaboratable):\n def __init__(self):\n self.i_num = Signal(4)\n self.o_disp = Signal(7)\n self.lut = {\n 0: 0b011_1111,\n 1: 0b000_0110,\n 2: 0b101_1011,\n 3: 0b100_1111,\n 4: 0b110_0110,\n 5: 0b110_1101,\n 6: 0b111_1101,\n 7: 0b000_0111,\n 8: 0b111_1111,\n 9: 0b110_0111,\n }\n def incr(self):\n return self.i_num.eq(self.i_num+1)\n def elaborate(self, platform):\n m = Module()\n with m.Switch(self.i_num):\n for a, b in self.lut.items():\n with m.Case(a):\n m.d.comb += self.o_disp.eq(b)\n return m\n\nclass Blinky(Elaboratable):\n def __init__(self):\n self.dd0 = SSDigitDecoder()\n self.dd1 = SSDigitDecoder()\n def elaborate(self, platform):\n m = Module()\n m.submodules.dd0 = self.dd0\n m.submodules.dd1 = self.dd1\n\n timer = Signal(20)\n led = platform.request('led', 0)\n btn = platform.request('button', 0)\n btn1 = platform.request('button', 1)\n dig_sel = platform.request('ss_dig_sel', 0)\n disp = platform.request('ss_disp', 0)\n\n # blinky led\n m.d.sync += timer.eq(timer+1)\n m.d.comb += led.o.eq(timer[-1] & ~btn)\n\n # 7 seg\n running = Signal(1)\n \"\"\"\n # naive btn\n last_btn1 = Signal(1)\n m.d.sync += last_btn1.eq(btn1.i)\n with m.If(btn1.i & ~last_btn1):\n m.d.sync += running.eq(~running)\n \"\"\"\n btn1_pipe1 = Signal(1)\n btn1_pipe2 = Signal(1)\n btn1_db = Signal(range(0, 0xffff))\n m.d.sync += [\n btn1_pipe1.eq(btn1.i),\n btn1_pipe2.eq(btn1_pipe1),\n ]\n with m.If(btn1_pipe2):\n m.d.sync += btn1_db.eq(0xffff)\n with m.Else():\n with m.If(btn1_db > 0):\n m.d.sync += btn1_db.eq(btn1_db-1)\n with m.If(btn1_pipe2 & (btn1_db == 0)):\n m.d.sync += running.eq(~running)\n\n with m.If(running & (timer == 0)):\n with m.If(self.dd0.i_num == 9):\n m.d.sync += self.dd0.i_num.eq(0)\n with m.If(self.dd1.i_num == 9):\n m.d.sync += self.dd1.i_num.eq(0)\n with m.Else():\n m.d.sync += self.dd1.incr()\n with m.Else():\n m.d.sync += self.dd0.incr()\n with m.If(timer[8]):\n m.d.comb += [\n dig_sel.o.eq(0),\n disp.o.eq(self.dd1.o_disp),\n ]\n with m.Else():\n m.d.comb += [\n dig_sel.o.eq(1),\n disp.o.eq(self.dd0.o_disp),\n ]\n\n return m\n\nif __name__ == '__main__':\n p = ICEBreakerPlatform()\n p.add_resources(p.break_off_pmod)\n p.add_resources([\n Resource('ss_dig_sel', 0, \n Pins('10', dir='o', conn=('pmod', 0)),\n Attrs(IO_STANDARD='SB_LVCMOS')),\n Resource('ss_disp', 0, \n PinsN('1 2 3 4 7 8 9', dir='o', conn=('pmod', 0)),\n Attrs(IO_STANDARD='SB_LVCMOS')),\n ])\n for r in p.resources:\n print('r:', r)\n p.build(Blinky(), do_program=False)\n",
"step-ids": [
4,
6,
7,
8,
10
]
}
|
[
4,
6,
7,
8,
10
] |
# -*- coding: UTF-8 -*-
from keywords.httpkeys1 import HTTP
http1 = HTTP()
ip = '10.68.170.184:8080'
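# log in as admin, then exercise the song-upload endpoint of the download service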
http1.post('http://'+ip+'/music_download/api/login','username=admin&password=123456')
# http1.savejson('result','id')
# http1.get('http://47.101.197.102:8080/music/api/user','{id}')
# data = {'username':'admin','password':'123456'}
# # pass the data as JSON
# http1.postjson('http://47.101.197.102:8080/music/api/login',data=data)
# http1.savejson('result','id')
# http1.get('http://47.101.197.102:8080/music/api/user','{id}')
# http1.addheader('Content-type','multipart/form-data')
http1.upload('http://'+ip+'/music_download/api/song/upload','speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\music_data\\1.mp3')
# http1.upload('http://10.68.170.184:8080/music/api/song/upload','filename=1.mp3&speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file1=G:/music_data/1.mp3')
|
normal
|
{
"blob_id": "68e09f72e8338efbef108ffd0c93eff067bf7b07",
"index": 135,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nhttp1.post('http://' + ip + '/music_download/api/login',\n 'username=admin&password=123456')\nhttp1.upload('http://' + ip + '/music_download/api/song/upload',\n 'speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3'\n )\n",
"step-3": "<mask token>\nhttp1 = HTTP()\nip = '10.68.170.184:8080'\nhttp1.post('http://' + ip + '/music_download/api/login',\n 'username=admin&password=123456')\nhttp1.upload('http://' + ip + '/music_download/api/song/upload',\n 'speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3'\n )\n",
"step-4": "from keywords.httpkeys1 import HTTP\nhttp1 = HTTP()\nip = '10.68.170.184:8080'\nhttp1.post('http://' + ip + '/music_download/api/login',\n 'username=admin&password=123456')\nhttp1.upload('http://' + ip + '/music_download/api/song/upload',\n 'speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3'\n )\n",
"step-5": "# -*- coding: UTF-8 -*-\nfrom keywords.httpkeys1 import HTTP\n\nhttp1 = HTTP()\n\n# ip = '10.68.170.184:8080'\nip = '10.68.170.184:8080'\n\nhttp1.post('http://'+ip+'/music_download/api/login','username=admin&password=123456')\n# http1.savejson('result','id')\n# http1.get('http://47.101.197.102:8080/music/api/user','{id}')\n\n# data = {'username':'admin','password':'123456'}\n# # json方式传递数据\n# http1.postjson('http://47.101.197.102:8080/music/api/login',data=data)\n# http1.savejson('result','id')\n# http1.get('http://47.101.197.102:8080/music/api/user','{id}')\n\n# http1.addheader('Content-type','multipart/form-data')\n\nhttp1.upload('http://'+ip+'/music_download/api/song/upload','speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file=G:\\\\music_data\\\\1.mp3')\n\n# http1.upload('http://10.68.170.184:8080/music/api/song/upload','filename=1.mp3&speed=0&styleId=c0a4bd86-a09b-43ac-8169-14bb69630ac0&file1=G:/music_data/1.mp3')\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from behave import given, when, then
from pages.LoginPage import LoginPage
from pages.ProductsPage import ProductsPage
class ProductsListSteps:
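    # Step implementations delegate to the LoginPage/ProductsPage page objects
    # kept on the behave context.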
@given("Prepare classes products list")
def prepare_class(context):
context.login = LoginPage(context.driver)
context.products = ProductsPage(context.driver)
@when("Sort by price low to high")
def sort_low_to_high(context):
context.products.sort_price_low_to_high()
@then("Validate price order")
def validate_price_order(context):
context.products.validate_price_order()
|
normal
|
{
"blob_id": "a74a880039bad030d665e001da74075bd61fcc23",
"index": 1593,
"step-1": "<mask token>\n\n\nclass ProductsListSteps:\n\n @given('Prepare classes products list')\n def prepare_class(context):\n context.login = LoginPage(context.driver)\n context.products = ProductsPage(context.driver)\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ProductsListSteps:\n\n @given('Prepare classes products list')\n def prepare_class(context):\n context.login = LoginPage(context.driver)\n context.products = ProductsPage(context.driver)\n\n @when('Sort by price low to high')\n def sort_low_to_high(context):\n context.products.sort_price_low_to_high()\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ProductsListSteps:\n\n @given('Prepare classes products list')\n def prepare_class(context):\n context.login = LoginPage(context.driver)\n context.products = ProductsPage(context.driver)\n\n @when('Sort by price low to high')\n def sort_low_to_high(context):\n context.products.sort_price_low_to_high()\n\n @then('Validate price order')\n def validate_price_order(context):\n context.products.validate_price_order()\n",
"step-4": "from behave import given, when, then\nfrom pages.LoginPage import LoginPage\nfrom pages.ProductsPage import ProductsPage\n\n\nclass ProductsListSteps:\n\n @given('Prepare classes products list')\n def prepare_class(context):\n context.login = LoginPage(context.driver)\n context.products = ProductsPage(context.driver)\n\n @when('Sort by price low to high')\n def sort_low_to_high(context):\n context.products.sort_price_low_to_high()\n\n @then('Validate price order')\n def validate_price_order(context):\n context.products.validate_price_order()\n",
"step-5": "from behave import given, when, then\nfrom pages.LoginPage import LoginPage\nfrom pages.ProductsPage import ProductsPage\n\nclass ProductsListSteps:\n\n @given(\"Prepare classes products list\")\n def prepare_class(context):\n context.login = LoginPage(context.driver)\n context.products = ProductsPage(context.driver)\n\n @when(\"Sort by price low to high\")\n def sort_low_to_high(context):\n context.products.sort_price_low_to_high()\n\n @then(\"Validate price order\")\n def validate_price_order(context):\n context.products.validate_price_order()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding:utf-8 -*-
import time
import random
import numpy as np
from collections import defaultdict
class Simulator(object):
ALLOCATION_INTERVAL_MEAN = 150
ALLOCATION_INTERVAL_STDEV = 30
AFTER_ALLOCATION_INTERVAL_MEAN = 150
AFTER_ALLOCATION_INTERVAL_STDEV = 30
CLICK_INTERVAL_MEAN = 30
CLICK_INTERVAL_STDEV = 20
OUTPUT_NAME = 'output.csv'
	# For single-threaded development, split time into discrete time slots.
	# Requests arriving in the same time slot are all handled at once, and the budget is deducted the same way.
def __init__(self, budget, unitPrice):
self.budget = budget
self.unitPrice = unitPrice
self.spent = 0
self.depositRequest = defaultdict(int)
self.allocationRequest = defaultdict(int)
self.timeSpent = defaultdict(int)
self.now = 0
self.past_spent = 0
def AdAllocation(self):
temp = np.random.normal(self.ALLOCATION_INTERVAL_MEAN, self.ALLOCATION_INTERVAL_STDEV, 1)[0]
if temp < 1:
requestCount = 0
else:
requestCount = int(temp)
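		# Once 90% of the budget has been spent, redraw the request count from
		# the post-allocation distribution to model throttled ad serving.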
if self.spent > self.budget * 0.9:
temp = np.random.normal(self.AFTER_ALLOCATION_INTERVAL_MEAN, self.AFTER_ALLOCATION_INTERVAL_STDEV, 1)[0]
if temp < 1:
requestCount = 0
else:
requestCount = int(temp)
if requestCount == 0:
return
		for i in range(requestCount):
isCapped = self.IsCap()
if isCapped:
continue
self.allocationRequest[self.now] += 1
self.ClickAd()
def DepositBudget(self):
self.past_spent = self.spent
self.spent += self.depositRequest[self.now] * self.unitPrice
self.timeSpent[self.now] = self.spent
self.now += 1
def ClickAd(self):
interval = np.random.normal(self.CLICK_INTERVAL_MEAN, self.CLICK_INTERVAL_STDEV, 1)[0]
if interval >= 0:
clickTime = self.now + int(interval)
else:
clickTime = self.now
self.depositRequest[clickTime] += 1
def IsCap(self):
return NotImplemented
	def PrintSpent(self):
		for i in range(self.now):
			print(str(i) + ',' + str(self.timeSpent[i]))
	def PrintAllocation(self):
		allocationSum = 0
		for i in range(self.now):
			allocationSum += self.allocationRequest[i]
			print(str(i) + ',' + str(allocationSum))
def OutputResult(self):
allocationSum = 0
f = open(self.OUTPUT_NAME, 'w')
f.write('time,spent,allocation\n')
		for i in range(self.now):
allocationSum += self.allocationRequest[i]
f.write(str(i) + ',' + str(self.timeSpent[i]) + ',' + str(allocationSum) + '\n')
f.close()
|
normal
|
{
"blob_id": "7378f76b4c1f67d8a549aa2a88db8caa9b05338e",
"index": 4441,
"step-1": "# -*- coding:utf-8 -*-\nimport time\nimport random\nimport numpy as np\nfrom collections import defaultdict\n\nclass Simulator(object):\n\tALLOCATION_INTERVAL_MEAN = 150\n\tALLOCATION_INTERVAL_STDEV = 30\n\tAFTER_ALLOCATION_INTERVAL_MEAN = 150\n\tAFTER_ALLOCATION_INTERVAL_STDEV = 30\n\tCLICK_INTERVAL_MEAN = 30\n\tCLICK_INTERVAL_STDEV = 20\n\tOUTPUT_NAME = 'output.csv'\n\t# Single thread 개발을 위해 time slot 간격으로 나누자.\n\t# 같은 time slot에 들어온 요청들은 모두 동시 처리 되며 예산 감소도 마찬가지다.\n\tdef __init__(self, budget, unitPrice):\n\t\tself.budget = budget\n\t\tself.unitPrice = unitPrice\n\t\tself.spent = 0\n\t\tself.depositRequest = defaultdict(int)\n\t\tself.allocationRequest = defaultdict(int)\n\t\tself.timeSpent = defaultdict(int)\n\t\tself.now = 0\n\t\tself.past_spent = 0\n\n\tdef AdAllocation(self):\n\t\ttemp = np.random.normal(self.ALLOCATION_INTERVAL_MEAN, self.ALLOCATION_INTERVAL_STDEV, 1)[0]\n\t\tif temp < 1:\n\t\t\trequestCount = 0\n\t\telse:\n\t\t\trequestCount = int(temp)\n\n\t\tif self.spent > self.budget * 0.9:\n\t\t\ttemp = np.random.normal(self.AFTER_ALLOCATION_INTERVAL_MEAN, self.AFTER_ALLOCATION_INTERVAL_STDEV, 1)[0]\n\t\t\tif temp < 1:\n\t\t\t\trequestCount = 0\n\t\t\telse:\n\t\t\t\trequestCount = int(temp)\n\n\t\tif requestCount == 0:\n\t\t\treturn\n\n\t\tfor i in xrange(requestCount):\n\t\t\tisCapped = self.IsCap()\n\t\t\tif isCapped:\n\t\t\t\tcontinue\n\t\t\tself.allocationRequest[self.now] += 1\n\t\t\tself.ClickAd()\n\n\tdef DepositBudget(self):\n\t\tself.past_spent = self.spent\n\t\tself.spent += self.depositRequest[self.now] * self.unitPrice\n\t\tself.timeSpent[self.now] = self.spent\n\t\tself.now += 1\n\n\tdef ClickAd(self):\n\t\tinterval = np.random.normal(self.CLICK_INTERVAL_MEAN, self.CLICK_INTERVAL_STDEV, 1)[0]\n\t\tif interval >= 0:\n\t\t\tclickTime = self.now + int(interval)\n\t\telse:\n\t\t\tclickTime = self.now\n\t\tself.depositRequest[clickTime] += 1\n\n\tdef IsCap(self):\n\t\treturn NotImplemented\n\n\tdef PrintSpent(self):\n\t\tfor i in xrange(self.now):\n\t\t\tprint str(i) + ',' + str(self.timeSpent[i])\n\n\tdef PrintAllocation(self):\n\t\tallocationSum = 0\n\t\tfor i in xrange(self.now):\n\t\t\tallocationSum += self.allocationRequest[i]\n\t\t\tprint str(i) + ',' + str(allocationSum)\n\n\tdef OutputResult(self):\n\t\tallocationSum = 0\n\t\tf = open(self.OUTPUT_NAME, 'w')\n\t\tf.write('time,spent,allocation\\n')\n\t\tfor i in xrange(self.now):\n\t\t\tallocationSum += self.allocationRequest[i]\n\t\t\tf.write(str(i) + ',' + str(self.timeSpent[i]) + ',' + str(allocationSum) + '\\n')\n\t\tf.close()\n\n\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
class Solution(object):
def maxSubArrayLen(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
sums = [0] * (len(nums) + 1)
seen = {}
seen[0] = -1
res = 0
for idx, n in enumerate(nums):
sums[idx + 1] = sums[idx] + n
if sums[idx + 1] - k in seen:
res = max(res, idx - seen[sums[idx + 1] - k])
if sums[idx + 1] not in seen:
seen[sums[idx + 1]] = idx
return res
|
normal
|
{
"blob_id": "1ccaedb6e79101764db1907634ba627a0f9f2bb2",
"index": 5500,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def maxSubArrayLen(self, nums, k):\n \"\"\"\n :type nums: List[int]\n :type k: int\n :rtype: int\n \"\"\"\n sums = [0] * (len(nums) + 1)\n seen = {}\n seen[0] = -1\n res = 0\n for idx, n in enumerate(nums):\n sums[idx + 1] = sums[idx] + n\n if sums[idx + 1] - k in seen:\n res = max(res, idx - seen[sums[idx + 1] - k])\n if sums[idx + 1] not in seen:\n seen[sums[idx + 1]] = idx\n return res\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Exercise 1
print('Pepito')
print('Cumpleaños: 22 de enero')
edad = 42
print('Tengo', edad, 'años')
cantante = 'Suzanne Vega'
comida = 'rúcula'
ciudad = 'Barcelona'
print('Me gusta la música de', cantante)
print('Me gusta cenar', comida)
print('Vivo en', ciudad)
|
normal
|
{
"blob_id": "f26c624e8ae9711eb835e223407256e60dfc6d6e",
"index": 8945,
"step-1": "<mask token>\n",
"step-2": "print('Pepito')\nprint('Cumpleaños: 22 de enero')\n<mask token>\nprint('Tengo', edad, 'años')\n<mask token>\nprint('Me gusta la música de', cantante)\nprint('Me gusta cenar', comida)\nprint('Vivo en', ciudad)\n",
"step-3": "print('Pepito')\nprint('Cumpleaños: 22 de enero')\nedad = 42\nprint('Tengo', edad, 'años')\ncantante = 'Suzanne Vega'\ncomida = 'rúcula'\nciudad = 'Barcelona'\nprint('Me gusta la música de', cantante)\nprint('Me gusta cenar', comida)\nprint('Vivo en', ciudad)\n",
"step-4": "# Ejercicio 1\nprint('Pepito')\nprint('Cumpleaños: 22 de enero')\nedad = 42\nprint('Tengo', edad, 'años')\ncantante = 'Suzanne Vega'\ncomida = 'rúcula'\nciudad = 'Barcelona'\nprint('Me gusta la música de', cantante)\nprint('Me gusta cenar', comida)\nprint('Vivo en', ciudad)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
from math import ceil, floor, sqrt
from typing import Iterator
def palindromes(n: int) -> Iterator[int]:
"""yield successive palindromes starting at n"""
# 1 -> 2 -> 3 ... 9 -> 11 -> 22 -> 33 -> 44 .. 99 -> 101
# 101 -> 111 -> 121 -> 131 -> ... -> 191 -> 202 -> 212
# 989 -> 999 -> 1001 -> 1111 -> 1221
# 9889 -> 9999 -> 10001 -> 10101 -> 10201
prev = n
s = str(n)
even = len(s) % 2 == 0
s = s[:ceil(len(s) / 2)]
n = int(s)
while True:
if even:
pal = int(''.join([s, s[-1::-1]])) # join '12' with '21'
else:
pal = int(''.join([s, s[-2::-1]])) # join '12' with '1'
if prev <= pal:
yield pal
n += 1
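        # When every digit of the half is 9 (e.g. 999 -> 1001), the palindrome
        # gains a digit, which flips the even/odd parity of its length.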
if all(digit == '9' for digit in s):
even = not even
if even: n //= 10
s = str(n)
def isPrime(n: int) -> bool:
if n < 2:
return False
for i in range(2, floor(sqrt(n)) + 1):
if n % i == 0:
return False
return True
class Solution:
def primePalindrome(self, N: int) -> int:
"""return lowest prime palindrome >= N"""
for p in palindromes(N):
if isPrime(p):
return p
|
normal
|
{
"blob_id": "b07073a7f65dbc10806b68729f21a8bc8773a1ab",
"index": 3836,
"step-1": "<mask token>\n\n\nclass Solution:\n\n def primePalindrome(self, N: int) ->int:\n \"\"\"return lowest prime palindrome >= N\"\"\"\n for p in palindromes(N):\n if isPrime(p):\n return p\n",
"step-2": "<mask token>\n\n\ndef palindromes(n: int) ->int:\n \"\"\"yield successive palindromes starting at n\"\"\"\n prev = n\n s = str(n)\n even = len(s) % 2 == 0\n s = s[:ceil(len(s) / 2)]\n n = int(s)\n while True:\n if even:\n pal = int(''.join([s, s[-1::-1]]))\n else:\n pal = int(''.join([s, s[-2::-1]]))\n if prev <= pal:\n yield pal\n n += 1\n if all(digit == '9' for digit in s):\n even = not even\n if even:\n n //= 10\n s = str(n)\n\n\n<mask token>\n\n\nclass Solution:\n\n def primePalindrome(self, N: int) ->int:\n \"\"\"return lowest prime palindrome >= N\"\"\"\n for p in palindromes(N):\n if isPrime(p):\n return p\n",
"step-3": "<mask token>\n\n\ndef palindromes(n: int) ->int:\n \"\"\"yield successive palindromes starting at n\"\"\"\n prev = n\n s = str(n)\n even = len(s) % 2 == 0\n s = s[:ceil(len(s) / 2)]\n n = int(s)\n while True:\n if even:\n pal = int(''.join([s, s[-1::-1]]))\n else:\n pal = int(''.join([s, s[-2::-1]]))\n if prev <= pal:\n yield pal\n n += 1\n if all(digit == '9' for digit in s):\n even = not even\n if even:\n n //= 10\n s = str(n)\n\n\ndef isPrime(n: int) ->bool:\n if n < 2:\n return False\n for i in range(2, floor(sqrt(n)) + 1):\n if n % i == 0:\n return False\n return True\n\n\nclass Solution:\n\n def primePalindrome(self, N: int) ->int:\n \"\"\"return lowest prime palindrome >= N\"\"\"\n for p in palindromes(N):\n if isPrime(p):\n return p\n",
"step-4": "from math import ceil, floor, sqrt\n\n\ndef palindromes(n: int) ->int:\n \"\"\"yield successive palindromes starting at n\"\"\"\n prev = n\n s = str(n)\n even = len(s) % 2 == 0\n s = s[:ceil(len(s) / 2)]\n n = int(s)\n while True:\n if even:\n pal = int(''.join([s, s[-1::-1]]))\n else:\n pal = int(''.join([s, s[-2::-1]]))\n if prev <= pal:\n yield pal\n n += 1\n if all(digit == '9' for digit in s):\n even = not even\n if even:\n n //= 10\n s = str(n)\n\n\ndef isPrime(n: int) ->bool:\n if n < 2:\n return False\n for i in range(2, floor(sqrt(n)) + 1):\n if n % i == 0:\n return False\n return True\n\n\nclass Solution:\n\n def primePalindrome(self, N: int) ->int:\n \"\"\"return lowest prime palindrome >= N\"\"\"\n for p in palindromes(N):\n if isPrime(p):\n return p\n",
"step-5": "#!/usr/bin/env python\n\nfrom math import ceil, floor, sqrt\n\ndef palindromes(n: int) -> int:\n \"\"\"yield successive palindromes starting at n\"\"\"\n # 1 -> 2 -> 3 ... 9 -> 11 -> 22 -> 33 -> 44 .. 99 -> 101\n # 101 -> 111 -> 121 -> 131 -> ... -> 191 -> 202 -> 212\n # 989 -> 999 -> 1001 -> 1111 -> 1221\n # 9889 -> 9999 -> 10001 -> 10101 -> 10201\n prev = n\n s = str(n)\n even = len(s) % 2 == 0\n s = s[:ceil(len(s) / 2)]\n n = int(s)\n while True:\n if even:\n pal = int(''.join([s, s[-1::-1]])) # join '12' with '21'\n else:\n pal = int(''.join([s, s[-2::-1]])) # join '12' with '1'\n if prev <= pal:\n yield pal\n \n n += 1\n if all(digit == '9' for digit in s):\n even = not even\n if even: n //= 10\n s = str(n)\n\ndef isPrime(n: int) -> bool:\n if n < 2:\n return False\n for i in range(2, floor(sqrt(n)) + 1):\n if n % i == 0:\n return False\n return True\n \n\nclass Solution:\n def primePalindrome(self, N: int) -> int:\n \"\"\"return lowest prime palindrome >= N\"\"\"\n for p in palindromes(N):\n if isPrime(p):\n return p\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
stevila = [5, 2, 8, 3]
#Print all the numbers
print(stevila)
#Print the number at index 1
print(stevila[1])
|
normal
|
{
"blob_id": "6e845f2543b548fb936cc3719eb150e530281945",
"index": 9505,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(stevila)\nprint(stevila[1])\n",
"step-3": "stevila = [5, 2, 8, 3]\nprint(stevila)\nprint(stevila[1])\n",
"step-4": "stevila = [5, 2, 8, 3]\n\n#Izpis vseh števil\nprint(stevila)\n\n#Izpis števila na mestu 1\nprint(stevila[1])",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from liver_tumor_segmentation.CGBS_Net import *
from liver_tumor_segmentation.loss import *
from keras.optimizers import *
from liver_tumor_segmentation.CGBS_data_generator import *
from keras.callbacks import *
import os
from keras.callbacks import ReduceLROnPlateau
from keras import losses
from configuration import *
def get_lr_metric(optimizer):
def lr(y_true, y_pred):
return optimizer.lr
return lr
def train():
batch_size = 4 #4 for single GPU; 8 for two GPUs
os.environ["CUDA_VISIBLE_DEVICES"] = '0'
trainGene = trainGenerator(batch_size, data_path='/data',
folder='train', aug_dict=aug_args, seed = 1, interaction='RECIST')
devGene = trainGenerator(batch_size, data_path='/data',
folder='dev', aug_dict=no_aug_args, seed = 1, interaction='RECIST')
testGene = testGenerator(test_path='test_path', interaction='RECIST')
model = CGBS_Net(input_shape=(256, 256, 4),rate=3)
model.summary()
# GPU_COUNT = 2
# model = multi_gpu_model(original_model, GPU_COUNT)
opt=SGD(lr=4e-4, decay=1e-6, momentum=0.9, nesterov=True)
lr_metric = get_lr_metric(opt)
model.compile(optimizer=opt, loss={'out_seg': dice_coef_loss, 'out_shape': losses.binary_crossentropy},
loss_weights={'out_seg': 1, 'out_shape': 1}, metrics=[dice_coef, lr_metric])
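    # Joint objective: Dice loss on the segmentation head plus binary
    # cross-entropy on the shape head, weighted equally.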
csv_logger = CSVLogger('./Models/'+'CGBS_Net.csv', append=True) # ss-0.01
# tensorboard = TensorBoard(log_dir='./tmp/graph', write_graph=True, write_images=True)
# earlystopping = EarlyStopping(monitor='val_loss', patience=0, verbose=0, mode='auto')
model_checkpoint = ModelCheckpoint(
'./Models/CGBS/{epoch:02d}-{val_out_seg_dice_coef:.4f}.h5',
monitor='val_out_seg_loss',
verbose=0, save_best_only=True, save_weights_only=True, mode='auto', period=1)
reduce_lr = ReduceLROnPlateau(monitor='val_out_seg_loss', factor=0.1, patience=50, mode='auto')
model.fit_generator(generator=trainGene, steps_per_epoch=int(5000/batch_size),
epochs=500, validation_data=devGene,
validation_steps=int(5000/batch_size), verbose=2,
callbacks=[model_checkpoint, csv_logger, reduce_lr])
train()
|
normal
|
{
"blob_id": "8c17f2c770c24bbf8c73628c6740c0b866e6b1c0",
"index": 9047,
"step-1": "<mask token>\n\n\ndef train():\n batch_size = 4\n os.environ['CUDA_VISIBLE_DEVICES'] = '0'\n trainGene = trainGenerator(batch_size, data_path='/data', folder=\n 'train', aug_dict=aug_args, seed=1, interaction='RECIST')\n devGene = trainGenerator(batch_size, data_path='/data', folder='dev',\n aug_dict=no_aug_args, seed=1, interaction='RECIST')\n testGene = testGenerator(test_path='test_path', interaction='RECIST')\n model = CGBS_Net(input_shape=(256, 256, 4), rate=3)\n model.summary()\n opt = SGD(lr=0.0004, decay=1e-06, momentum=0.9, nesterov=True)\n lr_metric = get_lr_metric(opt)\n model.compile(optimizer=opt, loss={'out_seg': dice_coef_loss,\n 'out_shape': losses.binary_crossentropy}, loss_weights={'out_seg': \n 1, 'out_shape': 1}, metrics=[dice_coef, lr_metric])\n csv_logger = CSVLogger('./Models/' + 'CGBS_Net.csv', append=True)\n model_checkpoint = ModelCheckpoint(\n './Models/CGBS/{epoch:02d}-{val_out_seg_dice_coef:.4f}.h5', monitor\n ='val_out_seg_loss', verbose=0, save_best_only=True,\n save_weights_only=True, mode='auto', period=1)\n reduce_lr = ReduceLROnPlateau(monitor='val_out_seg_loss', factor=0.1,\n patience=50, mode='auto')\n model.fit_generator(generator=trainGene, steps_per_epoch=int(5000 /\n batch_size), epochs=500, validation_data=devGene, validation_steps=\n int(5000 / batch_size), verbose=2, callbacks=[model_checkpoint,\n csv_logger, reduce_lr])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_lr_metric(optimizer):\n\n def lr(y_true, y_pred):\n return optimizer.lr\n return lr\n\n\ndef train():\n batch_size = 4\n os.environ['CUDA_VISIBLE_DEVICES'] = '0'\n trainGene = trainGenerator(batch_size, data_path='/data', folder=\n 'train', aug_dict=aug_args, seed=1, interaction='RECIST')\n devGene = trainGenerator(batch_size, data_path='/data', folder='dev',\n aug_dict=no_aug_args, seed=1, interaction='RECIST')\n testGene = testGenerator(test_path='test_path', interaction='RECIST')\n model = CGBS_Net(input_shape=(256, 256, 4), rate=3)\n model.summary()\n opt = SGD(lr=0.0004, decay=1e-06, momentum=0.9, nesterov=True)\n lr_metric = get_lr_metric(opt)\n model.compile(optimizer=opt, loss={'out_seg': dice_coef_loss,\n 'out_shape': losses.binary_crossentropy}, loss_weights={'out_seg': \n 1, 'out_shape': 1}, metrics=[dice_coef, lr_metric])\n csv_logger = CSVLogger('./Models/' + 'CGBS_Net.csv', append=True)\n model_checkpoint = ModelCheckpoint(\n './Models/CGBS/{epoch:02d}-{val_out_seg_dice_coef:.4f}.h5', monitor\n ='val_out_seg_loss', verbose=0, save_best_only=True,\n save_weights_only=True, mode='auto', period=1)\n reduce_lr = ReduceLROnPlateau(monitor='val_out_seg_loss', factor=0.1,\n patience=50, mode='auto')\n model.fit_generator(generator=trainGene, steps_per_epoch=int(5000 /\n batch_size), epochs=500, validation_data=devGene, validation_steps=\n int(5000 / batch_size), verbose=2, callbacks=[model_checkpoint,\n csv_logger, reduce_lr])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_lr_metric(optimizer):\n\n def lr(y_true, y_pred):\n return optimizer.lr\n return lr\n\n\ndef train():\n batch_size = 4\n os.environ['CUDA_VISIBLE_DEVICES'] = '0'\n trainGene = trainGenerator(batch_size, data_path='/data', folder=\n 'train', aug_dict=aug_args, seed=1, interaction='RECIST')\n devGene = trainGenerator(batch_size, data_path='/data', folder='dev',\n aug_dict=no_aug_args, seed=1, interaction='RECIST')\n testGene = testGenerator(test_path='test_path', interaction='RECIST')\n model = CGBS_Net(input_shape=(256, 256, 4), rate=3)\n model.summary()\n opt = SGD(lr=0.0004, decay=1e-06, momentum=0.9, nesterov=True)\n lr_metric = get_lr_metric(opt)\n model.compile(optimizer=opt, loss={'out_seg': dice_coef_loss,\n 'out_shape': losses.binary_crossentropy}, loss_weights={'out_seg': \n 1, 'out_shape': 1}, metrics=[dice_coef, lr_metric])\n csv_logger = CSVLogger('./Models/' + 'CGBS_Net.csv', append=True)\n model_checkpoint = ModelCheckpoint(\n './Models/CGBS/{epoch:02d}-{val_out_seg_dice_coef:.4f}.h5', monitor\n ='val_out_seg_loss', verbose=0, save_best_only=True,\n save_weights_only=True, mode='auto', period=1)\n reduce_lr = ReduceLROnPlateau(monitor='val_out_seg_loss', factor=0.1,\n patience=50, mode='auto')\n model.fit_generator(generator=trainGene, steps_per_epoch=int(5000 /\n batch_size), epochs=500, validation_data=devGene, validation_steps=\n int(5000 / batch_size), verbose=2, callbacks=[model_checkpoint,\n csv_logger, reduce_lr])\n\n\ntrain()\n",
"step-4": "from liver_tumor_segmentation.CGBS_Net import *\nfrom liver_tumor_segmentation.loss import *\nfrom keras.optimizers import *\nfrom liver_tumor_segmentation.CGBS_data_generator import *\nfrom keras.callbacks import *\nimport os\nfrom keras.callbacks import ReduceLROnPlateau\nfrom keras import losses\nfrom configuration import *\n\n\ndef get_lr_metric(optimizer):\n\n def lr(y_true, y_pred):\n return optimizer.lr\n return lr\n\n\ndef train():\n batch_size = 4\n os.environ['CUDA_VISIBLE_DEVICES'] = '0'\n trainGene = trainGenerator(batch_size, data_path='/data', folder=\n 'train', aug_dict=aug_args, seed=1, interaction='RECIST')\n devGene = trainGenerator(batch_size, data_path='/data', folder='dev',\n aug_dict=no_aug_args, seed=1, interaction='RECIST')\n testGene = testGenerator(test_path='test_path', interaction='RECIST')\n model = CGBS_Net(input_shape=(256, 256, 4), rate=3)\n model.summary()\n opt = SGD(lr=0.0004, decay=1e-06, momentum=0.9, nesterov=True)\n lr_metric = get_lr_metric(opt)\n model.compile(optimizer=opt, loss={'out_seg': dice_coef_loss,\n 'out_shape': losses.binary_crossentropy}, loss_weights={'out_seg': \n 1, 'out_shape': 1}, metrics=[dice_coef, lr_metric])\n csv_logger = CSVLogger('./Models/' + 'CGBS_Net.csv', append=True)\n model_checkpoint = ModelCheckpoint(\n './Models/CGBS/{epoch:02d}-{val_out_seg_dice_coef:.4f}.h5', monitor\n ='val_out_seg_loss', verbose=0, save_best_only=True,\n save_weights_only=True, mode='auto', period=1)\n reduce_lr = ReduceLROnPlateau(monitor='val_out_seg_loss', factor=0.1,\n patience=50, mode='auto')\n model.fit_generator(generator=trainGene, steps_per_epoch=int(5000 /\n batch_size), epochs=500, validation_data=devGene, validation_steps=\n int(5000 / batch_size), verbose=2, callbacks=[model_checkpoint,\n csv_logger, reduce_lr])\n\n\ntrain()\n",
"step-5": "from liver_tumor_segmentation.CGBS_Net import *\r\nfrom liver_tumor_segmentation.loss import *\r\nfrom keras.optimizers import *\r\nfrom liver_tumor_segmentation.CGBS_data_generator import *\r\nfrom keras.callbacks import *\r\nimport os\r\nfrom keras.callbacks import ReduceLROnPlateau\r\nfrom keras import losses\r\nfrom configuration import *\r\n\r\ndef get_lr_metric(optimizer):\r\n def lr(y_true, y_pred):\r\n return optimizer.lr\r\n\r\n return lr\r\ndef train():\r\n batch_size = 4 #4 for single GPU; 8 for two GPUs\r\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = '0'\r\n\r\n trainGene = trainGenerator(batch_size, data_path='/data',\r\n folder='train', aug_dict=aug_args, seed = 1, interaction='RECIST')\r\n devGene = trainGenerator(batch_size, data_path='/data',\r\n folder='dev', aug_dict=no_aug_args, seed = 1, interaction='RECIST')\r\n testGene = testGenerator(test_path='test_path', interaction='RECIST')\r\n\r\n model = CGBS_Net(input_shape=(256, 256, 4),rate=3)\r\n model.summary()\r\n\r\n # GPU_COUNT = 2\r\n # model = multi_gpu_model(original_model, GPU_COUNT)\r\n\r\n opt=SGD(lr=4e-4, decay=1e-6, momentum=0.9, nesterov=True)\r\n lr_metric = get_lr_metric(opt)\r\n model.compile(optimizer=opt, loss={'out_seg': dice_coef_loss, 'out_shape': losses.binary_crossentropy},\r\n loss_weights={'out_seg': 1, 'out_shape': 1}, metrics=[dice_coef, lr_metric])\r\n\r\n csv_logger = CSVLogger('./Models/'+'CGBS_Net.csv', append=True) # ss-0.01\r\n # tensorboard = TensorBoard(log_dir='./tmp/graph', write_graph=True, write_images=True)\r\n # earlystopping = EarlyStopping(monitor='val_loss', patience=0, verbose=0, mode='auto')\r\n\r\n model_checkpoint = ModelCheckpoint(\r\n './Models/CGBS/{epoch:02d}-{val_out_seg_dice_coef:.4f}.h5',\r\n monitor='val_out_seg_loss',\r\n verbose=0, save_best_only=True, save_weights_only=True, mode='auto', period=1)\r\n reduce_lr = ReduceLROnPlateau(monitor='val_out_seg_loss', factor=0.1, patience=50, mode='auto')\r\n model.fit_generator(generator=trainGene, steps_per_epoch=int(5000/batch_size),\r\n epochs=500, validation_data=devGene,\r\n validation_steps=int(5000/batch_size), verbose=2,\r\n callbacks=[model_checkpoint, csv_logger, reduce_lr])\r\n\r\n\r\ntrain()\r\n\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.db import models
from django.contrib.auth.models import User
from Event.models import Event
from University.models import University
from django.core.validators import validate_email
class Person(models.Model):
    user = models.ForeignKey(User, related_name='person',
                             on_delete=models.CASCADE, blank=True, null=True)
event = models.ForeignKey(Event, on_delete=models.CASCADE)
name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
email = models.EmailField(unique=True, validators=[validate_email])
university = models.ForeignKey(University, on_delete=models.PROTECT)
rut = models.CharField(max_length=13, unique=True)
phone_number = models.CharField(max_length=20)
emergency_phone_number = models.CharField(max_length=20, null=True)
avatar = models.ImageField(upload_to='person_avatars/', blank=True)
pending_messages = models.IntegerField(default=0)
def __str__(self):
return '{} {}'.format(self.name, self.last_name)
class PersonTemporaryCode(models.Model):
person = models.ForeignKey(Person, on_delete=models.CASCADE)
code = models.IntegerField()
expiration_date = models.DateTimeField()
def __str__(self):
return f'{self.person} - {self.code} -- {self.expiration_date}'
|
normal
|
{
"blob_id": "28f4f14c3c29ee96c370ffe71c268549552b915e",
"index": 2419,
"step-1": "<mask token>\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-2": "<mask token>\n\n\nclass Person(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-3": "<mask token>\n\n\nclass Person(models.Model):\n user = models.ForeignKey(User, related_name='person', on_delete=models.\n CASCADE, blank=True, null=True)\n event = models.ForeignKey(Event, on_delete=models.CASCADE)\n name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n email = models.EmailField(unique=True, validators=[validate_email])\n university = models.ForeignKey(University, on_delete=models.PROTECT)\n rut = models.CharField(max_length=13, unique=True)\n phone_number = models.CharField(max_length=20)\n emergency_phone_number = models.CharField(max_length=20, null=True)\n avatar = models.ImageField(upload_to='person_avatars/', blank=True)\n pending_messages = models.IntegerField(default=0)\n\n def __str__(self):\n return '{} {}'.format(self.name, self.last_name)\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom Event.models import Event\nfrom University.models import University\nfrom django.core.validators import validate_email\n\n\nclass Person(models.Model):\n user = models.ForeignKey(User, related_name='person', on_delete=models.\n CASCADE, blank=True, null=True)\n event = models.ForeignKey(Event, on_delete=models.CASCADE)\n name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n email = models.EmailField(unique=True, validators=[validate_email])\n university = models.ForeignKey(University, on_delete=models.PROTECT)\n rut = models.CharField(max_length=13, unique=True)\n phone_number = models.CharField(max_length=20)\n emergency_phone_number = models.CharField(max_length=20, null=True)\n avatar = models.ImageField(upload_to='person_avatars/', blank=True)\n pending_messages = models.IntegerField(default=0)\n\n def __str__(self):\n return '{} {}'.format(self.name, self.last_name)\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-5": null,
"step-ids": [
3,
4,
6,
7
]
}
|
[
3,
4,
6,
7
] |
"""
Question 39:
Define a function which can generate a list where the values are square of numbers between 1 and
20 (both included). Then the function needs to print the last 5 elements in the list.
"""
#To get two numbers from console input, e.g. "1 20".
input_num = input("Write two numbers:").split()
lis1=[]
lis2=[]
def lis(n1,n2):
"""
	Generate the list of squares and print its last 5 elements.
param:n1,n2
"""
i = 0
	if 1 <= n1 and n2 <= 20:
for x in range(n1,n2+1):
lis1.append(x*x)
lis1.reverse()
for y in lis1:
if i <=4:
lis2.append(y)
i +=1
print(lis2)
else:
print("Value out of range")
# Calling function.
lis(int(input_num[0]), int(input_num[1]))
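# e.g. input "1 20" prints [400, 361, 324, 289, 256]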
|
normal
|
{
"blob_id": "24c1f5195bad17f995fb97a03218fc9bbe5ce4cd",
"index": 2476,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef lis(n1, n2):\n \"\"\"\n\tGenerate and print last 5 element in list.\n\tparam:n1,n2\n\t\"\"\"\n i = 0\n if n1 and n2 <= 20:\n for x in range(n1, n2 + 1):\n lis1.append(x * x)\n lis1.reverse()\n for y in lis1:\n if i <= 4:\n lis2.append(y)\n i += 1\n print(lis2)\n else:\n print('Value out of range')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef lis(n1, n2):\n \"\"\"\n\tGenerate and print last 5 element in list.\n\tparam:n1,n2\n\t\"\"\"\n i = 0\n if n1 and n2 <= 20:\n for x in range(n1, n2 + 1):\n lis1.append(x * x)\n lis1.reverse()\n for y in lis1:\n if i <= 4:\n lis2.append(y)\n i += 1\n print(lis2)\n else:\n print('Value out of range')\n\n\nlis(input_num[0], input_num[1])\n",
"step-4": "<mask token>\ninput_num = input('Write number:')\nlis1 = []\nlis2 = []\n\n\ndef lis(n1, n2):\n \"\"\"\n\tGenerate and print last 5 element in list.\n\tparam:n1,n2\n\t\"\"\"\n i = 0\n if n1 and n2 <= 20:\n for x in range(n1, n2 + 1):\n lis1.append(x * x)\n lis1.reverse()\n for y in lis1:\n if i <= 4:\n lis2.append(y)\n i += 1\n print(lis2)\n else:\n print('Value out of range')\n\n\nlis(input_num[0], input_num[1])\n",
"step-5": "\"\"\"\nQuestion 39:\nDefine a function which can generate a list where the values are square of numbers between 1 and\n20 (both included). Then the function needs to print the last 5 elements in the list.\n\"\"\"\n\n#To get a value from console input.\ninput_num = input(\"Write number:\")\nlis1=[]\nlis2=[]\n\ndef lis(n1,n2):\n\t\"\"\"\n\tGenerate and print last 5 element in list.\n\tparam:n1,n2\n\t\"\"\"\n\ti = 0\n\tif n1 and n2 <= 20:\n\t\tfor x in range(n1,n2+1):\n\t\t\tlis1.append(x*x)\n\t\tlis1.reverse()\n\t\t\n\t\tfor y in lis1:\n\t\t\tif i <=4:\n\t\t\t\tlis2.append(y)\n\t\t\t\ti +=1\n\t\tprint(lis2)\n\telse:\n\t\tprint(\"Value out of range\")\n\n# Calling function.\nlis(input_num[0],input_num[1])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.conf.urls import url, include
from api.resources import PlayerResource, GameResource
from . import views
player_resource = PlayerResource()
game_resource = GameResource()
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^api/', include(player_resource.urls)),
url(r'^api/', include(game_resource.urls)),
]
|
normal
|
{
"blob_id": "ff959a388438a6d9c6d418e28c676ec3fd196ea0",
"index": 6076,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplayer_resource = PlayerResource()\ngame_resource = GameResource()\nurlpatterns = [url('^$', views.index, name='index'), url('^api/', include(\n player_resource.urls)), url('^api/', include(game_resource.urls))]\n",
"step-3": "from django.conf.urls import url, include\nfrom api.resources import PlayerResource, GameResource\nfrom . import views\nplayer_resource = PlayerResource()\ngame_resource = GameResource()\nurlpatterns = [url('^$', views.index, name='index'), url('^api/', include(\n player_resource.urls)), url('^api/', include(game_resource.urls))]\n",
"step-4": "from django.conf.urls import url, include\nfrom api.resources import PlayerResource, GameResource\nfrom . import views\n\nplayer_resource = PlayerResource()\ngame_resource = GameResource()\n\nurlpatterns = [\n url(r'^$', views.index, name='index'),\n url(r'^api/', include(player_resource.urls)),\n url(r'^api/', include(game_resource.urls)),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from typing import List
import pytest
from raiden import waiting
from raiden.api.python import RaidenAPI
from raiden.raiden_service import RaidenService
from raiden.tests.utils.detect_failure import raise_on_failure
from raiden.tests.utils.network import CHAIN
from raiden.tests.utils.transfer import block_offset_timeout
from raiden.transfer import views
from raiden.utils.typing import BlockTimeout
@raise_on_failure
@pytest.mark.parametrize("channels_per_node", [CHAIN])
@pytest.mark.parametrize("number_of_nodes", [3])
def test_leave_token_network(raiden_network: List[RaidenService], token_addresses):
registry_address = raiden_network[0].default_registry.address
token_address = token_addresses[0]
_, app1, _ = raiden_network
channels = views.list_channelstate_for_tokennetwork(
chain_state=views.state_from_raiden(app1),
token_network_registry_address=registry_address,
token_address=token_address,
)
timeout = block_offset_timeout(
app1, "Channels not settled in time", BlockTimeout(channels[0].settle_timeout * 10)
)
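    # Leaving the token network closes every channel; wait for all of them to
    # settle on-chain before the block-offset timeout elapses.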
with timeout:
RaidenAPI(app1).token_network_leave(registry_address, token_address)
waiting.wait_for_settle(
raiden=app1,
token_network_registry_address=registry_address,
token_address=token_address,
channel_ids=[channel.identifier for channel in channels],
retry_timeout=0.1,
)
|
normal
|
{
"blob_id": "c4a13069b5add538589886b5e282d4fc9f2b72ad",
"index": 6807,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@raise_on_failure\[email protected]('channels_per_node', [CHAIN])\[email protected]('number_of_nodes', [3])\ndef test_leave_token_network(raiden_network: List[RaidenService],\n token_addresses):\n registry_address = raiden_network[0].default_registry.address\n token_address = token_addresses[0]\n _, app1, _ = raiden_network\n channels = views.list_channelstate_for_tokennetwork(chain_state=views.\n state_from_raiden(app1), token_network_registry_address=\n registry_address, token_address=token_address)\n timeout = block_offset_timeout(app1, 'Channels not settled in time',\n BlockTimeout(channels[0].settle_timeout * 10))\n with timeout:\n RaidenAPI(app1).token_network_leave(registry_address, token_address)\n waiting.wait_for_settle(raiden=app1, token_network_registry_address\n =registry_address, token_address=token_address, channel_ids=[\n channel.identifier for channel in channels], retry_timeout=0.1)\n",
"step-3": "from typing import List\nimport pytest\nfrom raiden import waiting\nfrom raiden.api.python import RaidenAPI\nfrom raiden.raiden_service import RaidenService\nfrom raiden.tests.utils.detect_failure import raise_on_failure\nfrom raiden.tests.utils.network import CHAIN\nfrom raiden.tests.utils.transfer import block_offset_timeout\nfrom raiden.transfer import views\nfrom raiden.utils.typing import BlockTimeout\n\n\n@raise_on_failure\[email protected]('channels_per_node', [CHAIN])\[email protected]('number_of_nodes', [3])\ndef test_leave_token_network(raiden_network: List[RaidenService],\n token_addresses):\n registry_address = raiden_network[0].default_registry.address\n token_address = token_addresses[0]\n _, app1, _ = raiden_network\n channels = views.list_channelstate_for_tokennetwork(chain_state=views.\n state_from_raiden(app1), token_network_registry_address=\n registry_address, token_address=token_address)\n timeout = block_offset_timeout(app1, 'Channels not settled in time',\n BlockTimeout(channels[0].settle_timeout * 10))\n with timeout:\n RaidenAPI(app1).token_network_leave(registry_address, token_address)\n waiting.wait_for_settle(raiden=app1, token_network_registry_address\n =registry_address, token_address=token_address, channel_ids=[\n channel.identifier for channel in channels], retry_timeout=0.1)\n",
"step-4": "from typing import List\n\nimport pytest\n\nfrom raiden import waiting\nfrom raiden.api.python import RaidenAPI\nfrom raiden.raiden_service import RaidenService\nfrom raiden.tests.utils.detect_failure import raise_on_failure\nfrom raiden.tests.utils.network import CHAIN\nfrom raiden.tests.utils.transfer import block_offset_timeout\nfrom raiden.transfer import views\nfrom raiden.utils.typing import BlockTimeout\n\n\n@raise_on_failure\[email protected](\"channels_per_node\", [CHAIN])\[email protected](\"number_of_nodes\", [3])\ndef test_leave_token_network(raiden_network: List[RaidenService], token_addresses):\n registry_address = raiden_network[0].default_registry.address\n token_address = token_addresses[0]\n _, app1, _ = raiden_network\n\n channels = views.list_channelstate_for_tokennetwork(\n chain_state=views.state_from_raiden(app1),\n token_network_registry_address=registry_address,\n token_address=token_address,\n )\n\n timeout = block_offset_timeout(\n app1, \"Channels not settled in time\", BlockTimeout(channels[0].settle_timeout * 10)\n )\n with timeout:\n RaidenAPI(app1).token_network_leave(registry_address, token_address)\n waiting.wait_for_settle(\n raiden=app1,\n token_network_registry_address=registry_address,\n token_address=token_address,\n channel_ids=[channel.identifier for channel in channels],\n retry_timeout=0.1,\n )\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import pymongo
import os,sys
import re
from db_User import *
from db_Event import *
class ClassRoom:
    # Connect to the local MongoDB client
    __myclient = pymongo.MongoClient("mongodb://localhost:27017")
    # Create the database
    __mydb = __myclient["MMKeyDB"]
    # Create a new collection
    __mycol = __mydb["ClassRoom"]
    # Check whether an id or a name was supplied; if so, convert between the two forms
def Name2Id(room_id,name):
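        # e.g. name "教1-101" corresponds to room_id "B1R101" and vice versa.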
bool_n = bool(re.match("教\d{1}-\d{3}",name))
bool_id = bool(re.match("B\d{1}R\d{3}",room_id))
if not (bool_id or bool_n):
return False
elif bool_n:
room_id = "B" + name[1] + "R" + name[3:6]
else:
name = "教" + room_id[1] + "-" + room_id[3:6]
return room_id,name
def __init__(self,
room_id = "",
name = "",
seats = 0,
key_id = "",
event = []):
if not(ClassRoom.Name2Id(room_id,name)):
self.WrongFlag = 1
else:
self.id,self.name = ClassRoom.Name2Id(room_id,name)
self.seats = seats
self.key_id = key_id
self.event = event
ClassRoom.PullClassroom(self)
def PullClassroom(self):
result = self.__mycol.find_one({ "_id": self.id })
if result:
self.name = self.name or result['name']
self.seats = self.seats or result['seats']
self.key_id= self.key_id or result['key_id']
self.event = self.event or result['event']
return self
else:
return False
def TurnDict(self):
mydict = {
"_id" : self.id ,
"name" : self.name,
"seats" : self.seats,
"key_id" : self.key_id,
"event" : self.event}
return mydict
def PushClassroom(self):
mydict = self.TurnDict()
if self.__mycol.find_one({ "_id": self.id }):
myquery = {"_id" : self.id}
self.__mycol.update(myquery,mydict)
return "Acc_Updated"
else:
            self.__mycol.insert_one(mydict)  # insert the new document
return "Acc_Created"
def AllClassroom(self):
cursor = self.__mycol.find()
# __import__('ipdb').set_trace()
if cursor:
# index = []
# for doc in cursor:
# print(doc)
# temp = [doc['_id'],doc['name'],doc['seats'],doc['event']]
# index.append(temp)
return cursor
else:
return False
    # Delete the classroom record
    def Delete(self):
        self.__mycol.delete_one({"_id": self.id})
        return "Deleted"
if __name__ == '__main__':
index = ClassRoom().AllClassroom()
for i in index:
print(i)
|
normal
|
{
"blob_id": "8dae8a89d08bc522f9a5fdde8aeb9e322fafcbec",
"index": 3251,
"step-1": "<mask token>\n\n\nclass ClassRoom:\n <mask token>\n <mask token>\n <mask token>\n\n def Name2Id(room_id, name):\n bool_n = bool(re.match('教\\\\d{1}-\\\\d{3}', name))\n bool_id = bool(re.match('B\\\\d{1}R\\\\d{3}', room_id))\n if not (bool_id or bool_n):\n return False\n elif bool_n:\n room_id = 'B' + name[1] + 'R' + name[3:6]\n else:\n name = '教' + room_id[1] + '-' + room_id[3:6]\n return room_id, name\n\n def __init__(self, room_id='', name='', seats=0, key_id='', event=[]):\n if not ClassRoom.Name2Id(room_id, name):\n self.WrongFlag = 1\n else:\n self.id, self.name = ClassRoom.Name2Id(room_id, name)\n self.seats = seats\n self.key_id = key_id\n self.event = event\n ClassRoom.PullClassroom(self)\n\n def PullClassroom(self):\n result = self.__mycol.find_one({'_id': self.id})\n if result:\n self.name = self.name or result['name']\n self.seats = self.seats or result['seats']\n self.key_id = self.key_id or result['key_id']\n self.event = self.event or result['event']\n return self\n else:\n return False\n\n def TurnDict(self):\n mydict = {'_id': self.id, 'name': self.name, 'seats': self.seats,\n 'key_id': self.key_id, 'event': self.event}\n return mydict\n\n def PushClassroom(self):\n mydict = self.TurnDict()\n if self.__mycol.find_one({'_id': self.id}):\n myquery = {'_id': self.id}\n self.__mycol.update(myquery, mydict)\n return 'Acc_Updated'\n else:\n self.__mycol.insert_one(mydict)\n return 'Acc_Created'\n\n def AllClassroom(self):\n cursor = self.__mycol.find()\n if cursor:\n return cursor\n else:\n return False\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ClassRoom:\n <mask token>\n <mask token>\n <mask token>\n\n def Name2Id(room_id, name):\n bool_n = bool(re.match('教\\\\d{1}-\\\\d{3}', name))\n bool_id = bool(re.match('B\\\\d{1}R\\\\d{3}', room_id))\n if not (bool_id or bool_n):\n return False\n elif bool_n:\n room_id = 'B' + name[1] + 'R' + name[3:6]\n else:\n name = '教' + room_id[1] + '-' + room_id[3:6]\n return room_id, name\n\n def __init__(self, room_id='', name='', seats=0, key_id='', event=[]):\n if not ClassRoom.Name2Id(room_id, name):\n self.WrongFlag = 1\n else:\n self.id, self.name = ClassRoom.Name2Id(room_id, name)\n self.seats = seats\n self.key_id = key_id\n self.event = event\n ClassRoom.PullClassroom(self)\n\n def PullClassroom(self):\n result = self.__mycol.find_one({'_id': self.id})\n if result:\n self.name = self.name or result['name']\n self.seats = self.seats or result['seats']\n self.key_id = self.key_id or result['key_id']\n self.event = self.event or result['event']\n return self\n else:\n return False\n\n def TurnDict(self):\n mydict = {'_id': self.id, 'name': self.name, 'seats': self.seats,\n 'key_id': self.key_id, 'event': self.event}\n return mydict\n\n def PushClassroom(self):\n mydict = self.TurnDict()\n if self.__mycol.find_one({'_id': self.id}):\n myquery = {'_id': self.id}\n self.__mycol.update(myquery, mydict)\n return 'Acc_Updated'\n else:\n self.__mycol.insert_one(mydict)\n return 'Acc_Created'\n\n def AllClassroom(self):\n cursor = self.__mycol.find()\n if cursor:\n return cursor\n else:\n return False\n\n def Delete(self):\n User.mycol.delete_one({'_id': self.id})\n return 'Deleted'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ClassRoom:\n __myclient = pymongo.MongoClient('mongodb://localhost:27017')\n __mydb = __myclient['MMKeyDB']\n __mycol = __mydb['ClassRoom']\n\n def Name2Id(room_id, name):\n bool_n = bool(re.match('教\\\\d{1}-\\\\d{3}', name))\n bool_id = bool(re.match('B\\\\d{1}R\\\\d{3}', room_id))\n if not (bool_id or bool_n):\n return False\n elif bool_n:\n room_id = 'B' + name[1] + 'R' + name[3:6]\n else:\n name = '教' + room_id[1] + '-' + room_id[3:6]\n return room_id, name\n\n def __init__(self, room_id='', name='', seats=0, key_id='', event=[]):\n if not ClassRoom.Name2Id(room_id, name):\n self.WrongFlag = 1\n else:\n self.id, self.name = ClassRoom.Name2Id(room_id, name)\n self.seats = seats\n self.key_id = key_id\n self.event = event\n ClassRoom.PullClassroom(self)\n\n def PullClassroom(self):\n result = self.__mycol.find_one({'_id': self.id})\n if result:\n self.name = self.name or result['name']\n self.seats = self.seats or result['seats']\n self.key_id = self.key_id or result['key_id']\n self.event = self.event or result['event']\n return self\n else:\n return False\n\n def TurnDict(self):\n mydict = {'_id': self.id, 'name': self.name, 'seats': self.seats,\n 'key_id': self.key_id, 'event': self.event}\n return mydict\n\n def PushClassroom(self):\n mydict = self.TurnDict()\n if self.__mycol.find_one({'_id': self.id}):\n myquery = {'_id': self.id}\n self.__mycol.update(myquery, mydict)\n return 'Acc_Updated'\n else:\n self.__mycol.insert_one(mydict)\n return 'Acc_Created'\n\n def AllClassroom(self):\n cursor = self.__mycol.find()\n if cursor:\n return cursor\n else:\n return False\n\n def Delete(self):\n User.mycol.delete_one({'_id': self.id})\n return 'Deleted'\n\n\nif __name__ == '__main__':\n index = ClassRoom().AllClassroom()\n for i in index:\n print(i)\n",
"step-4": "import pymongo\nimport os, sys\nimport re\nfrom db_User import *\nfrom db_Event import *\n\n\nclass ClassRoom:\n __myclient = pymongo.MongoClient('mongodb://localhost:27017')\n __mydb = __myclient['MMKeyDB']\n __mycol = __mydb['ClassRoom']\n\n def Name2Id(room_id, name):\n bool_n = bool(re.match('教\\\\d{1}-\\\\d{3}', name))\n bool_id = bool(re.match('B\\\\d{1}R\\\\d{3}', room_id))\n if not (bool_id or bool_n):\n return False\n elif bool_n:\n room_id = 'B' + name[1] + 'R' + name[3:6]\n else:\n name = '教' + room_id[1] + '-' + room_id[3:6]\n return room_id, name\n\n def __init__(self, room_id='', name='', seats=0, key_id='', event=[]):\n if not ClassRoom.Name2Id(room_id, name):\n self.WrongFlag = 1\n else:\n self.id, self.name = ClassRoom.Name2Id(room_id, name)\n self.seats = seats\n self.key_id = key_id\n self.event = event\n ClassRoom.PullClassroom(self)\n\n def PullClassroom(self):\n result = self.__mycol.find_one({'_id': self.id})\n if result:\n self.name = self.name or result['name']\n self.seats = self.seats or result['seats']\n self.key_id = self.key_id or result['key_id']\n self.event = self.event or result['event']\n return self\n else:\n return False\n\n def TurnDict(self):\n mydict = {'_id': self.id, 'name': self.name, 'seats': self.seats,\n 'key_id': self.key_id, 'event': self.event}\n return mydict\n\n def PushClassroom(self):\n mydict = self.TurnDict()\n if self.__mycol.find_one({'_id': self.id}):\n myquery = {'_id': self.id}\n self.__mycol.update(myquery, mydict)\n return 'Acc_Updated'\n else:\n self.__mycol.insert_one(mydict)\n return 'Acc_Created'\n\n def AllClassroom(self):\n cursor = self.__mycol.find()\n if cursor:\n return cursor\n else:\n return False\n\n def Delete(self):\n User.mycol.delete_one({'_id': self.id})\n return 'Deleted'\n\n\nif __name__ == '__main__':\n index = ClassRoom().AllClassroom()\n for i in index:\n print(i)\n",
"step-5": "import pymongo\nimport os,sys\nimport re\n\n\nfrom db_User import *\nfrom db_Event import *\n\n\nclass ClassRoom:\n # 链接本地客户端\n __myclient = pymongo.MongoClient(\"mongodb://localhost:27017\")\n # 创建数据库\n __mydb = __myclient[\"MMKeyDB\"]\n # 创建新的集合\n __mycol = __mydb[\"ClassRoom\"]\n\n # 判断是否输入id或是输入name,如果有输入则转译\n def Name2Id(room_id,name):\n bool_n = bool(re.match(\"教\\d{1}-\\d{3}\",name))\n bool_id = bool(re.match(\"B\\d{1}R\\d{3}\",room_id))\n if not (bool_id or bool_n):\n return False\n elif bool_n:\n room_id = \"B\" + name[1] + \"R\" + name[3:6]\n else:\n name = \"教\" + room_id[1] + \"-\" + room_id[3:6]\n\n return room_id,name\n\n def __init__(self,\n room_id = \"\",\n name = \"\",\n seats = 0,\n key_id = \"\",\n event = []):\n\n if not(ClassRoom.Name2Id(room_id,name)):\n self.WrongFlag = 1\n else:\n self.id,self.name = ClassRoom.Name2Id(room_id,name)\n self.seats = seats\n self.key_id = key_id\n self.event = event\n ClassRoom.PullClassroom(self)\n\n def PullClassroom(self):\n result = self.__mycol.find_one({ \"_id\": self.id })\n if result:\n self.name = self.name or result['name']\n self.seats = self.seats or result['seats']\n self.key_id= self.key_id or result['key_id']\n self.event = self.event or result['event']\n return self\n else:\n return False\n\n def TurnDict(self):\n mydict = {\n \"_id\" : self.id ,\n \"name\" : self.name,\n \"seats\" : self.seats,\n \"key_id\" : self.key_id,\n \"event\" : self.event}\n return mydict\n\n def PushClassroom(self):\n mydict = self.TurnDict()\n if self.__mycol.find_one({ \"_id\": self.id }):\n myquery = {\"_id\" : self.id}\n self.__mycol.update(myquery,mydict)\n return \"Acc_Updated\"\n else:\n self.__mycol.insert_one(mydict) # 上传新的document\n return \"Acc_Created\"\n \n def AllClassroom(self):\n cursor = self.__mycol.find()\n # __import__('ipdb').set_trace()\n if cursor:\n # index = []\n # for doc in cursor:\n # print(doc)\n # temp = [doc['_id'],doc['name'],doc['seats'],doc['event']]\n # index.append(temp)\n return cursor\n else:\n return False\n\n # 删除教室记录\n def Delete(self):\n User.mycol.delete_one({\"_id\": self.id})\n return \"Deleted\"\n\nif __name__ == '__main__':\n index = ClassRoom().AllClassroom()\n for i in index:\n print(i)\n",
"step-ids": [
7,
8,
10,
11,
12
]
}
|
[
7,
8,
10,
11,
12
] |
"""
The MIT License (MIT)
Copyright (c) 2021-present Pycord Development
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import Literal, TypedDict
from .member import Member
from .snowflake import Snowflake
from .user import User
ScheduledEventStatus = Literal[1, 2, 3, 4]
ScheduledEventLocationType = Literal[1, 2, 3]
ScheduledEventPrivacyLevel = Literal[2]
class ScheduledEvent(TypedDict):
id: Snowflake
guild_id: Snowflake
channel_id: Snowflake
creator_id: Snowflake
name: str
description: str
image: str | None
scheduled_start_time: str
scheduled_end_time: str | None
privacy_level: ScheduledEventPrivacyLevel
status: ScheduledEventStatus
entity_type: ScheduledEventLocationType
entity_id: Snowflake
entity_metadata: ScheduledEventEntityMetadata
creator: User
user_count: int | None
class ScheduledEventEntityMetadata(TypedDict):
location: str
class ScheduledEventSubscriber(TypedDict):
guild_scheduled_event_id: Snowflake
user: User
member: Member | None
|
normal
|
{
"blob_id": "a73dcfc21c31d4e984db39c072d11cb9a9c3d5e5",
"index": 2470,
"step-1": "<mask token>\n\n\nclass ScheduledEventSubscriber(TypedDict):\n guild_scheduled_event_id: Snowflake\n user: User\n member: Member | None\n",
"step-2": "<mask token>\n\n\nclass ScheduledEvent(TypedDict):\n id: Snowflake\n guild_id: Snowflake\n channel_id: Snowflake\n creator_id: Snowflake\n name: str\n description: str\n image: str | None\n scheduled_start_time: str\n scheduled_end_time: str | None\n privacy_level: ScheduledEventPrivacyLevel\n status: ScheduledEventStatus\n entity_type: ScheduledEventLocationType\n entity_id: Snowflake\n entity_metadata: ScheduledEventEntityMetadata\n creator: User\n user_count: int | None\n\n\nclass ScheduledEventEntityMetadata(TypedDict):\n location: str\n\n\nclass ScheduledEventSubscriber(TypedDict):\n guild_scheduled_event_id: Snowflake\n user: User\n member: Member | None\n",
"step-3": "<mask token>\nScheduledEventStatus = Literal[1, 2, 3, 4]\nScheduledEventLocationType = Literal[1, 2, 3]\nScheduledEventPrivacyLevel = Literal[2]\n\n\nclass ScheduledEvent(TypedDict):\n id: Snowflake\n guild_id: Snowflake\n channel_id: Snowflake\n creator_id: Snowflake\n name: str\n description: str\n image: str | None\n scheduled_start_time: str\n scheduled_end_time: str | None\n privacy_level: ScheduledEventPrivacyLevel\n status: ScheduledEventStatus\n entity_type: ScheduledEventLocationType\n entity_id: Snowflake\n entity_metadata: ScheduledEventEntityMetadata\n creator: User\n user_count: int | None\n\n\nclass ScheduledEventEntityMetadata(TypedDict):\n location: str\n\n\nclass ScheduledEventSubscriber(TypedDict):\n guild_scheduled_event_id: Snowflake\n user: User\n member: Member | None\n",
"step-4": "<mask token>\nfrom __future__ import annotations\nfrom typing import Literal, TypedDict\nfrom .member import Member\nfrom .snowflake import Snowflake\nfrom .user import User\nScheduledEventStatus = Literal[1, 2, 3, 4]\nScheduledEventLocationType = Literal[1, 2, 3]\nScheduledEventPrivacyLevel = Literal[2]\n\n\nclass ScheduledEvent(TypedDict):\n id: Snowflake\n guild_id: Snowflake\n channel_id: Snowflake\n creator_id: Snowflake\n name: str\n description: str\n image: str | None\n scheduled_start_time: str\n scheduled_end_time: str | None\n privacy_level: ScheduledEventPrivacyLevel\n status: ScheduledEventStatus\n entity_type: ScheduledEventLocationType\n entity_id: Snowflake\n entity_metadata: ScheduledEventEntityMetadata\n creator: User\n user_count: int | None\n\n\nclass ScheduledEventEntityMetadata(TypedDict):\n location: str\n\n\nclass ScheduledEventSubscriber(TypedDict):\n guild_scheduled_event_id: Snowflake\n user: User\n member: Member | None\n",
"step-5": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2021-present Pycord Development\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import Literal, TypedDict\n\nfrom .member import Member\nfrom .snowflake import Snowflake\nfrom .user import User\n\nScheduledEventStatus = Literal[1, 2, 3, 4]\nScheduledEventLocationType = Literal[1, 2, 3]\nScheduledEventPrivacyLevel = Literal[2]\n\n\nclass ScheduledEvent(TypedDict):\n id: Snowflake\n guild_id: Snowflake\n channel_id: Snowflake\n creator_id: Snowflake\n name: str\n description: str\n image: str | None\n scheduled_start_time: str\n scheduled_end_time: str | None\n privacy_level: ScheduledEventPrivacyLevel\n status: ScheduledEventStatus\n entity_type: ScheduledEventLocationType\n entity_id: Snowflake\n entity_metadata: ScheduledEventEntityMetadata\n creator: User\n user_count: int | None\n\n\nclass ScheduledEventEntityMetadata(TypedDict):\n location: str\n\n\nclass ScheduledEventSubscriber(TypedDict):\n guild_scheduled_event_id: Snowflake\n user: User\n member: Member | None\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
import requests
# url="http://www.google.com"
# response=requests.get(url)
# print(response.status_code)
url = "http://icanhazdadjoke.com/"
response = requests.get(url, headers={"Accept": "application/json"})
data = response.text
print(type(data))
data = response.json()
print(data)
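
# With the "Accept: application/json" header, response.json() yields a dict;
# icanhazdadjoke typically returns keys like 'id', 'joke' and 'status', e.g.:
# print(data.get("joke"))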
|
normal
|
{
"blob_id": "f94894e5d3e6a0ff367911c72f4d863ac32c8baa",
"index": 1435,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(type(data))\n<mask token>\nprint(data)\n",
"step-3": "<mask token>\nurl = 'http://icanhazdadjoke.com/'\nresponse = requests.get(url, headers={'Accept': 'application/json'})\ndata = response.text\nprint(type(data))\ndata = response.json()\nprint(data)\n",
"step-4": "import requests\nurl = 'http://icanhazdadjoke.com/'\nresponse = requests.get(url, headers={'Accept': 'application/json'})\ndata = response.text\nprint(type(data))\ndata = response.json()\nprint(data)\n",
"step-5": "import requests\n\n# url=\"http://www.google.com\"\n# response=requests.get(url)\n# print(response.status_code)\n\n\nurl = \"http://icanhazdadjoke.com/\"\nresponse = requests.get(url, headers={\"Accept\": \"application/json\"})\ndata = response.text\nprint(type(data))\ndata = response.json()\nprint(data)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#coding: utf-8
from flask import Flask, redirect, url_for, request
from werkzeug.utils import secure_filename
import torch, torchvision
# Setup detectron2 logger
import detectron2
from detectron2.utils.logger import setup_logger
setup_logger()
# import some common libraries
import numpy as np
import os, json, cv2, random
# import some common detectron2 utilities
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from detectron2.config import get_cfg
from detectron2.utils.visualizer import Visualizer
from detectron2.data import MetadataCatalog, DatasetCatalog
from detectron2.structures import Instances
import sys
app = Flask(__name__)
def init_setup():
cfg = get_cfg()
# add project-specific config (e.g., TensorMask) here if you're not running a model in detectron2's core library
cfg.merge_from_file(model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"))
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5 # set threshold for this model
# Find a model from detectron2's model zoo. You can use the https://dl.fbaipublicfiles... url as well
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml")
cfg.MODEL.DEVICE='cpu'
return cfg
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
def detect_object(filename):
PATH_TO_TEST_IMAGES_DIR = app.config['UPLOAD_FOLDER']
TEST_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, filename)
PRED_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, 'pred_' + filename)
im = cv2.imread(TEST_IMAGE_PATH)
cfg = app.config['detectron2_cfg']
predictor = DefaultPredictor(cfg)
outputs = predictor(im)
    # filter predictions down to the configured thing classes (banana, orange, etc.)
data_set = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])
# print(data_set.thing_classes)
pred_inst = outputs["instances"].to("cpu")
show_inst = []
pred_res = []
for tc in app.config['THING_CLASSES']:
if tc not in data_set.thing_classes:
print("Thing Class:"+ tc +", Not found in the training set")
continue
t_idx = data_set.thing_classes.index(tc)
filt_inst = pred_inst[pred_inst.pred_classes == t_idx]
cat_cnt = len(filt_inst)
if cat_cnt > 0:
show_inst.append(filt_inst)
pred_res.append({"t_class": tc, "t_count":cat_cnt})
if len(show_inst) > 0:
pred_inst = Instances.cat(show_inst)
# Comment this out later
# v = Visualizer(im[:, :, ::-1],data_set , scale=0.3)
# out = v.draw_instance_predictions(pred_inst)
# cv2.imwrite(PRED_IMAGE_PATH, out.get_image()[:, :, ::-1])
response = app.response_class(
response=json.dumps({'result': pred_res}),
status=200,
mimetype='application/json'
)
return response
@app.route("/infer", methods=['POST'])
def infer():
file = request.files['fimg']
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return detect_object(filename=filename)
if __name__ == '__main__':
app.config['UPLOAD_FOLDER'] = '/app/imgstore/'
app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg'])
app.config['detectron2_cfg'] = init_setup()
app.config['THING_CLASSES'] = ['banana', 'orange', 'carrot', 'apple', 'bottle']
app.run(debug=False,host='0.0.0.0')
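
# Usage sketch (port 5000 is Flask's default, since app.run() above sets none):
#   curl -X POST -F "fimg=@banana.jpg" http://localhost:5000/infer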
|
normal
|
{
"blob_id": "a18e98db417fe234e3d8d5d1321203fbac18751c",
"index": 8174,
"step-1": "<mask token>\n\n\ndef init_setup():\n cfg = get_cfg()\n cfg.merge_from_file(model_zoo.get_config_file(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml'))\n cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5\n cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml')\n cfg.MODEL.DEVICE = 'cpu'\n return cfg\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in app.config[\n 'ALLOWED_EXTENSIONS']\n\n\ndef detect_object(filename):\n PATH_TO_TEST_IMAGES_DIR = app.config['UPLOAD_FOLDER']\n TEST_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, filename)\n PRED_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, 'pred_' + filename)\n im = cv2.imread(TEST_IMAGE_PATH)\n cfg = app.config['detectron2_cfg']\n predictor = DefaultPredictor(cfg)\n outputs = predictor(im)\n data_set = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])\n pred_inst = outputs['instances'].to('cpu')\n show_inst = []\n pred_res = []\n for tc in app.config['THING_CLASSES']:\n if tc not in data_set.thing_classes:\n print('Thing Class:' + tc + ', Not found in the training set')\n continue\n t_idx = data_set.thing_classes.index(tc)\n filt_inst = pred_inst[pred_inst.pred_classes == t_idx]\n cat_cnt = len(filt_inst)\n if cat_cnt > 0:\n show_inst.append(filt_inst)\n pred_res.append({'t_class': tc, 't_count': cat_cnt})\n if len(show_inst) > 0:\n pred_inst = Instances.cat(show_inst)\n response = app.response_class(response=json.dumps({'result': pred_res}),\n status=200, mimetype='application/json')\n return response\n\n\[email protected]('/infer', methods=['POST'])\ndef infer():\n file = request.files['fimg']\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n return detect_object(filename=filename)\n\n\n<mask token>\n",
"step-2": "<mask token>\nsetup_logger()\n<mask token>\n\n\ndef init_setup():\n cfg = get_cfg()\n cfg.merge_from_file(model_zoo.get_config_file(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml'))\n cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5\n cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml')\n cfg.MODEL.DEVICE = 'cpu'\n return cfg\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in app.config[\n 'ALLOWED_EXTENSIONS']\n\n\ndef detect_object(filename):\n PATH_TO_TEST_IMAGES_DIR = app.config['UPLOAD_FOLDER']\n TEST_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, filename)\n PRED_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, 'pred_' + filename)\n im = cv2.imread(TEST_IMAGE_PATH)\n cfg = app.config['detectron2_cfg']\n predictor = DefaultPredictor(cfg)\n outputs = predictor(im)\n data_set = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])\n pred_inst = outputs['instances'].to('cpu')\n show_inst = []\n pred_res = []\n for tc in app.config['THING_CLASSES']:\n if tc not in data_set.thing_classes:\n print('Thing Class:' + tc + ', Not found in the training set')\n continue\n t_idx = data_set.thing_classes.index(tc)\n filt_inst = pred_inst[pred_inst.pred_classes == t_idx]\n cat_cnt = len(filt_inst)\n if cat_cnt > 0:\n show_inst.append(filt_inst)\n pred_res.append({'t_class': tc, 't_count': cat_cnt})\n if len(show_inst) > 0:\n pred_inst = Instances.cat(show_inst)\n response = app.response_class(response=json.dumps({'result': pred_res}),\n status=200, mimetype='application/json')\n return response\n\n\[email protected]('/infer', methods=['POST'])\ndef infer():\n file = request.files['fimg']\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n return detect_object(filename=filename)\n\n\nif __name__ == '__main__':\n app.config['UPLOAD_FOLDER'] = '/app/imgstore/'\n app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg'])\n app.config['detectron2_cfg'] = init_setup()\n app.config['THING_CLASSES'] = ['banana', 'orange', 'carrot', 'apple',\n 'bottle']\n app.run(debug=False, host='0.0.0.0')\n",
"step-3": "<mask token>\nsetup_logger()\n<mask token>\napp = Flask(__name__)\n\n\ndef init_setup():\n cfg = get_cfg()\n cfg.merge_from_file(model_zoo.get_config_file(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml'))\n cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5\n cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml')\n cfg.MODEL.DEVICE = 'cpu'\n return cfg\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in app.config[\n 'ALLOWED_EXTENSIONS']\n\n\ndef detect_object(filename):\n PATH_TO_TEST_IMAGES_DIR = app.config['UPLOAD_FOLDER']\n TEST_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, filename)\n PRED_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, 'pred_' + filename)\n im = cv2.imread(TEST_IMAGE_PATH)\n cfg = app.config['detectron2_cfg']\n predictor = DefaultPredictor(cfg)\n outputs = predictor(im)\n data_set = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])\n pred_inst = outputs['instances'].to('cpu')\n show_inst = []\n pred_res = []\n for tc in app.config['THING_CLASSES']:\n if tc not in data_set.thing_classes:\n print('Thing Class:' + tc + ', Not found in the training set')\n continue\n t_idx = data_set.thing_classes.index(tc)\n filt_inst = pred_inst[pred_inst.pred_classes == t_idx]\n cat_cnt = len(filt_inst)\n if cat_cnt > 0:\n show_inst.append(filt_inst)\n pred_res.append({'t_class': tc, 't_count': cat_cnt})\n if len(show_inst) > 0:\n pred_inst = Instances.cat(show_inst)\n response = app.response_class(response=json.dumps({'result': pred_res}),\n status=200, mimetype='application/json')\n return response\n\n\[email protected]('/infer', methods=['POST'])\ndef infer():\n file = request.files['fimg']\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n return detect_object(filename=filename)\n\n\nif __name__ == '__main__':\n app.config['UPLOAD_FOLDER'] = '/app/imgstore/'\n app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg'])\n app.config['detectron2_cfg'] = init_setup()\n app.config['THING_CLASSES'] = ['banana', 'orange', 'carrot', 'apple',\n 'bottle']\n app.run(debug=False, host='0.0.0.0')\n",
"step-4": "from flask import Flask, redirect, url_for, request\nfrom werkzeug.utils import secure_filename\nimport torch, torchvision\nimport detectron2\nfrom detectron2.utils.logger import setup_logger\nsetup_logger()\nimport numpy as np\nimport os, json, cv2, random\nfrom detectron2 import model_zoo\nfrom detectron2.engine import DefaultPredictor\nfrom detectron2.config import get_cfg\nfrom detectron2.utils.visualizer import Visualizer\nfrom detectron2.data import MetadataCatalog, DatasetCatalog\nfrom detectron2.structures import Instances\nimport os\nimport sys\napp = Flask(__name__)\n\n\ndef init_setup():\n cfg = get_cfg()\n cfg.merge_from_file(model_zoo.get_config_file(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml'))\n cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5\n cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(\n 'COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml')\n cfg.MODEL.DEVICE = 'cpu'\n return cfg\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in app.config[\n 'ALLOWED_EXTENSIONS']\n\n\ndef detect_object(filename):\n PATH_TO_TEST_IMAGES_DIR = app.config['UPLOAD_FOLDER']\n TEST_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, filename)\n PRED_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, 'pred_' + filename)\n im = cv2.imread(TEST_IMAGE_PATH)\n cfg = app.config['detectron2_cfg']\n predictor = DefaultPredictor(cfg)\n outputs = predictor(im)\n data_set = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])\n pred_inst = outputs['instances'].to('cpu')\n show_inst = []\n pred_res = []\n for tc in app.config['THING_CLASSES']:\n if tc not in data_set.thing_classes:\n print('Thing Class:' + tc + ', Not found in the training set')\n continue\n t_idx = data_set.thing_classes.index(tc)\n filt_inst = pred_inst[pred_inst.pred_classes == t_idx]\n cat_cnt = len(filt_inst)\n if cat_cnt > 0:\n show_inst.append(filt_inst)\n pred_res.append({'t_class': tc, 't_count': cat_cnt})\n if len(show_inst) > 0:\n pred_inst = Instances.cat(show_inst)\n response = app.response_class(response=json.dumps({'result': pred_res}),\n status=200, mimetype='application/json')\n return response\n\n\[email protected]('/infer', methods=['POST'])\ndef infer():\n file = request.files['fimg']\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n return detect_object(filename=filename)\n\n\nif __name__ == '__main__':\n app.config['UPLOAD_FOLDER'] = '/app/imgstore/'\n app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg'])\n app.config['detectron2_cfg'] = init_setup()\n app.config['THING_CLASSES'] = ['banana', 'orange', 'carrot', 'apple',\n 'bottle']\n app.run(debug=False, host='0.0.0.0')\n",
"step-5": "#coding: utf-8\n\nfrom flask import Flask, redirect, url_for, request\nfrom werkzeug.utils import secure_filename\n\nimport torch, torchvision\n\n# Setup detectron2 logger\nimport detectron2\nfrom detectron2.utils.logger import setup_logger\nsetup_logger()\n\n# import some common libraries\nimport numpy as np\nimport os, json, cv2, random\n\n# import some common detectron2 utilities\nfrom detectron2 import model_zoo\nfrom detectron2.engine import DefaultPredictor\nfrom detectron2.config import get_cfg\nfrom detectron2.utils.visualizer import Visualizer\nfrom detectron2.data import MetadataCatalog, DatasetCatalog\nfrom detectron2.structures import Instances\n\nimport os\nimport sys\n\napp = Flask(__name__)\n\ndef init_setup():\n cfg = get_cfg()\n # add project-specific config (e.g., TensorMask) here if you're not running a model in detectron2's core library\n cfg.merge_from_file(model_zoo.get_config_file(\"COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml\"))\n cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5 # set threshold for this model\n # Find a model from detectron2's model zoo. You can use the https://dl.fbaipublicfiles... url as well\n cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(\"COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml\")\n cfg.MODEL.DEVICE='cpu'\n return cfg\n\ndef allowed_file(filename):\n return '.' in filename and \\\n filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']\n\ndef detect_object(filename):\n PATH_TO_TEST_IMAGES_DIR = app.config['UPLOAD_FOLDER']\n TEST_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, filename)\n PRED_IMAGE_PATH = os.path.join(PATH_TO_TEST_IMAGES_DIR, 'pred_' + filename)\n \n im = cv2.imread(TEST_IMAGE_PATH)\n cfg = app.config['detectron2_cfg']\n\n predictor = DefaultPredictor(cfg)\n outputs = predictor(im)\n\n # filterout bana and orage\n data_set = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])\n\n # print(data_set.thing_classes)\n pred_inst = outputs[\"instances\"].to(\"cpu\")\n\n show_inst = []\n pred_res = []\n for tc in app.config['THING_CLASSES']:\n if tc not in data_set.thing_classes:\n print(\"Thing Class:\"+ tc +\", Not found in the training set\")\n continue\n t_idx = data_set.thing_classes.index(tc)\n filt_inst = pred_inst[pred_inst.pred_classes == t_idx]\n cat_cnt = len(filt_inst)\n if cat_cnt > 0:\n show_inst.append(filt_inst)\n pred_res.append({\"t_class\": tc, \"t_count\":cat_cnt})\n\n if len(show_inst) > 0:\n pred_inst = Instances.cat(show_inst)\n\n # Comment this out later\n # v = Visualizer(im[:, :, ::-1],data_set , scale=0.3)\n # out = v.draw_instance_predictions(pred_inst)\n # cv2.imwrite(PRED_IMAGE_PATH, out.get_image()[:, :, ::-1])\n \n response = app.response_class(\n response=json.dumps({'result': pred_res}),\n status=200,\n mimetype='application/json'\n )\n\n return response\n\n\[email protected](\"/infer\", methods=['POST'])\ndef infer():\n file = request.files['fimg']\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n return detect_object(filename=filename)\n\nif __name__ == '__main__':\n app.config['UPLOAD_FOLDER'] = '/app/imgstore/'\n app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg'])\n app.config['detectron2_cfg'] = init_setup()\n app.config['THING_CLASSES'] = ['banana', 'orange', 'carrot', 'apple', 'bottle']\n app.run(debug=False,host='0.0.0.0')\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import collections
from typing import Dict, List, Optional


class TrieNode:
def __init__(self):
self.children: Dict[str, TrieNode] = collections.defaultdict(TrieNode)
self.word: Optional[str] = None
class Solution:
    def findWords(self, board: List[List[str]], words: List[str]) -> List[str]:
m = len(board)
n = len(board[0])
ans = []
root = TrieNode()
        def insert(word: str) -> None:
node = root
for c in word:
if c not in node.children:
node.children[c] = TrieNode()
node = node.children[c]
node.word = word
for word in words:
insert(word)
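
        # Backtracking DFS (below): '*' marks the current cell as visited, trie
        # children prune dead branches, and clearing child.word deduplicates hits.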
        def dfs(i: int, j: int, node: TrieNode) -> None:
if i < 0 or i == m or j < 0 or j == n:
return
if board[i][j] == '*':
return
c = board[i][j]
if c not in node.children:
return
child = node.children[c]
if child.word:
ans.append(child.word)
child.word = None
board[i][j] = '*'
dfs(i + 1, j, child)
dfs(i - 1, j, child)
dfs(i, j + 1, child)
dfs(i, j - 1, child)
board[i][j] = c
for i in range(m):
for j in range(n):
dfs(i, j, root)
return ans
|
normal
|
{
"blob_id": "f996dffcb9650663278ec1e31d9f88d50142f4ea",
"index": 4491,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def findWords(self, board: List[List[str]], words: List[str]) ->List[str]:\n m = len(board)\n n = len(board[0])\n ans = []\n root = TrieNode()\n\n def insert(word: str) ->None:\n node = root\n for c in word:\n if c not in node.children:\n node.children[c] = TrieNode()\n node = node.children[c]\n node.word = word\n for word in words:\n insert(word)\n\n def dfs(i: int, j: int, node: TrieNode) ->None:\n if i < 0 or i == m or j < 0 or j == n:\n return\n if board[i][j] == '*':\n return\n c = board[i][j]\n if c not in node.children:\n return\n child = node.children[c]\n if child.word:\n ans.append(child.word)\n child.word = None\n board[i][j] = '*'\n dfs(i + 1, j, child)\n dfs(i - 1, j, child)\n dfs(i, j + 1, child)\n dfs(i, j - 1, child)\n board[i][j] = c\n for i in range(m):\n for j in range(n):\n dfs(i, j, root)\n return ans\n",
"step-3": "class TrieNode:\n <mask token>\n\n\nclass Solution:\n\n def findWords(self, board: List[List[str]], words: List[str]) ->List[str]:\n m = len(board)\n n = len(board[0])\n ans = []\n root = TrieNode()\n\n def insert(word: str) ->None:\n node = root\n for c in word:\n if c not in node.children:\n node.children[c] = TrieNode()\n node = node.children[c]\n node.word = word\n for word in words:\n insert(word)\n\n def dfs(i: int, j: int, node: TrieNode) ->None:\n if i < 0 or i == m or j < 0 or j == n:\n return\n if board[i][j] == '*':\n return\n c = board[i][j]\n if c not in node.children:\n return\n child = node.children[c]\n if child.word:\n ans.append(child.word)\n child.word = None\n board[i][j] = '*'\n dfs(i + 1, j, child)\n dfs(i - 1, j, child)\n dfs(i, j + 1, child)\n dfs(i, j - 1, child)\n board[i][j] = c\n for i in range(m):\n for j in range(n):\n dfs(i, j, root)\n return ans\n",
"step-4": "class TrieNode:\n\n def __init__(self):\n self.children: Dict[str, TrieNode] = collections.defaultdict(TrieNode)\n self.word: Optional[str] = None\n\n\nclass Solution:\n\n def findWords(self, board: List[List[str]], words: List[str]) ->List[str]:\n m = len(board)\n n = len(board[0])\n ans = []\n root = TrieNode()\n\n def insert(word: str) ->None:\n node = root\n for c in word:\n if c not in node.children:\n node.children[c] = TrieNode()\n node = node.children[c]\n node.word = word\n for word in words:\n insert(word)\n\n def dfs(i: int, j: int, node: TrieNode) ->None:\n if i < 0 or i == m or j < 0 or j == n:\n return\n if board[i][j] == '*':\n return\n c = board[i][j]\n if c not in node.children:\n return\n child = node.children[c]\n if child.word:\n ans.append(child.word)\n child.word = None\n board[i][j] = '*'\n dfs(i + 1, j, child)\n dfs(i - 1, j, child)\n dfs(i, j + 1, child)\n dfs(i, j - 1, child)\n board[i][j] = c\n for i in range(m):\n for j in range(n):\n dfs(i, j, root)\n return ans\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
#!/usr/bin/env python3
# coding: utf-8
"""
    Blaise de Vigenère (1523–1596), a French cryptographer, developed the
    encryption scheme that this VigenereCipher class implements, with a
    configurable strength (the number of encryption/decryption passes).
VERSION : 1.0
LICENSE : GNU GPLv3
STYLE : PEP 8
AUTHOR : AKULA.S.S.S.R.Krishna
Date : 05/11/2020
    PURPOSE : To encrypt and decrypt text-based files
INPUT : python3 VingenerCipher -i sample_file.txt -e "sample password"
OUTPUT : sample_file.txt will be replaced with encrypted data.
"""
import os
import argparse
class VigenereCipher(object):
def __init__(self, key):
        print('Vigenere Cipher Encryption')
self.key = key
def encode(self, text): # Based on password every character
key = self.key # will be encrypted with different bias
ans = ''
for index, i in enumerate(text):
if(ord('!') <= ord(i) <= ord('~')):
index %= len(key)
if(ord(i) + ord(key[index]) - ord('!') > ord('~')):
ans += (chr(ord('!') + (ord(i) + ord(key[index])
- ord('!')) % ord('~') - 1))
else:
ans += (chr(ord(i) + ord(key[index]) - ord('!')))
else:
ans += i
return ans
def decode(self, text): # Based on password every character
key = self.key # will be decrypted with different bias
ans = ''
for index, i in enumerate(text):
if(ord('!') <= ord(i) <= ord('~')):
index %= len(key)
if((ord('!') + ord(i) - ord(key[index])) < ord('!')):
ans += (chr(ord('~') + (ord(i) - ord(key[index])) + 1))
else:
ans += (chr(ord('!') + ord(i) - ord(key[index])))
else:
ans += i
return ans
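
# Round-trip sketch (illustrative values, not part of the original script):
#   vc = VigenereCipher('key')
#   assert vc.decode(vc.encode('Hello, World!')) == 'Hello, World!'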
def read_from_file(file_name):
f = open(file_name, 'r')
data = f.read()
f.close()
return data
def write_to_file(file_name, data):
f = open(file_name, 'w')
    f.write(data)
f.close()
def encode_from_file(file_name, obj):
data = read_from_file(file_name)
for _ in range(args.strength):
data = obj.encode(data)
write_to_file(file_name, data) # Replaces file with encrypted data
print('encode file -> ' + file_name)
def decode_from_file(file_name, obj):
data = read_from_file(file_name)
for _ in range(args.strength):
data = obj.decode(data)
write_to_file(file_name, data) # Replaces file with decrypted data
print('decode file -> ' + file_name)
def encription_form_path(PATH, obj): # Recursive function (MT-safe)
try:
for path in os.listdir(PATH):
encription_form_path(PATH + '/' + path, obj)
except(OSError):
if(args.encode):
encode_from_file(PATH, obj)
elif(args.decode):
decode_from_file(PATH, obj)
"""
input can be either -i file / -f folder,
encode -e, decode -d for encryption and decryption respectively,
strength -s indicates number of times to be encrypted / decrypted.
"""
parser = argparse.ArgumentParser('Description of your program')
parser.add_argument('-i', '--input_file',
help='input file name', required=False)
parser.add_argument('-e', '--encode',
help='encode password', required=False)
parser.add_argument('-d', '--decode',
help='decode password', required=False)
parser.add_argument('-f', '--folder',
help='folder name', required=False)
parser.add_argument('-s', '--strength',
help='encription strength', type=int,
default=1, required=False)
args = (parser.parse_args())
if(args.input_file):
PATH = args.input_file
elif(args.folder):
PATH = args.folder
else:
exit('Need --input_file or --folder\nUse -h for help')
if(args.encode):
pswd = args.encode
elif(args.decode):
pswd = args.decode
else:
exit('Need --encode or --decode\nUse -h for help')
obj = VigenereCipher(pswd)
encription_form_path(PATH, obj)
|
normal
|
{
"blob_id": "38906a31ab96e05a9e55a51260632538872ed463",
"index": 6889,
"step-1": "<mask token>\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\n<mask token>\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\ndef read_from_file(file_name):\n f = open(file_name, 'r')\n data = f.read()\n f.close()\n return data\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\ndef read_from_file(file_name):\n f = open(file_name, 'r')\n data = f.read()\n f.close()\n return data\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\nparser = argparse.ArgumentParser('Description of your program')\nparser.add_argument('-i', '--input_file', help='input file name', required=\n False)\nparser.add_argument('-e', '--encode', help='encode password', required=False)\nparser.add_argument('-d', '--decode', help='decode password', required=False)\nparser.add_argument('-f', '--folder', help='folder name', required=False)\nparser.add_argument('-s', '--strength', help='encription strength', type=\n int, default=1, required=False)\nargs = parser.parse_args()\nif args.input_file:\n PATH = args.input_file\nelif args.folder:\n PATH = args.folder\nelse:\n exit('Need --input_file or --folder\\nUse -h for help')\nif args.encode:\n pswd = args.encode\nelif args.decode:\n pswd = args.decode\nelse:\n exit('Need --encode or --decode\\nUse -h for help')\nobj = VigenereCipher(pswd)\nencription_form_path(PATH, obj)\n",
"step-4": "<mask token>\nimport os\nimport argparse\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\ndef read_from_file(file_name):\n f = open(file_name, 'r')\n data = f.read()\n f.close()\n return data\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\nparser = argparse.ArgumentParser('Description of your program')\nparser.add_argument('-i', '--input_file', help='input file name', required=\n False)\nparser.add_argument('-e', '--encode', help='encode password', required=False)\nparser.add_argument('-d', '--decode', help='decode password', required=False)\nparser.add_argument('-f', '--folder', help='folder name', required=False)\nparser.add_argument('-s', '--strength', help='encription strength', type=\n int, default=1, required=False)\nargs = parser.parse_args()\nif args.input_file:\n PATH = args.input_file\nelif args.folder:\n PATH = args.folder\nelse:\n exit('Need --input_file or --folder\\nUse -h for help')\nif args.encode:\n pswd = args.encode\nelif args.decode:\n pswd = args.decode\nelse:\n exit('Need --encode or --decode\\nUse -h for help')\nobj = VigenereCipher(pswd)\nencription_form_path(PATH, obj)\n",
"step-5": "#!/usr/bin/env python3\r\n# coding: utf-8\r\n\r\n\r\n\"\"\"\r\n Blaise de Vigenère (1523–1596) mathematician, developed encryption scheme,\r\n VigenereCipher algorithm is implemented based on his work, with a utility\r\n of relative strength index for encryption and decryption.\r\n\r\n VERSION : 1.0\r\n LICENSE : GNU GPLv3\r\n STYLE : PEP 8\r\n AUTHOR : AKULA.S.S.S.R.Krishna\r\n Date : 05/11/2020\r\n\r\n PURPOSE : To encrypt and decrypt text based files\r\n INPUT : python3 VingenerCipher -i sample_file.txt -e \"sample password\"\r\n OUTPUT : sample_file.txt will be replaced with encrypted data.\r\n\"\"\"\r\n\r\n\r\nimport os\r\nimport argparse\r\n\r\n\r\nclass VigenereCipher(object):\r\n def __init__(self, key):\r\n print('Vigenere Cipher Encription')\r\n self.key = key\r\n\r\n def encode(self, text): # Based on password every character\r\n key = self.key # will be encrypted with different bias\r\n ans = ''\r\n for index, i in enumerate(text):\r\n if(ord('!') <= ord(i) <= ord('~')):\r\n index %= len(key)\r\n if(ord(i) + ord(key[index]) - ord('!') > ord('~')):\r\n ans += (chr(ord('!') + (ord(i) + ord(key[index])\r\n - ord('!')) % ord('~') - 1))\r\n else:\r\n ans += (chr(ord(i) + ord(key[index]) - ord('!')))\r\n else:\r\n ans += i\r\n return ans\r\n\r\n def decode(self, text): # Based on password every character\r\n key = self.key # will be decrypted with different bias\r\n ans = ''\r\n for index, i in enumerate(text):\r\n if(ord('!') <= ord(i) <= ord('~')):\r\n index %= len(key)\r\n if((ord('!') + ord(i) - ord(key[index])) < ord('!')):\r\n ans += (chr(ord('~') + (ord(i) - ord(key[index])) + 1))\r\n else:\r\n ans += (chr(ord('!') + ord(i) - ord(key[index])))\r\n else:\r\n ans += i\r\n return ans\r\n\r\n\r\ndef read_from_file(file_name):\r\n f = open(file_name, 'r')\r\n data = f.read()\r\n f.close()\r\n return data\r\n\r\n\r\ndef write_to_file(file_name, data):\r\n f = open(file_name, 'w')\r\n data = f.write(data)\r\n f.close()\r\n\r\n\r\ndef encode_from_file(file_name, obj):\r\n data = read_from_file(file_name)\r\n for _ in range(args.strength):\r\n data = obj.encode(data)\r\n write_to_file(file_name, data) # Replaces file with encrypted data\r\n print('encode file -> ' + file_name)\r\n\r\n\r\ndef decode_from_file(file_name, obj):\r\n data = read_from_file(file_name)\r\n for _ in range(args.strength):\r\n data = obj.decode(data)\r\n write_to_file(file_name, data) # Replaces file with decrypted data\r\n print('decode file -> ' + file_name)\r\n\r\n\r\ndef encription_form_path(PATH, obj): # Recursive function (MT-safe)\r\n try:\r\n for path in os.listdir(PATH):\r\n encription_form_path(PATH + '/' + path, obj)\r\n except(OSError):\r\n if(args.encode):\r\n encode_from_file(PATH, obj)\r\n elif(args.decode):\r\n decode_from_file(PATH, obj)\r\n\r\n\r\n\"\"\"\r\n input can be either -i file / -f folder,\r\n encode -e, decode -d for encryption and decryption respectively,\r\n strength -s indicates number of times to be encrypted / decrypted.\r\n\r\n\"\"\"\r\n\r\n\r\nparser = argparse.ArgumentParser('Description of your program')\r\nparser.add_argument('-i', '--input_file',\r\n help='input file name', required=False)\r\nparser.add_argument('-e', '--encode',\r\n help='encode password', required=False)\r\nparser.add_argument('-d', '--decode',\r\n help='decode password', required=False)\r\nparser.add_argument('-f', '--folder',\r\n help='folder name', required=False)\r\nparser.add_argument('-s', '--strength',\r\n help='encription strength', type=int,\r\n default=1, required=False)\r\nargs = 
(parser.parse_args())\r\n\r\nif(args.input_file):\r\n PATH = args.input_file\r\nelif(args.folder):\r\n PATH = args.folder\r\nelse:\r\n exit('Need --input_file or --folder\\nUse -h for help')\r\n\r\nif(args.encode):\r\n pswd = args.encode\r\nelif(args.decode):\r\n pswd = args.decode\r\nelse:\r\n exit('Need --encode or --decode\\nUse -h for help')\r\n\r\n\r\nobj = VigenereCipher(pswd)\r\nencription_form_path(PATH, obj)\r\n",
"step-ids": [
8,
9,
11,
12,
13
]
}
|
[
8,
9,
11,
12,
13
] |
#!/usr/bin/python
# -*- coding:utf-8 -*-
import importlib
def import_string(path):
    """
    Import an object from a dotted-path string.
    :param path: e.g. 'src.engine.agent.AgentHandler'
    :return: the attribute named by the final path segment
    """
    module_path, cls_name = path.rsplit('.', maxsplit=1)
    module = importlib.import_module(module_path)
    return getattr(module, cls_name)
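
# Usage sketch, reusing the docstring's example path (assumed to exist):
#   AgentHandler = import_string('src.engine.agent.AgentHandler')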
|
normal
|
{
"blob_id": "8502ebdb13c68a9a56a1a4ba51370d8458ca81dc",
"index": 7944,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef import_string(path):\n \"\"\"\n 根据字符串的形式去导入路径中的对象\n :param path: 'src.engine.agent.AgentHandler'\n :return:\n \"\"\"\n module_path, cls_name = path.rsplit('.', maxsplit=1)\n module = importlib.import_module(module_path)\n return getattr(module, cls_name)\n",
"step-3": "import importlib\n\n\ndef import_string(path):\n \"\"\"\n 根据字符串的形式去导入路径中的对象\n :param path: 'src.engine.agent.AgentHandler'\n :return:\n \"\"\"\n module_path, cls_name = path.rsplit('.', maxsplit=1)\n module = importlib.import_module(module_path)\n return getattr(module, cls_name)\n",
"step-4": "#!/usr/bin/python\n# -*- coding:utf-8 -*-\nimport importlib\n\ndef import_string(path):\n \"\"\"\n 根据字符串的形式去导入路径中的对象\n :param path: 'src.engine.agent.AgentHandler'\n :return:\n \"\"\"\n\n module_path,cls_name = path.rsplit('.',maxsplit=1)\n module = importlib.import_module(module_path)\n return getattr(module,cls_name)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.apps import AppConfig
class TermserviceConfig(AppConfig):
name = 'termservice'
|
normal
|
{
"blob_id": "f0168a737b9215520ce600470f9b27837dafb593",
"index": 4183,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass TermserviceConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TermserviceConfig(AppConfig):\n name = 'termservice'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass TermserviceConfig(AppConfig):\n name = 'termservice'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from package import *
class mysql(MakePackage):
dependencies = ["cmake"]
fetch="http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.10.tar.gz/from/http://cdn.mysql.com/"
config='cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'
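
# Note: MakePackage comes from the local 'package' module; presumably it
# downloads 'fetch' and runs 'config' with %(prefix)s substituted (an
# assumption, since that module is not shown here).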
|
normal
|
{
"blob_id": "ec90c731a0e546d9d399cbb68c92be1acca8cbe0",
"index": 518,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass mysql(MakePackage):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass mysql(MakePackage):\n dependencies = ['cmake']\n fetch = (\n 'http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.10.tar.gz/from/http://cdn.mysql.com/'\n )\n config = (\n 'cmake -G \"Unix Makefiles\" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'\n )\n",
"step-4": "from package import *\n\n\nclass mysql(MakePackage):\n dependencies = ['cmake']\n fetch = (\n 'http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.10.tar.gz/from/http://cdn.mysql.com/'\n )\n config = (\n 'cmake -G \"Unix Makefiles\" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'\n )\n",
"step-5": "\nfrom package import *\n\nclass mysql(MakePackage):\n dependencies = [\"cmake\"]\n fetch=\"http://dev.mysql.com/get/Downloads/MySQL-5.6/mysql-5.6.10.tar.gz/from/http://cdn.mysql.com/\"\n config='cmake -G \"Unix Makefiles\" -DCMAKE_INSTALL_PREFIX=%(prefix)s -DWITH_READLINE=1'\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from time import time
import threading
import os
#hh:mm:ss
movie1Time = "00:00:00"
movie2Time = "00:00:00"
movie3Time = "00:00:00"
movie4Time = "00:00:00"
movie5Time = "00:00:00"
timer1Start = None
timer1Time = "00:00:00"
timer1Running = False
timer2Start = None
timer2Time = "00:00:00"
timer2Running = False
timer3Start = None
timer3Time = "00:00:00"
timer3Running = False
timer4Start = None
timer4Time = "00:00:00"
timer4Running = False
timer5Start = None
timer5Time = "00:00:00"
timer5Running = False
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main.ui'
#
# Created: Wed May 21 20:35:02 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import sys
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class TimerBackground(QtCore.QThread):
index_finished = QtCore.pyqtSignal([str, QtCore.QObject])
def __init__(self, timerStart, timerRunning, timerNumber, movieTime, textBrowser, parent=None):
QtCore.QThread.__init__(self, parent)
self.timerStart = timerStart
self.timerRunning = timerRunning
self.timerNumber = timerNumber
self.textBrowser = textBrowser
self.movieTime = movieTime
def run(self):
self.incrememnt(self.timerStart, self.timerRunning, self.timerNumber, self.movieTime)
def formatTime(self, time):
formattedTime = ''
        hours = time // 3600  # integer division keeps these ints on Python 3 too
        minutes = time // 60
        seconds = time % 60
        #handles hours
        if hours == 0:
            formattedTime += "00:"
        elif len(str(hours)) == 1:
            formattedTime += '0' + str(hours) + ':'
        else:
            formattedTime += str(hours) + ':'  # colon separator was missing for 2+ digit hours
#handles minutes
if minutes == 0:
formattedTime += "00:"
elif minutes >= 60:
newMinutes = minutes
if minutes % 60 == 0:
newMinutes = 0
while newMinutes > 60:
newMinutes -= 60
if len(str(newMinutes)) == 1:
formattedTime += '0' + str(newMinutes) + ':'
else:
formattedTime += str(newMinutes) + ':'
else:
if len(str(minutes)) == 1:
formattedTime += '0' + str(minutes) + ':'
else:
                formattedTime += str(minutes) + ':'  # colon separator was missing here
#handles seconds
if len(str(seconds)) == 1:
formattedTime += '0' + str(seconds)
else:
formattedTime += str(seconds)
return formattedTime
def deformatTime(self, time):
timeInSecs = 0
timeInSecs += int(time[0:2]) * 3600 # hours
timeInSecs += int(time[3:5]) * 60 # minutes
timeInSecs += int(time[6:8]) # seconds
return timeInSecs
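
    # Quick check: self.deformatTime("01:02:03") == 1*3600 + 2*60 + 3 == 3723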
def incrememnt(self, timerStart, timerRunning, timerNumber, movieTime):
global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time
if timerRunning:
convertedTime = self.deformatTime(movieTime)
timerTime = self.formatTime(int(time()) - int(timerStart) + convertedTime)
if timerNumber == 1:
timer1Time = timerTime
self.index_finished.emit(timer1Time, self.textBrowser)
elif timerNumber == 2:
timer2Time = timerTime
self.index_finished.emit(timer2Time, self.textBrowser)
elif timerNumber == 3:
timer3Time = timerTime
self.index_finished.emit(timer3Time, self.textBrowser)
elif timerNumber == 4:
timer4Time = timerTime
self.index_finished.emit(timer4Time, self.textBrowser)
elif timerNumber == 5:
timer5Time = timerTime
self.index_finished.emit(timer5Time, self.textBrowser)
else:
timerStart = None
            self.index_finished.emit('none', self.textBrowser)  # signal declares (str, QObject), so both args are needed
return timerStart
class Ui_Form1(QtGui.QWidget):
def __init__(self):
QtGui.QWidget.__init__(self)
self.setupUi(self)
if os.path.exists(os.getcwd() + '\\settings.ini') and os.path.getsize(os.getcwd() + '\\settings.ini') > 0:
with open(os.getcwd() + '\\settings.ini', 'r') as var:
global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time
movie1Time = var.readline().strip()
self.updateGUITimers(movie1Time, self.textBrowser_6)
movie2Time = var.readline().strip()
self.updateGUITimers(movie2Time, self.textBrowser_2)
movie3Time = var.readline().strip()
self.updateGUITimers(movie3Time, self.textBrowser_5)
movie4Time = var.readline().strip()
self.updateGUITimers(movie4Time, self.textBrowser_3)
movie5Time = var.readline().strip()
self.updateGUITimers(movie5Time, self.textBrowser_4)
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(611, 289)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())
Form.setSizePolicy(sizePolicy)
Form.setMinimumSize(QtCore.QSize(611, 289))
Form.setMaximumSize(QtCore.QSize(611, 289))
self.verticalLayoutWidget = QtGui.QWidget(Form)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))
self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)
self.movieOne.setObjectName(_fromUtf8("movieOne"))
self.verticalLayout.addWidget(self.movieOne)
self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)
self.movieTwo.setObjectName(_fromUtf8("movieTwo"))
self.verticalLayout.addWidget(self.movieTwo)
self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)
self.movieThree.setObjectName(_fromUtf8("movieThree"))
self.verticalLayout.addWidget(self.movieThree)
self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)
self.movieFour.setObjectName(_fromUtf8("movieFour"))
self.verticalLayout.addWidget(self.movieFour)
self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)
self.movieFive.setObjectName(_fromUtf8("movieFive"))
self.verticalLayout.addWidget(self.movieFive)
self.DesignedBy = QtGui.QLabel(Form)
self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))
self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)
self.DesignedBy.setObjectName(_fromUtf8("DesignedBy"))
self.sourceAt = QtGui.QLabel(Form)
self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))
self.sourceAt.setObjectName(_fromUtf8("sourceAt"))
self.label = QtGui.QLabel(Form)
self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayoutWidget_2 = QtGui.QWidget(Form)
self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261))
self.verticalLayoutWidget_2.setObjectName(_fromUtf8("verticalLayoutWidget_2"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
self.verticalLayout_2.setMargin(0)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)
self.startTwo.setObjectName(_fromUtf8("startTwo"))
self.verticalLayout_2.addWidget(self.startTwo)
self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)
self.startOne.setObjectName(_fromUtf8("startOne"))
self.verticalLayout_2.addWidget(self.startOne)
self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)
self.startThree.setObjectName(_fromUtf8("startThree"))
self.verticalLayout_2.addWidget(self.startThree)
self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)
self.startFour.setObjectName(_fromUtf8("startFour"))
self.verticalLayout_2.addWidget(self.startFour)
self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)
self.startFive.setObjectName(_fromUtf8("startFive"))
self.verticalLayout_2.addWidget(self.startFive)
self.horizontalLayoutWidget = QtGui.QWidget(Form)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80))
self.horizontalLayoutWidget.setObjectName(_fromUtf8("horizontalLayoutWidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
self.horizontalLayout.setMargin(0)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.save = QtGui.QPushButton(self.horizontalLayoutWidget)
self.save.setObjectName(_fromUtf8("save"))
self.horizontalLayout.addWidget(self.save)
self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)
self.settings.setObjectName(_fromUtf8("settings"))
self.horizontalLayout.addWidget(self.settings)
self.textBrowser_2 = QtGui.QTextBrowser(Form)
self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(113)
sizePolicy.setVerticalStretch(20)
sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().hasHeightForWidth())
self.textBrowser_2.setSizePolicy(sizePolicy)
self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))
self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textBrowser_2.setReadOnly(False)
self.textBrowser_2.setUndoRedoEnabled(True)
self.textBrowser_2.setObjectName(_fromUtf8("textBrowser_2"))
self.textBrowser_5 = QtGui.QTextBrowser(Form)
self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(113)
sizePolicy.setVerticalStretch(20)
sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().hasHeightForWidth())
self.textBrowser_5.setSizePolicy(sizePolicy)
self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))
self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textBrowser_5.setReadOnly(False)
self.textBrowser_5.setUndoRedoEnabled(True)
self.textBrowser_5.setObjectName(_fromUtf8("textBrowser_5"))
self.textBrowser_4 = QtGui.QTextBrowser(Form)
self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(113)
sizePolicy.setVerticalStretch(20)
sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().hasHeightForWidth())
self.textBrowser_4.setSizePolicy(sizePolicy)
self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))
self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textBrowser_4.setReadOnly(False)
self.textBrowser_4.setUndoRedoEnabled(True)
self.textBrowser_4.setObjectName(_fromUtf8("textBrowser_4"))
self.textBrowser_3 = QtGui.QTextBrowser(Form)
self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(113)
sizePolicy.setVerticalStretch(20)
sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().hasHeightForWidth())
self.textBrowser_3.setSizePolicy(sizePolicy)
self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))
self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textBrowser_3.setReadOnly(False)
self.textBrowser_3.setUndoRedoEnabled(True)
self.textBrowser_3.setObjectName(_fromUtf8("textBrowser_3"))
self.textBrowser_6 = QtGui.QTextBrowser(Form)
self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(113)
sizePolicy.setVerticalStretch(20)
sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().hasHeightForWidth())
self.textBrowser_6.setSizePolicy(sizePolicy)
self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))
self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textBrowser_6.setReadOnly(False)
self.textBrowser_6.setUndoRedoEnabled(True)
self.textBrowser_6.setObjectName(_fromUtf8("textBrowser_6"))
self.line = QtGui.QFrame(Form)
self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))
self.line.setFrameShape(QtGui.QFrame.VLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.label_2 = QtGui.QLabel(Form)
self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))
self.label_2.setOpenExternalLinks(True)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.label_3 = QtGui.QLabel(Form)
self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))
self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)
self.label_3.setText(_fromUtf8(""))
self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8("logo.jpg")))
self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.retranslateUi(Form)
QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie1)
QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie2)
QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie3)
QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie4)
QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie5)
QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer1State)
QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer2State)
QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer3State)
QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer4State)
QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer5State)
QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.saveChanges)
QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.reset)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_translate("Form", "Multiple Movie Timer", None))
self.movieOne.setText(_translate("Form", "Movie 1", None))
self.movieTwo.setText(_translate("Form", "Movie 2", None))
self.movieThree.setText(_translate("Form", "Movie 3", None))
self.movieFour.setText(_translate("Form", "Movie 4", None))
self.movieFive.setText(_translate("Form", "Movie 5", None))
self.DesignedBy.setText(_translate("Form", "This program was\n"
"designed by:", None))
self.sourceAt.setText(_translate("Form", " Source is available at:", None))
self.label.setText(_translate("Form", "V 1.2", None))
self.startTwo.setText(_translate("Form", "Start / Stop", None))
self.startOne.setText(_translate("Form", "Start / Stop", None))
self.startThree.setText(_translate("Form", "Start / Stop", None))
self.startFour.setText(_translate("Form", "Start / Stop", None))
self.startFive.setText(_translate("Form", "Start / Stop", None))
self.save.setToolTip(_translate("Form", "<html><head/><body><p>Save all the current times</p></body></html>", None))
self.save.setText(_translate("Form", "Save", None))
self.settings.setText(_translate("Form", "Reset timers", None))
self.textBrowser_2.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
self.textBrowser_5.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
self.textBrowser_4.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
self.textBrowser_3.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
self.textBrowser_6.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
self.label_2.setText(_translate("Form", "<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>", None))
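    # The changeMovieN slots below are placeholders: they exist only so the
    # textChanged() connections made in setupUi have a valid target.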
def changeMovie1(self):
pass
def changeMovie2(self):
pass
def changeMovie3(self):
pass
def changeMovie4(self):
pass
def changeMovie5(self):
pass
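    # Each changeTimerNState slot toggles one timer: the first press records
    # a wall-clock start via time(), spawns a TimerBackground worker whose
    # index_finished signal feeds updateGUITimers, and re-arms itself once per
    # second through threading.Timer; the second press clears the running
    # flag, which both stops the re-arming loop and freezes the accumulated
    # movie time. Caveat: threading.Timer threads are non-daemon, so a timer
    # still running when the window closes keeps re-arming and can prevent
    # the interpreter from exiting.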
def changeTimer1State(self):
global movie1Time, timer1Running, timer1Start, timer1Time
if not timer1Running:
timer1Running = True
timer1Start = time()
self.thread1 = TimerBackground(timer1Start, timer1Running, 1, movie1Time, self.textBrowser_6)
self.thread1.index_finished.connect(self.updateGUITimers)
def loopThread():
if timer1Running:
self.thread1.start()
threading.Timer(1, loopThread).start()
loopThread()
elif timer1Running:
timer1Running = False
movie1Time = timer1Time
def changeTimer2State(self):
global movie2Time, timer2Running, timer2Start, timer2Time
if not timer2Running:
timer2Running = True
timer2Start = time()
self.thread2 = TimerBackground(timer2Start, timer2Running, 2, movie2Time, self.textBrowser_2)
self.thread2.index_finished.connect(self.updateGUITimers)
def loopThread():
if timer2Running:
self.thread2.start()
threading.Timer(1, loopThread).start()
loopThread()
elif timer2Running:
timer2Running = False
movie2Time = timer2Time
def changeTimer3State(self):
global movie3Time, timer3Running, timer3Start, timer3Time
if not timer3Running:
timer3Running = True
timer3Start = time()
self.thread3 = TimerBackground(timer3Start, timer3Running, 3, movie3Time, self.textBrowser_5)
self.thread3.index_finished.connect(self.updateGUITimers)
def loopThread():
if timer3Running:
self.thread3.start()
threading.Timer(1, loopThread).start()
loopThread()
elif timer3Running:
timer3Running = False
movie3Time = timer3Time
def changeTimer4State(self):
global movie4Time, timer4Running, timer4Start, timer4Time
if not timer4Running:
timer4Running = True
timer4Start = time()
self.thread4 = TimerBackground(timer4Start, timer4Running, 4, movie4Time, self.textBrowser_3)
self.thread4.index_finished.connect(self.updateGUITimers)
def loopThread():
if timer4Running:
self.thread4.start()
threading.Timer(1, loopThread).start()
loopThread()
elif timer4Running:
timer4Running = False
movie4Time = timer4Time
def changeTimer5State(self):
global movie5Time, timer5Running, timer5Start, timer5Time
if not timer5Running:
timer5Running = True
timer5Start = time()
self.thread5 = TimerBackground(timer5Start, timer5Running, 5, movie5Time, self.textBrowser_4)
self.thread5.index_finished.connect(self.updateGUITimers)
def loopThread():
if timer5Running:
                    self.thread5.start()
threading.Timer(1, loopThread).start()
loopThread()
elif timer5Running:
timer5Running = False
movie5Time = timer5Time
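    # A minimal refactoring sketch (an assumption, not part of the original
    # design): the five near-identical changeTimerNState slots could collapse
    # into one parameterized helper. The _timers dict and _toggleTimer name
    # below are hypothetical, introduced for illustration only.
    #
    #     def _toggleTimer(self, n, movieTime, textBrowser):
    #         state = self._timers.setdefault(n, {'running': False})
    #         if not state['running']:
    #             state['running'] = True
    #             thread = TimerBackground(time(), True, n, movieTime, textBrowser)
    #             thread.index_finished.connect(self.updateGUITimers)
    #             thread.start()
    #         else:
    #             state['running'] = False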
def reset(self):
global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time
global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time
self.updateGUITimers('00:00:00', self.textBrowser_2)
self.updateGUITimers('00:00:00', self.textBrowser_3)
self.updateGUITimers('00:00:00', self.textBrowser_4)
self.updateGUITimers('00:00:00', self.textBrowser_5)
self.updateGUITimers('00:00:00', self.textBrowser_6)
timerStartingValue = '00:00:00'
movie1Time = timerStartingValue
movie2Time = timerStartingValue
movie3Time = timerStartingValue
movie4Time = timerStartingValue
movie5Time = timerStartingValue
timer1Time = timerStartingValue
timer2Time = timerStartingValue
timer3Time = timerStartingValue
timer4Time = timerStartingValue
        timer5Time = timerStartingValue
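    # saveChanges persists the five accumulated times, one "HH:MM:SS" line
    # per movie, to settings.ini in the current working directory; __init__
    # reads the same file back on startup. The '\\settings.ini' concatenation
    # is Windows-specific; os.path.join(cwd, 'settings.ini') would be the
    # portable spelling.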
def saveChanges(self):
cwd = os.getcwd()
with open(cwd + '\\settings.ini', 'w') as var:
toWrite = [movie1Time, movie2Time, movie3Time, movie4Time, movie5Time]
for i in toWrite:
var.write(i + '\n')
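    # updateGUITimers repaints one text browser with the Designer HTML
    # template, substituting the formatted time. The 'none' comparison skips
    # updates when there is nothing new to display (presumably a sentinel
    # emitted by the TimerBackground worker). The `time` parameter shadows
    # the module-level time() function, which is harmless here because this
    # slot never calls it.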
def updateGUITimers(self, time, textBrowser):
if time != 'none':
textBrowser.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">" + str(time) + "</span></p></body></html>", None))
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
ex = Ui_Form1()
ex.show()
sys.exit(app.exec_())
|
normal
|
{
"blob_id": "cef4568b4568bceeedca6d57c0ccacfaae67c061",
"index": 147,
"step-1": "<mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = 
QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n 
self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, 
Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" 
font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n <mask token>\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n 
self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n <mask token>\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TimerBackground(QtCore.QThread):\n <mask token>\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime,\n textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n <mask token>\n <mask token>\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600\n timeInSecs += int(time[3:5]) * 60\n timeInSecs += int(time[6:8])\n return timeInSecs\n <mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n 
self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = 
QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 
'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; 
text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1,\n movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n 
movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time,\n movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TimerBackground(QtCore.QThread):\n <mask token>\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime,\n textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n <mask token>\n\n def formatTime(self, time):\n formattedTime = ''\n hours = time / 3600\n minutes = time / 60\n seconds = time % 60\n if hours == 0:\n formattedTime += '00:'\n elif len(str(hours)) == 1:\n formattedTime += '0' + str(hours) + ':'\n else:\n formattedTime += str(hours)\n if minutes == 0:\n formattedTime += '00:'\n elif minutes >= 60:\n newMinutes = minutes\n if minutes % 60 == 0:\n newMinutes = 0\n while newMinutes > 60:\n newMinutes -= 60\n if len(str(newMinutes)) == 1:\n formattedTime += '0' + str(newMinutes) + ':'\n else:\n formattedTime += str(newMinutes) + ':'\n elif len(str(minutes)) == 1:\n formattedTime += '0' + str(minutes) + ':'\n else:\n formattedTime += str(minutes)\n if len(str(seconds)) == 1:\n formattedTime += '0' + str(seconds)\n else:\n formattedTime += str(seconds)\n return formattedTime\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600\n timeInSecs += int(time[3:5]) * 60\n timeInSecs += int(time[6:8])\n return timeInSecs\n <mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = 
QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n 
self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n 
self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; 
font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1,\n movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = 
False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time,\n movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TimerBackground(QtCore.QThread):\n <mask token>\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime,\n textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n\n def run(self):\n self.incrememnt(self.timerStart, self.timerRunning, self.\n timerNumber, self.movieTime)\n\n def formatTime(self, time):\n formattedTime = ''\n hours = time / 3600\n minutes = time / 60\n seconds = time % 60\n if hours == 0:\n formattedTime += '00:'\n elif len(str(hours)) == 1:\n formattedTime += '0' + str(hours) + ':'\n else:\n formattedTime += str(hours)\n if minutes == 0:\n formattedTime += '00:'\n elif minutes >= 60:\n newMinutes = minutes\n if minutes % 60 == 0:\n newMinutes = 0\n while newMinutes > 60:\n newMinutes -= 60\n if len(str(newMinutes)) == 1:\n formattedTime += '0' + str(newMinutes) + ':'\n else:\n formattedTime += str(newMinutes) + ':'\n elif len(str(minutes)) == 1:\n formattedTime += '0' + str(minutes) + ':'\n else:\n formattedTime += str(minutes)\n if len(str(seconds)) == 1:\n formattedTime += '0' + str(seconds)\n else:\n formattedTime += str(seconds)\n return formattedTime\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600\n timeInSecs += int(time[3:5]) * 60\n timeInSecs += int(time[6:8])\n return timeInSecs\n <mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n 
self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n 
self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n 
self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style 
type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1,\n movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n 
self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time,\n movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-5": "from time import time\nimport threading\nimport os\n#hh:mm:ss\nmovie1Time = \"00:00:00\"\nmovie2Time = \"00:00:00\"\nmovie3Time = \"00:00:00\"\nmovie4Time = \"00:00:00\"\nmovie5Time = \"00:00:00\"\n\ntimer1Start = None\ntimer1Time = \"00:00:00\"\ntimer1Running = False\ntimer2Start = None\ntimer2Time = \"00:00:00\"\ntimer2Running = False\ntimer3Start = None\ntimer3Time = \"00:00:00\"\ntimer3Running = False\ntimer4Start = None\ntimer4Time = \"00:00:00\"\ntimer4Running = False\ntimer5Start = None\ntimer5Time = \"00:00:00\"\ntimer5Running = False\n\n# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'main.ui'\n#\n# Created: Wed May 21 20:35:02 2014\n# by: PyQt4 UI code generator 4.10.4\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt4 import QtCore, QtGui\nimport sys\ntry:\n _fromUtf8 = QtCore.QString.fromUtf8\nexcept AttributeError:\n def _fromUtf8(s):\n return s\n\ntry:\n _encoding = QtGui.QApplication.UnicodeUTF8\n def _translate(context, text, disambig):\n return QtGui.QApplication.translate(context, text, disambig, _encoding)\nexcept AttributeError:\n def _translate(context, text, disambig):\n return QtGui.QApplication.translate(context, text, disambig)\n\n\nclass TimerBackground(QtCore.QThread):\n index_finished = QtCore.pyqtSignal([str, QtCore.QObject])\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime, textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n\n def run(self):\n self.incrememnt(self.timerStart, self.timerRunning, self.timerNumber, self.movieTime)\n\n def formatTime(self, time):\n formattedTime = ''\n hours = time / 3600\n minutes = time / 60\n seconds = time % 60\n #handles hours\n if hours == 0:\n formattedTime += \"00:\"\n elif len(str(hours)) == 1:\n formattedTime += '0' + str(hours) + ':'\n else:\n formattedTime += str(hours)\n #handles minutes\n if minutes == 0:\n formattedTime += \"00:\"\n elif minutes >= 60:\n newMinutes = minutes\n if minutes % 60 == 0:\n newMinutes = 0\n while newMinutes > 60:\n newMinutes -= 60\n if len(str(newMinutes)) == 1:\n formattedTime += '0' + str(newMinutes) + ':'\n else:\n formattedTime += str(newMinutes) + ':'\n else:\n if len(str(minutes)) == 1:\n formattedTime += '0' + str(minutes) + ':'\n else:\n formattedTime += str(minutes)\n #handles seconds\n if len(str(seconds)) == 1:\n formattedTime += '0' + str(seconds)\n else:\n formattedTime += str(seconds)\n return formattedTime\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600 # hours\n timeInSecs += int(time[3:5]) * 60 # minutes\n timeInSecs += int(time[6:8]) # seconds\n return timeInSecs\n\n def incrememnt(self, timerStart, timerRunning, timerNumber, movieTime):\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n if timerRunning:\n convertedTime = self.deformatTime(movieTime)\n timerTime = self.formatTime(int(time()) - int(timerStart) + convertedTime)\n if timerNumber == 1:\n timer1Time = timerTime\n self.index_finished.emit(timer1Time, self.textBrowser)\n elif timerNumber == 2:\n timer2Time = timerTime\n self.index_finished.emit(timer2Time, self.textBrowser)\n elif timerNumber == 3:\n timer3Time = timerTime\n self.index_finished.emit(timer3Time, self.textBrowser)\n elif timerNumber == 4:\n timer4Time = timerTime\n self.index_finished.emit(timer4Time, self.textBrowser)\n elif 
timerNumber == 5:\n timer5Time = timerTime\n self.index_finished.emit(timer5Time, self.textBrowser)\n else:\n timerStart = None\n self.index_finished.emit('none')\n return timerStart\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8(\"Form\"))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\"verticalLayoutWidget\"))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8(\"verticalLayout\"))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8(\"movieOne\"))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8(\"movieTwo\"))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8(\"movieThree\"))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8(\"movieFour\"))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8(\"movieFive\"))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8(\"DesignedBy\"))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8(\"sourceAt\"))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8(\"label\"))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261))\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\"verticalLayoutWidget_2\"))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8(\"verticalLayout_2\"))\n self.startTwo 
= QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8(\"startTwo\"))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8(\"startOne\"))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8(\"startThree\"))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8(\"startFour\"))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8(\"startFive\"))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80))\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\"horizontalLayoutWidget\"))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8(\"horizontalLayout\"))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8(\"save\"))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8(\"settings\"))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8(\"textBrowser_2\"))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8(\"textBrowser_5\"))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n 
self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8(\"textBrowser_4\"))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8(\"textBrowser_3\"))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8(\"textBrowser_6\"))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8(\"line\"))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8(\"label_2\"))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(\"\"))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8(\"logo.jpg\")))\n self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8(\"label_3\"))\n\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.saveChanges)\n 
QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate(\"Form\", \"Multiple Movie Timer\", None))\n self.movieOne.setText(_translate(\"Form\", \"Movie 1\", None))\n self.movieTwo.setText(_translate(\"Form\", \"Movie 2\", None))\n self.movieThree.setText(_translate(\"Form\", \"Movie 3\", None))\n self.movieFour.setText(_translate(\"Form\", \"Movie 4\", None))\n self.movieFive.setText(_translate(\"Form\", \"Movie 5\", None))\n self.DesignedBy.setText(_translate(\"Form\", \"This program was\\n\"\n\"designed by:\", None))\n self.sourceAt.setText(_translate(\"Form\", \" Source is available at:\", None))\n self.label.setText(_translate(\"Form\", \"V 1.2\", None))\n self.startTwo.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startOne.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startThree.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startFour.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startFive.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.save.setToolTip(_translate(\"Form\", \"<html><head/><body><p>Save all the current times</p></body></html>\", None))\n self.save.setText(_translate(\"Form\", \"Save\", None))\n self.settings.setText(_translate(\"Form\", \"Reset timers\", None))\n self.textBrowser_2.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_5.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_4.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_3.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" 
\\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_6.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.label_2.setText(_translate(\"Form\", \"<html><head/><body><p><a href=\\\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\\\"><span style=\\\" text-decoration: underline; color:#0000ff;\\\">https://github.com/tmwbook</span></a></p></body></html>\", None))\n\n\n def changeMovie1(self):\n pass\n def changeMovie2(self):\n pass\n def changeMovie3(self):\n pass\n def changeMovie4(self):\n pass\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1, movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2, movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3, movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4, movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def 
loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5, movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5 .start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time, movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n \"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n \"p, li { white-space: pre-wrap; }\\n\"\n \"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n \"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">\" + str(time) + \"</span></p></body></html>\", None))\n\n\nif __name__ == \"__main__\":\n app = QtGui.QApplication(sys.argv)\n ex = Ui_Form1()\n ex.show()\n sys.exit(app.exec_())",
"step-ids": [
15,
20,
21,
22,
28
]
}
|
[
15,
20,
21,
22,
28
] |
from collections import defaultdict  # needed by get_communities below
from math import log2

from egosplit.benchmarks.data_structures.cover_benchmark import *
from egosplit.benchmarks.evaluation.utility import create_line
from networkit.stopwatch import clockit


# Analyse the result cover of a benchmark run
@clockit
def analyze_cover(benchmarks, result_dir, calc_f1, append):
if not append:
print_headers(result_dir)

	for benchmark in benchmarks:
count_benchmark_cover(result_dir, calc_f1, benchmark)


# Print output file headers
def print_headers(result_dir):
with open(result_dir + 'cover_num_comms.result', 'w') as f:
f.write(create_line(*CoverBenchmark.output_header(), 'Number of Communities'))
with open(result_dir + 'cover_comm_sizes.result', 'w') as f:
f.write(create_line(*CoverBenchmark.output_header(), 'Community Size', 'F1 Score'))
with open(result_dir + 'cover_node_comms.result', 'w') as f:
f.write(create_line(*CoverBenchmark.output_header(), 'Number of Communities per Node'))


# Count the number of communities and their sizes
def count_benchmark_cover(result_dir, calc_f1, benchmark):
	cover = benchmark.get_cover()
	ground_truth = benchmark.get_ground_truth()
	comm_map = get_communities(benchmark.get_graph(), cover)
	gt_map = get_communities(benchmark.get_graph(), ground_truth)
	comm_sizes = cover.subsetSizeMap()

	# Number of communities
	with open(result_dir + 'cover_num_comms.result', 'a') as f:
		f.write(create_line(*benchmark.output_line(), cover.numberOfSubsets()))

	# Community sizes and F1 scores
	with open(result_dir + 'cover_comm_sizes.result', 'a') as f:
		for u in cover.getSubsetIds():
			comm = comm_map[u]
			size = comm_sizes[u]
			f1 = f1_score(comm, gt_map) if calc_f1 else 0
			f.write(create_line(*benchmark.output_line(), log2(size), f1))

	# Number of Communities per Node
	with open(result_dir + 'cover_node_comms.result', 'a') as f:
		for u in benchmark.get_graph().nodes():
			num_comms = len(cover.subsetsOf(u))
			if num_comms > 0:
				f.write(create_line(*benchmark.output_line(), log2(num_comms)))
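
# For example (hypothetical numbers): a community of 8 nodes is logged as
# log2(8) = 3.0 alongside its F1 score, and a node in 4 communities as
# log2(4) = 2.0, so both result files hold log-scaled counts.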


# Map each subset id of the cover to the set of nodes it contains
def get_communities(graph, cover):
	comm_map = defaultdict(set)
	for u in graph.nodes():
		comms = cover.subsetsOf(u)
		for c in comms:
			comm_map[c].add(u)

	return comm_map
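
# Illustrative note (hypothetical values, not part of the original module):
# for a cover in which node 0 lies in subsets 1 and 2, get_communities
# returns a map of the form {1: {0, ...}, 2: {0, ...}}, keyed by subset id.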


# Best F1 score of the community against any ground-truth community
def f1_score(community, ground_truth):
	max_f1 = 0.0
	for gt_comm in ground_truth.values():
		overlap = len(gt_comm.intersection(community))
		if overlap == 0:
			continue
		precision = overlap / len(community)
		recall = overlap / len(gt_comm)
		f1 = 2 * precision * recall / (precision + recall)
		max_f1 = max(max_f1, f1)

	return max_f1
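

# Minimal usage sketch (an assumption, not part of the original module); the
# names below are illustrative and presume CoverBenchmark objects produced by
# an earlier benchmark run.
#
# benchmarks = load_benchmarks(...)  # hypothetical helper yielding CoverBenchmark objects
# analyze_cover(benchmarks, result_dir='results/', calc_f1=True, append=False)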
|
normal
|
{
"blob_id": "dc5b9600828857cc5ea434a7b010cd8aa2589d22",
"index": 6568,
"step-1": "<mask token>\n\n\n@clockit\ndef analyze_cover(benchmarks, result_dir, calc_f1, append):\n if not append:\n print_headers(result_dir)\n for benchmark in benchmarks:\n count_benchmark_cover(result_dir, calc_f1, benchmark)\n\n\n<mask token>\n\n\ndef f1_score(community, ground_truth):\n max_f1 = 0.0\n for gt_comm in ground_truth.values():\n overlap = len(gt_comm.intersection(community))\n if overlap == 0:\n continue\n precision = overlap / len(community)\n recall = overlap / len(gt_comm)\n f1 = 2 * precision * recall / (precision + recall)\n max_f1 = max(max_f1, f1)\n return max_f1\n",
"step-2": "<mask token>\n\n\n@clockit\ndef analyze_cover(benchmarks, result_dir, calc_f1, append):\n if not append:\n print_headers(result_dir)\n for benchmark in benchmarks:\n count_benchmark_cover(result_dir, calc_f1, benchmark)\n\n\n<mask token>\n\n\ndef get_communities(graph, cover):\n comm_map = defaultdict(lambda : set())\n for u in graph.nodes():\n comms = cover.subsetsOf(u)\n for c in comms:\n comm_map[c].add(u)\n return comm_map\n\n\ndef f1_score(community, ground_truth):\n max_f1 = 0.0\n for gt_comm in ground_truth.values():\n overlap = len(gt_comm.intersection(community))\n if overlap == 0:\n continue\n precision = overlap / len(community)\n recall = overlap / len(gt_comm)\n f1 = 2 * precision * recall / (precision + recall)\n max_f1 = max(max_f1, f1)\n return max_f1\n",
"step-3": "<mask token>\n\n\n@clockit\ndef analyze_cover(benchmarks, result_dir, calc_f1, append):\n if not append:\n print_headers(result_dir)\n for benchmark in benchmarks:\n count_benchmark_cover(result_dir, calc_f1, benchmark)\n\n\ndef print_headers(result_dir):\n with open(result_dir + 'cover_num_comms.result', 'w') as f:\n f.write(create_line(*CoverBenchmark.output_header(),\n 'Number of Communities'))\n with open(result_dir + 'cover_comm_sizes.result', 'w') as f:\n f.write(create_line(*CoverBenchmark.output_header(),\n 'Community Size', 'F1 Score'))\n with open(result_dir + 'cover_node_comms.result', 'w') as f:\n f.write(create_line(*CoverBenchmark.output_header(),\n 'Number of Communities per Node'))\n\n\ndef count_benchmark_cover(result_dir, calc_f1, benchmark):\n cover = benchmark.get_cover()\n ground_truth = benchmark.get_ground_truth()\n comm_map = get_communities(benchmark.get_graph(), cover)\n gt_map = get_communities(benchmark.get_graph(), ground_truth)\n comm_sizes = cover.subsetSizeMap()\n with open(result_dir + 'cover_num_comms.result', 'a') as f:\n f.write(create_line(*benchmark.output_line(), cover.numberOfSubsets()))\n with open(result_dir + 'cover_comm_sizes.result', 'a') as f:\n for u in cover.getSubsetIds():\n comm = comm_map[u]\n size = comm_sizes[u]\n f1 = f1_score(comm, gt_map) if calc_f1 else 0\n f.write(create_line(*benchmark.output_line(), log2(size), f1))\n with open(result_dir + 'cover_node_comms.result', 'a') as f:\n for u in benchmark.get_graph().nodes():\n num_comms = len(cover.subsetsOf(u))\n if num_comms > 0:\n f.write(create_line(*benchmark.output_line(), log2(num_comms)))\n\n\ndef get_communities(graph, cover):\n comm_map = defaultdict(lambda : set())\n for u in graph.nodes():\n comms = cover.subsetsOf(u)\n for c in comms:\n comm_map[c].add(u)\n return comm_map\n\n\ndef f1_score(community, ground_truth):\n max_f1 = 0.0\n for gt_comm in ground_truth.values():\n overlap = len(gt_comm.intersection(community))\n if overlap == 0:\n continue\n precision = overlap / len(community)\n recall = overlap / len(gt_comm)\n f1 = 2 * precision * recall / (precision + recall)\n max_f1 = max(max_f1, f1)\n return max_f1\n",
"step-4": "from math import log2\nfrom egosplit.benchmarks.data_structures.cover_benchmark import *\nfrom egosplit.benchmarks.evaluation.utility import create_line\nfrom networkit.stopwatch import clockit\n\n\n@clockit\ndef analyze_cover(benchmarks, result_dir, calc_f1, append):\n if not append:\n print_headers(result_dir)\n for benchmark in benchmarks:\n count_benchmark_cover(result_dir, calc_f1, benchmark)\n\n\ndef print_headers(result_dir):\n with open(result_dir + 'cover_num_comms.result', 'w') as f:\n f.write(create_line(*CoverBenchmark.output_header(),\n 'Number of Communities'))\n with open(result_dir + 'cover_comm_sizes.result', 'w') as f:\n f.write(create_line(*CoverBenchmark.output_header(),\n 'Community Size', 'F1 Score'))\n with open(result_dir + 'cover_node_comms.result', 'w') as f:\n f.write(create_line(*CoverBenchmark.output_header(),\n 'Number of Communities per Node'))\n\n\ndef count_benchmark_cover(result_dir, calc_f1, benchmark):\n cover = benchmark.get_cover()\n ground_truth = benchmark.get_ground_truth()\n comm_map = get_communities(benchmark.get_graph(), cover)\n gt_map = get_communities(benchmark.get_graph(), ground_truth)\n comm_sizes = cover.subsetSizeMap()\n with open(result_dir + 'cover_num_comms.result', 'a') as f:\n f.write(create_line(*benchmark.output_line(), cover.numberOfSubsets()))\n with open(result_dir + 'cover_comm_sizes.result', 'a') as f:\n for u in cover.getSubsetIds():\n comm = comm_map[u]\n size = comm_sizes[u]\n f1 = f1_score(comm, gt_map) if calc_f1 else 0\n f.write(create_line(*benchmark.output_line(), log2(size), f1))\n with open(result_dir + 'cover_node_comms.result', 'a') as f:\n for u in benchmark.get_graph().nodes():\n num_comms = len(cover.subsetsOf(u))\n if num_comms > 0:\n f.write(create_line(*benchmark.output_line(), log2(num_comms)))\n\n\ndef get_communities(graph, cover):\n comm_map = defaultdict(lambda : set())\n for u in graph.nodes():\n comms = cover.subsetsOf(u)\n for c in comms:\n comm_map[c].add(u)\n return comm_map\n\n\ndef f1_score(community, ground_truth):\n max_f1 = 0.0\n for gt_comm in ground_truth.values():\n overlap = len(gt_comm.intersection(community))\n if overlap == 0:\n continue\n precision = overlap / len(community)\n recall = overlap / len(gt_comm)\n f1 = 2 * precision * recall / (precision + recall)\n max_f1 = max(max_f1, f1)\n return max_f1\n",
"step-5": "from math import log2\n\nfrom egosplit.benchmarks.data_structures.cover_benchmark import *\nfrom egosplit.benchmarks.evaluation.utility import create_line\nfrom networkit.stopwatch import clockit\n\n\n# Analyse the result cover of a benchmark run\n@clockit\ndef analyze_cover(benchmarks, result_dir, calc_f1, append):\n\tif not append:\n\t\tprint_headers(result_dir)\n\n\tfor benchmark in benchmarks:\n\t\tcount_benchmark_cover(result_dir, calc_f1, benchmark)\n\n\n# Print output file headers\ndef print_headers(result_dir):\n\twith open(result_dir + 'cover_num_comms.result', 'w') as f:\n\t\tf.write(create_line(*CoverBenchmark.output_header(), 'Number of Communities'))\n\twith open(result_dir + 'cover_comm_sizes.result', 'w') as f:\n\t\tf.write(create_line(*CoverBenchmark.output_header(), 'Community Size', 'F1 Score'))\n\twith open(result_dir + 'cover_node_comms.result', 'w') as f:\n\t\tf.write(create_line(*CoverBenchmark.output_header(), 'Number of Communities per Node'))\n\n\n# Count the number of communities and their sizes\ndef count_benchmark_cover(result_dir, calc_f1, benchmark):\n\tcover = benchmark.get_cover()\n\tground_truth = benchmark.get_ground_truth()\n\tcomm_map = get_communities(benchmark.get_graph(), cover)\n\tgt_map = get_communities(benchmark.get_graph(), ground_truth)\n\tcomm_sizes = cover.subsetSizeMap()\n\n\t# Number of communities\n\twith open(result_dir + 'cover_num_comms.result', 'a') as f:\n\t\tf.write(create_line(*benchmark.output_line(), cover.numberOfSubsets()))\n\n\t# Community sizes and F1 scores\n\twith open(result_dir + 'cover_comm_sizes.result', 'a') as f:\n\t\tfor u in cover.getSubsetIds():\n\t\t\tcomm = comm_map[u]\n\t\t\tsize = comm_sizes[u]\n\t\t\tf1 = f1_score(comm, gt_map) if calc_f1 else 0\n\t\t\tf.write(create_line(*benchmark.output_line(), log2(size), f1))\n\n\t# Number of Communities per Node\n\twith open(result_dir + 'cover_node_comms.result', 'a') as f:\n\t\tfor u in benchmark.get_graph().nodes():\n\t\t\tnum_comms = len(cover.subsetsOf(u))\n\t\t\tif num_comms > 0:\n\t\t\t\tf.write(create_line(*benchmark.output_line(), log2(num_comms)))\n\n\ndef get_communities(graph, cover):\n\tcomm_map = defaultdict(lambda: set())\n\tfor u in graph.nodes():\n\t\tcomms = cover.subsetsOf(u)\n\t\tfor c in comms:\n\t\t\tcomm_map[c].add(u)\n\n\treturn comm_map\n\n\ndef f1_score(community, ground_truth):\n\tmax_f1 = 0.0\n\tfor gt_comm in ground_truth.values():\n\t\toverlap = len(gt_comm.intersection(community))\n\t\tif overlap == 0:\n\t\t\tcontinue\n\t\tprecision = overlap / len(community)\n\t\trecall = overlap / len(gt_comm)\n\t\tf1 = 2 * precision * recall / (precision + recall)\n\t\tmax_f1 = max(max_f1, f1)\n\n\treturn max_f1\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
2,
3,
5,
6,
7
]
}
|
[
2,
3,
5,
6,
7
] |
from django.db import models
# Create your models here.
class Author(models.Model):
    AuthorID = models.IntegerField(primary_key=True)
    Name = models.CharField(max_length=200)
    # max_length is not a valid option for IntegerField or FloatField
    # (Django ignores it), so it has been dropped from those fields below
    Age = models.IntegerField()
    Country = models.CharField(max_length=100)
class Book(models.Model):
    ISBN = models.CharField(primary_key=True, max_length=100)
    Title = models.CharField(max_length=200)
    AuthorID = models.IntegerField()
    Publisher = models.CharField(max_length=200)
    PublishDate = models.CharField(max_length=200)
    Price = models.FloatField()
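# Editorial note: Book.AuthorID stores a bare integer; in idiomatic Django the
# relation to Author would usually be declared as a ForeignKey instead, e.g.
# (sketch only):
#
#   AuthorID = models.ForeignKey(Author, on_delete=models.CASCADE)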
|
normal
|
{
"blob_id": "817d7259b3607f3a94d2f363c9684f733ee87d37",
"index": 2124,
"step-1": "<mask token>\n\n\nclass Book(models.Model):\n ISBN = models.CharField(primary_key=True, max_length=100)\n Title = models.CharField(max_length=200)\n AuthorID = models.IntegerField(max_length=100)\n Publisher = models.CharField(max_length=200)\n PublishDate = models.CharField(max_length=200)\n Price = models.FloatField(max_length=200)\n",
"step-2": "<mask token>\n\n\nclass Author(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Book(models.Model):\n ISBN = models.CharField(primary_key=True, max_length=100)\n Title = models.CharField(max_length=200)\n AuthorID = models.IntegerField(max_length=100)\n Publisher = models.CharField(max_length=200)\n PublishDate = models.CharField(max_length=200)\n Price = models.FloatField(max_length=200)\n",
"step-3": "<mask token>\n\n\nclass Author(models.Model):\n AuthorID = models.IntegerField(primary_key=True)\n Name = models.CharField(max_length=200)\n Age = models.IntegerField(max_length=50)\n Country = models.CharField(max_length=100)\n\n\nclass Book(models.Model):\n ISBN = models.CharField(primary_key=True, max_length=100)\n Title = models.CharField(max_length=200)\n AuthorID = models.IntegerField(max_length=100)\n Publisher = models.CharField(max_length=200)\n PublishDate = models.CharField(max_length=200)\n Price = models.FloatField(max_length=200)\n",
"step-4": "from django.db import models\n\n\nclass Author(models.Model):\n AuthorID = models.IntegerField(primary_key=True)\n Name = models.CharField(max_length=200)\n Age = models.IntegerField(max_length=50)\n Country = models.CharField(max_length=100)\n\n\nclass Book(models.Model):\n ISBN = models.CharField(primary_key=True, max_length=100)\n Title = models.CharField(max_length=200)\n AuthorID = models.IntegerField(max_length=100)\n Publisher = models.CharField(max_length=200)\n PublishDate = models.CharField(max_length=200)\n Price = models.FloatField(max_length=200)\n",
"step-5": "from django.db import models\n\n# Create your models here.\nclass Author(models.Model):\n AuthorID = models.IntegerField(primary_key=True)\n Name = models.CharField(max_length=200)\n Age = models.IntegerField(max_length=50)\n Country = models.CharField(max_length=100)\n\nclass Book(models.Model):\n ISBN = models.CharField(primary_key=True,max_length=100)\n Title = models.CharField(max_length=200)\n AuthorID = models.IntegerField(max_length=100)\n Publisher = models.CharField(max_length=200)\n PublishDate = models.CharField(max_length=200)\n Price = models.FloatField(max_length=200)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import backtrader as bt
class RSIStrategy(bt.Strategy):
def __init__(self):
self.order = None
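        # note (editorial): self.position is managed by the broker, so
        # assigning to its size here is unusual and is not the supported
        # way to reset a position in backtrader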
self.position.size = 0
self.sellAlert1 = False
self.sellAlert2 = False
self.buyAlert = False
self.failureNum = 0
self.successNum = 0
self.rsi_1 = bt.ind.RSI(self.datas[0].close, period=7)
self.rsi_2 = bt.ind.RSI(self.datas[1].close, period=7)
self.rsi_3 = bt.ind.RSI(self.datas[2].close, period=7)
self.rsi_4 = bt.ind.RSI(self.datas[3].close, period=7)
def log(self, txt, dt=None):
dt = dt or self.datas[0].datetime.date(0)
print('%s, %s' % (dt.isoformat(), txt))
def notify_order(self, order):
if order.status in [order.Completed]:
if order.isbuy():
return self.log(
'BUY Executed at price: {} with size: {}'.format(order.executed.price, order.executed.size))
elif order.issell():
print('Succeeded for {} times.'.format(self.successNum))
return self.log(
'SELL Executed at price: {} with size: {}'.format(order.executed.price, order.executed.size))
def next(self):
"""Here the conditions for openinng and closing a position have been set."""
if self.position.size == 0:
# The condition for activating BUY function --> By checking oversold condition.
if self.rsi_2 < 30 and self.rsi_3 < 40:
self.buyAlert = True
            # If BUY is activated and the conditions below are met, then a buy order will be placed.
if self.rsi_1 < 50 and self.rsi_2 > 30 and self.rsi_3 > 25 and self.buyAlert:
size = round((self.broker.getcash() / self.data), 3)
self.order = self.buy(size=size)
self.buyAlert = False
print(round(self.broker.get_cash(), 1))
# print(self.datas[0].low[0])
if self.position.size != 0:
# The condition for activating SELL_1 function --> Waiting for RSI to reach overbought zone.
if self.rsi_4 > 67:
self.sellAlert1 = True
# If SELL_1 is activated and below conditions are met, then a sell order would be placed.
if (self.rsi_1 < 70 and self.rsi_4 < 60) and self.sellAlert1:
self.close()
self.successNum += 1
self.sellAlert1 = False
# The condition for activating SELL_2 function --> Activated at overbought condition with RSI>85
if self.rsi_4 > 85:
self.sellAlert2 = True
# If SELL_2 is activated and below conditions are met, then a sell order would be placed.
if (self.rsi_4 < 80) and self.sellAlert2:
self.close()
self.successNum += 1
self.sellAlert1 = False
self.sellAlert2 = False
# Setting Stop Loss for wrongly opened position.
if 0.82 * self.order.executed.price > self.datas[0].close > 0.8 * self.order.executed.price:
self.close()
self.failureNum += 1
print('Shit !!! Failed for {} times.'.format(self.failureNum))
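# Editorial sketch (not part of the original record): RSIStrategy indexes four
# data feeds (self.datas[0..3]), so a minimal driver would attach four feeds
# before running. The feed class, file names, and cash amount below are
# assumptions for illustration only.
if __name__ == '__main__':
    cerebro = bt.Cerebro()
    for path in ('tf1.csv', 'tf2.csv', 'tf3.csv', 'tf4.csv'):  # hypothetical CSVs
        cerebro.adddata(bt.feeds.GenericCSVData(dataname=path))
    cerebro.addstrategy(RSIStrategy)
    cerebro.broker.setcash(10000.0)
    cerebro.run()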
|
normal
|
{
"blob_id": "9119fc1c75de980bbcf74f1e06a36ba587fc490b",
"index": 102,
"step-1": "<mask token>\n\n\nclass RSIStrategy(bt.Strategy):\n\n def __init__(self):\n self.order = None\n self.position.size = 0\n self.sellAlert1 = False\n self.sellAlert2 = False\n self.buyAlert = False\n self.failureNum = 0\n self.successNum = 0\n self.rsi_1 = bt.ind.RSI(self.datas[0].close, period=7)\n self.rsi_2 = bt.ind.RSI(self.datas[1].close, period=7)\n self.rsi_3 = bt.ind.RSI(self.datas[2].close, period=7)\n self.rsi_4 = bt.ind.RSI(self.datas[3].close, period=7)\n <mask token>\n\n def notify_order(self, order):\n if order.status in [order.Completed]:\n if order.isbuy():\n return self.log('BUY Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n elif order.issell():\n print('Succeeded for {} times.'.format(self.successNum))\n return self.log('SELL Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass RSIStrategy(bt.Strategy):\n\n def __init__(self):\n self.order = None\n self.position.size = 0\n self.sellAlert1 = False\n self.sellAlert2 = False\n self.buyAlert = False\n self.failureNum = 0\n self.successNum = 0\n self.rsi_1 = bt.ind.RSI(self.datas[0].close, period=7)\n self.rsi_2 = bt.ind.RSI(self.datas[1].close, period=7)\n self.rsi_3 = bt.ind.RSI(self.datas[2].close, period=7)\n self.rsi_4 = bt.ind.RSI(self.datas[3].close, period=7)\n <mask token>\n\n def notify_order(self, order):\n if order.status in [order.Completed]:\n if order.isbuy():\n return self.log('BUY Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n elif order.issell():\n print('Succeeded for {} times.'.format(self.successNum))\n return self.log('SELL Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n\n def next(self):\n \"\"\"Here the conditions for openinng and closing a position have been set.\"\"\"\n if self.position.size == 0:\n if self.rsi_2 < 30 and self.rsi_3 < 40:\n self.buyAlert = True\n if (self.rsi_1 < 50 and self.rsi_2 > 30 and self.rsi_3 > 25 and\n self.buyAlert):\n size = round(self.broker.getcash() / self.data, 3)\n self.order = self.buy(size=size)\n self.buyAlert = False\n print(round(self.broker.get_cash(), 1))\n if self.position.size != 0:\n if self.rsi_4 > 67:\n self.sellAlert1 = True\n if (self.rsi_1 < 70 and self.rsi_4 < 60) and self.sellAlert1:\n self.close()\n self.successNum += 1\n self.sellAlert1 = False\n if self.rsi_4 > 85:\n self.sellAlert2 = True\n if self.rsi_4 < 80 and self.sellAlert2:\n self.close()\n self.successNum += 1\n self.sellAlert1 = False\n self.sellAlert2 = False\n if 0.82 * self.order.executed.price > self.datas[0\n ].close > 0.8 * self.order.executed.price:\n self.close()\n self.failureNum += 1\n print('Shit !!! Failed for {} times.'.format(self.failureNum))\n",
"step-3": "<mask token>\n\n\nclass RSIStrategy(bt.Strategy):\n\n def __init__(self):\n self.order = None\n self.position.size = 0\n self.sellAlert1 = False\n self.sellAlert2 = False\n self.buyAlert = False\n self.failureNum = 0\n self.successNum = 0\n self.rsi_1 = bt.ind.RSI(self.datas[0].close, period=7)\n self.rsi_2 = bt.ind.RSI(self.datas[1].close, period=7)\n self.rsi_3 = bt.ind.RSI(self.datas[2].close, period=7)\n self.rsi_4 = bt.ind.RSI(self.datas[3].close, period=7)\n\n def log(self, txt, dt=None):\n dt = dt or self.datas[0].datetime.date(0)\n print('%s, %s' % (dt.isoformat(), txt))\n\n def notify_order(self, order):\n if order.status in [order.Completed]:\n if order.isbuy():\n return self.log('BUY Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n elif order.issell():\n print('Succeeded for {} times.'.format(self.successNum))\n return self.log('SELL Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n\n def next(self):\n \"\"\"Here the conditions for openinng and closing a position have been set.\"\"\"\n if self.position.size == 0:\n if self.rsi_2 < 30 and self.rsi_3 < 40:\n self.buyAlert = True\n if (self.rsi_1 < 50 and self.rsi_2 > 30 and self.rsi_3 > 25 and\n self.buyAlert):\n size = round(self.broker.getcash() / self.data, 3)\n self.order = self.buy(size=size)\n self.buyAlert = False\n print(round(self.broker.get_cash(), 1))\n if self.position.size != 0:\n if self.rsi_4 > 67:\n self.sellAlert1 = True\n if (self.rsi_1 < 70 and self.rsi_4 < 60) and self.sellAlert1:\n self.close()\n self.successNum += 1\n self.sellAlert1 = False\n if self.rsi_4 > 85:\n self.sellAlert2 = True\n if self.rsi_4 < 80 and self.sellAlert2:\n self.close()\n self.successNum += 1\n self.sellAlert1 = False\n self.sellAlert2 = False\n if 0.82 * self.order.executed.price > self.datas[0\n ].close > 0.8 * self.order.executed.price:\n self.close()\n self.failureNum += 1\n print('Shit !!! Failed for {} times.'.format(self.failureNum))\n",
"step-4": "import backtrader as bt\n\n\nclass RSIStrategy(bt.Strategy):\n\n def __init__(self):\n self.order = None\n self.position.size = 0\n self.sellAlert1 = False\n self.sellAlert2 = False\n self.buyAlert = False\n self.failureNum = 0\n self.successNum = 0\n self.rsi_1 = bt.ind.RSI(self.datas[0].close, period=7)\n self.rsi_2 = bt.ind.RSI(self.datas[1].close, period=7)\n self.rsi_3 = bt.ind.RSI(self.datas[2].close, period=7)\n self.rsi_4 = bt.ind.RSI(self.datas[3].close, period=7)\n\n def log(self, txt, dt=None):\n dt = dt or self.datas[0].datetime.date(0)\n print('%s, %s' % (dt.isoformat(), txt))\n\n def notify_order(self, order):\n if order.status in [order.Completed]:\n if order.isbuy():\n return self.log('BUY Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n elif order.issell():\n print('Succeeded for {} times.'.format(self.successNum))\n return self.log('SELL Executed at price: {} with size: {}'.\n format(order.executed.price, order.executed.size))\n\n def next(self):\n \"\"\"Here the conditions for openinng and closing a position have been set.\"\"\"\n if self.position.size == 0:\n if self.rsi_2 < 30 and self.rsi_3 < 40:\n self.buyAlert = True\n if (self.rsi_1 < 50 and self.rsi_2 > 30 and self.rsi_3 > 25 and\n self.buyAlert):\n size = round(self.broker.getcash() / self.data, 3)\n self.order = self.buy(size=size)\n self.buyAlert = False\n print(round(self.broker.get_cash(), 1))\n if self.position.size != 0:\n if self.rsi_4 > 67:\n self.sellAlert1 = True\n if (self.rsi_1 < 70 and self.rsi_4 < 60) and self.sellAlert1:\n self.close()\n self.successNum += 1\n self.sellAlert1 = False\n if self.rsi_4 > 85:\n self.sellAlert2 = True\n if self.rsi_4 < 80 and self.sellAlert2:\n self.close()\n self.successNum += 1\n self.sellAlert1 = False\n self.sellAlert2 = False\n if 0.82 * self.order.executed.price > self.datas[0\n ].close > 0.8 * self.order.executed.price:\n self.close()\n self.failureNum += 1\n print('Shit !!! Failed for {} times.'.format(self.failureNum))\n",
"step-5": "import backtrader as bt\r\n\r\n\r\nclass RSIStrategy(bt.Strategy):\r\n\r\n def __init__(self):\r\n self.order = None\r\n self.position.size = 0\r\n self.sellAlert1 = False\r\n self.sellAlert2 = False\r\n self.buyAlert = False\r\n self.failureNum = 0\r\n self.successNum = 0\r\n self.rsi_1 = bt.ind.RSI(self.datas[0].close, period=7)\r\n self.rsi_2 = bt.ind.RSI(self.datas[1].close, period=7)\r\n self.rsi_3 = bt.ind.RSI(self.datas[2].close, period=7)\r\n self.rsi_4 = bt.ind.RSI(self.datas[3].close, period=7)\r\n\r\n def log(self, txt, dt=None):\r\n dt = dt or self.datas[0].datetime.date(0)\r\n print('%s, %s' % (dt.isoformat(), txt))\r\n\r\n def notify_order(self, order):\r\n if order.status in [order.Completed]:\r\n if order.isbuy():\r\n return self.log(\r\n 'BUY Executed at price: {} with size: {}'.format(order.executed.price, order.executed.size))\r\n\r\n elif order.issell():\r\n print('Succeeded for {} times.'.format(self.successNum))\r\n return self.log(\r\n 'SELL Executed at price: {} with size: {}'.format(order.executed.price, order.executed.size))\r\n\r\n def next(self):\r\n \"\"\"Here the conditions for openinng and closing a position have been set.\"\"\"\r\n if self.position.size == 0:\r\n # The condition for activating BUY function --> By checking oversold condition.\r\n if self.rsi_2 < 30 and self.rsi_3 < 40:\r\n self.buyAlert = True\r\n # If BUY is activated and below conditions are met, then aa buy order would be placed.\r\n if self.rsi_1 < 50 and self.rsi_2 > 30 and self.rsi_3 > 25 and self.buyAlert:\r\n size = round((self.broker.getcash() / self.data), 3)\r\n self.order = self.buy(size=size)\r\n self.buyAlert = False\r\n print(round(self.broker.get_cash(), 1))\r\n # print(self.datas[0].low[0])\r\n\r\n if self.position.size != 0:\r\n # The condition for activating SELL_1 function --> Waiting for RSI to reach overbought zone.\r\n if self.rsi_4 > 67:\r\n self.sellAlert1 = True\r\n # If SELL_1 is activated and below conditions are met, then a sell order would be placed.\r\n if (self.rsi_1 < 70 and self.rsi_4 < 60) and self.sellAlert1:\r\n self.close()\r\n self.successNum += 1\r\n self.sellAlert1 = False\r\n\r\n # The condition for activating SELL_2 function --> Activated at overbought condition with RSI>85\r\n if self.rsi_4 > 85:\r\n self.sellAlert2 = True\r\n # If SELL_2 is activated and below conditions are met, then a sell order would be placed.\r\n if (self.rsi_4 < 80) and self.sellAlert2:\r\n self.close()\r\n self.successNum += 1\r\n self.sellAlert1 = False\r\n self.sellAlert2 = False\r\n\r\n # Setting Stop Loss for wrongly opened position.\r\n if 0.82 * self.order.executed.price > self.datas[0].close > 0.8 * self.order.executed.price:\r\n self.close()\r\n self.failureNum += 1\r\n print('Shit !!! Failed for {} times.'.format(self.failureNum))\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from scrapera.image.duckduckgo import DuckDuckGoScraper
scraper = DuckDuckGoScraper()
scraper.scrape('spongebob squarepants', 1, r'path/to/output/directory')
|
normal
|
{
"blob_id": "d234034f7f232e842d0b4e465ea6ec314af6964d",
"index": 4209,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nscraper.scrape('spongebob squarepants', 1, 'path/to/output/directory')\n",
"step-3": "<mask token>\nscraper = DuckDuckGoScraper()\nscraper.scrape('spongebob squarepants', 1, 'path/to/output/directory')\n",
"step-4": "from scrapera.image.duckduckgo import DuckDuckGoScraper\nscraper = DuckDuckGoScraper()\nscraper.scrape('spongebob squarepants', 1, 'path/to/output/directory')\n",
"step-5": "from scrapera.image.duckduckgo import DuckDuckGoScraper\n\nscraper = DuckDuckGoScraper()\nscraper.scrape('spongebob squarepants', 1, r'path/to/output/directory')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# common methods to delete data from list
fruits = ['orange', 'apple', 'pear', 'banana', 'kiwi']
# pop method
# fruits.pop(1)
# del
# del fruits[1]
# remove
# fruits.remove('banana')
# append, extend, insert
# pop, remove, del
print(fruits)
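# Editorial demo of the three (commented-out) approaches above, on a copy:
demo = list(fruits)
demo.pop(1)            # remove by index; returns the removed item
del demo[0]            # remove by index via the del statement
demo.remove('banana')  # remove the first occurrence of a value
print(demo)            # -> ['pear', 'kiwi']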
|
normal
|
{
"blob_id": "a245cb1f232b152edf40b6399686c6811c522d99",
"index": 6458,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(fruits)\n",
"step-3": "fruits = ['orange', ' apple', 'pear', 'banana', 'kiwi']\nprint(fruits)\n",
"step-4": "# common methods to delete data from list\r\nfruits = ['orange', ' apple', 'pear', 'banana', 'kiwi']\r\n#pop method\r\n# fruits.pop(1)\r\n\r\n\r\n# del\r\n# del fruits[1]\r\n\r\n# remove\r\n\r\n# fruits.remove('banana')\r\n\r\n# append, extend, insert\r\n# pop, remove, del\r\n\r\nprint(fruits)\r\n\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
message = input()
vowel = 'aeiouAEIOU'
consonant = 'bcdfghjklmnpqrstvwxyz'
consonant += consonant.upper()
vowel_count = 0
consonant_count = 0
for c in message:
if c in vowel:
vowel_count += 1
elif c in consonant:
consonant_count += 1
print(vowel_count, consonant_count)
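# Editorial note: characters that are neither vowels nor consonants (digits,
# spaces, punctuation) are counted in neither tally; e.g. the input
# "Hello, World!" prints "3 7".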
|
normal
|
{
"blob_id": "edf704d720abdb09d176937664c9ba98bcd253a5",
"index": 8320,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nconsonant += consonant.upper()\n<mask token>\nfor c in message:\n if c in vowel:\n vowel_count += 1\n elif c in consonant:\n consonant_count += 1\nprint(vowel_count, consonant_count)\n",
"step-3": "message = input()\nvowel = 'aeiouAEIOU'\nconsonant = 'bcdfghjklmnpqrstvwxyz'\nconsonant += consonant.upper()\nvowel_count = 0\nconsonant_count = 0\nfor c in message:\n if c in vowel:\n vowel_count += 1\n elif c in consonant:\n consonant_count += 1\nprint(vowel_count, consonant_count)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import hlp
import pdb
class Nnt(list):
"""
Generic layer of neural network
"""
def __init__(self):
"""
Initialize the neural network base object.
"""
self.tag = None
def y(self, x):
"""
        build symbolic expression of output {y} given input {x};
        this is also the default expression returned when the Nnt object
        is called as a function
"""
return x
def __call__(self, x):
"""
build symbolic expression of output given input. This makes the
object callable.
"""
return self.y(x)
def p(self):
"""
return independent parameters - the shared tensor variables in
output {y}'s expression.
"""
return hlp.parms(self.y(0))
def __repr__(self):
return '{}{}'.format(
"" if self.tag is None else self.tag,
super(Nnt, self).__repr__())
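# Editorial sketch (an assumption, not from the original module): Nnt is meant
# to be subclassed, overriding y() to build the layer's expression, e.g.:
#
#   class Scale(Nnt):
#       def __init__(self, k):
#           super(Scale, self).__init__()
#           self.k = k
#
#       def y(self, x):
#           return self.k * x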
|
normal
|
{
"blob_id": "fb53ea6a7184c0b06fb8a4cbfaf2145cc5c2e8e2",
"index": 9468,
"step-1": "<mask token>\n\n\nclass Nnt(list):\n <mask token>\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n <mask token>\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-2": "<mask token>\n\n\nclass Nnt(list):\n <mask token>\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n\n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-3": "<mask token>\n\n\nclass Nnt(list):\n \"\"\"\n Generic layer of neural network\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n\n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-4": "import hlp\nimport pdb\n\n\nclass Nnt(list):\n \"\"\"\n Generic layer of neural network\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n\n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format('' if self.tag is None else self.tag, super(\n Nnt, self).__repr__())\n",
"step-5": "import hlp\nimport pdb\n\nclass Nnt(list):\n \"\"\"\n Generic layer of neural network\n \"\"\"\n def __init__(self):\n \"\"\"\n Initialize the neural network base object.\n \"\"\"\n self.tag = None\n\n def y(self, x):\n \"\"\"\n build sybolic expression of output {y} given input {x}\n this also the defaut expression returned when the Net object is\n called as a function\n \"\"\"\n return x\n\n def __call__(self, x):\n \"\"\"\n build symbolic expression of output given input. This makes the\n object callable.\n \"\"\"\n return self.y(x)\n \n def p(self):\n \"\"\"\n return independent parameters - the shared tensor variables in\n output {y}'s expression.\n \"\"\"\n return hlp.parms(self.y(0))\n\n def __repr__(self):\n return '{}{}'.format(\n \"\" if self.tag is None else self.tag,\n super(Nnt, self).__repr__())\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
import torch
import numpy as np
import h5py
from torch.utils.data import Dataset, DataLoader
from config import PARAS
"""
Be careful:
We use log mel-spectrogram for training,
while the mask generated is for power mel-spectrogram
"""
def create_gt_mask(vocal_spec, bg_spec):
"""
    Take in two log spectrograms and return a mask map for TF bins:
    1 if the vocal sound dominates in the TF bin, 0 otherwise
"""
vocal_spec = vocal_spec.numpy()
bg_spec = bg_spec.numpy()
return np.array(vocal_spec > bg_spec, dtype=np.float32)
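# Note: the inputs above are log mel-spectrograms, but log() is monotonic, so
# comparing log magnitudes yields the same binary mask as comparing the
# underlying power mel-spectrograms (cf. the module docstring above).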
class TorchData(Dataset):
def __init__(self, dataset_path):
"""
        Open the h5py dataset at the given path
"""
super(TorchData, self).__init__()
self.dataset = h5py.File(dataset_path, 'r')
self.bg = self.dataset['bg']
self.vocal = self.dataset['vocal']
self.mix = self.dataset['mix']
self.len = self.bg.shape[0]
def __len__(self):
return self.len
def __getitem__(self, index):
bg = self.bg[index].astype(np.float32)
vocal = self.vocal[index].astype(np.float32)
mix = self.mix[index].astype(np.float32)
mix = torch.from_numpy(mix)
bg = torch.from_numpy(bg)
vocal = torch.from_numpy(vocal)
target = torch.from_numpy(create_gt_mask(vocal, bg))
sample = {
'vocal': vocal, # this is used for test
'bg': bg, # this is used for test
'mix': mix,
'target': target,
}
return sample
# define the data loaders
def torch_dataset_loader(dataset, batch_size, shuffle, kwargs):
"""
    take the path to an h5py dataset and return a DataLoader over it
"""
loader = DataLoader(TorchData(dataset),
batch_size=batch_size,
shuffle=shuffle,
**kwargs)
return loader
train_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE, True, PARAS.kwargs)
validation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)
test_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)
if __name__ == '__main__':
for index, data_item in enumerate(test_loader):
print(data_item['vocal'].shape)
print(data_item['bg'].shape)
print(data_item['mix'].shape)
print(data_item['target'].shape)
break
|
normal
|
{
"blob_id": "1133d3cf900e31278dc491565c99969a116e6c83",
"index": 1998,
"step-1": "<mask token>\n\n\nclass TorchData(Dataset):\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}\n return sample\n\n\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=\n shuffle, **kwargs)\n return loader\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}\n return sample\n\n\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=\n shuffle, **kwargs)\n return loader\n\n\ntrain_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,\n True, PARAS.kwargs)\nvalidation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.\n BATCH_SIZE, False, PARAS.kwargs)\ntest_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, \n False, PARAS.kwargs)\nif __name__ == '__main__':\n for index, data_item in enumerate(test_loader):\n print(data_item['vocal'].shape)\n print(data_item['bg'].shape)\n print(data_item['mix'].shape)\n print(data_item['target'].shape)\n break\n",
"step-4": "import torch\nimport numpy as np\nimport h5py\nfrom torch.utils.data import Dataset, DataLoader\nfrom config import PARAS\n<mask token>\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n sample = {'vocal': vocal, 'bg': bg, 'mix': mix, 'target': target}\n return sample\n\n\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset), batch_size=batch_size, shuffle=\n shuffle, **kwargs)\n return loader\n\n\ntrain_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE,\n True, PARAS.kwargs)\nvalidation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.\n BATCH_SIZE, False, PARAS.kwargs)\ntest_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, \n False, PARAS.kwargs)\nif __name__ == '__main__':\n for index, data_item in enumerate(test_loader):\n print(data_item['vocal'].shape)\n print(data_item['bg'].shape)\n print(data_item['mix'].shape)\n print(data_item['target'].shape)\n break\n",
"step-5": "import torch\nimport numpy as np\nimport h5py\nfrom torch.utils.data import Dataset, DataLoader\nfrom config import PARAS\n\n\"\"\"\nBe careful:\nWe use log mel-spectrogram for training,\nwhile the mask generated is for power mel-spectrogram\n\"\"\"\n\n\ndef create_gt_mask(vocal_spec, bg_spec):\n \"\"\"\n Take in log spectrogram and return a mask map for TF bins\n 1 if the vocal sound is dominated in the TF-bin, while 0 for not\n \"\"\"\n vocal_spec = vocal_spec.numpy()\n bg_spec = bg_spec.numpy()\n return np.array(vocal_spec > bg_spec, dtype=np.float32)\n\n\nclass TorchData(Dataset):\n\n def __init__(self, dataset_path):\n \"\"\"\n Take the h5py dataset\n \"\"\"\n super(TorchData, self).__init__()\n self.dataset = h5py.File(dataset_path, 'r')\n self.bg = self.dataset['bg']\n self.vocal = self.dataset['vocal']\n self.mix = self.dataset['mix']\n self.len = self.bg.shape[0]\n\n def __len__(self):\n return self.len\n\n def __getitem__(self, index):\n bg = self.bg[index].astype(np.float32)\n vocal = self.vocal[index].astype(np.float32)\n mix = self.mix[index].astype(np.float32)\n\n mix = torch.from_numpy(mix)\n bg = torch.from_numpy(bg)\n vocal = torch.from_numpy(vocal)\n target = torch.from_numpy(create_gt_mask(vocal, bg))\n\n sample = {\n 'vocal': vocal, # this is used for test\n 'bg': bg, # this is used for test\n 'mix': mix,\n 'target': target,\n }\n\n return sample\n\n\n# define the data loaders\ndef torch_dataset_loader(dataset, batch_size, shuffle, kwargs):\n \"\"\"\n take the h5py dataset\n \"\"\"\n loader = DataLoader(TorchData(dataset),\n batch_size=batch_size,\n shuffle=shuffle,\n **kwargs)\n return loader\n\n\ntrain_loader = torch_dataset_loader(PARAS.TRAIN_DATA_PATH, PARAS.BATCH_SIZE, True, PARAS.kwargs)\nvalidation_loader = torch_dataset_loader(PARAS.VAL_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)\ntest_loader = torch_dataset_loader(PARAS.TEST_DATA_PATH, PARAS.BATCH_SIZE, False, PARAS.kwargs)\n\n\nif __name__ == '__main__':\n\n for index, data_item in enumerate(test_loader):\n print(data_item['vocal'].shape)\n print(data_item['bg'].shape)\n print(data_item['mix'].shape)\n print(data_item['target'].shape)\n break\n",
"step-ids": [
1,
6,
8,
9,
10
]
}
|
[
1,
6,
8,
9,
10
] |
# -*- coding: utf-8 -*-
"""
CST 383, measles simulation homework
# Here's a question. Suppose 1% of people have measles, that the
# test for measles if 98% accurate if you do have measles, and 98%
# accurate if you don't have measles. Then what is the probability
# that you have measles, given that you have tested positive for them?
#
# Try guessing an answer before you start on this assignment.
#
# In this homework we will use simulation to estimate the answer,
# and we'll also compute the answer using Bayes' Law. There
# are three parts below:
# 1. Warm up by simulating some coin flips.
# 2. Use simulation to answer the question above.
# 3. Use Bayes' Law to answer the question without simulation.
"""
import numpy as np
import matplotlib.pyplot as plt
# Instructions:
# Problems start with #@ and then give a number. Enter your
# Python code after each problem. Do not use any variables
# in your answer except for the ones that the problem says
# you can assume are defined.
#
# Part 1: warmup
#
#@ 1
# Simulate flipping a coin 200 times that has a 90% chance of
# landing heads. Store your result in a NumPy array x of length
# 200 that contains only 0 or 1, where 1 represents heads.
# Use np.random.choice().
# (assignment to x)
x = np.random.choice(2, 200, p=[0.1, 0.9])
#@ 2
# Repeat the problem above, but this time use np.random.sample(),
# which gives values between 0 and 1. Obviously you will need to do
# further processing to turn the output of sample() into your
# array x. This will take a little thought.
# (assignment to x)
x = np.random.sample(size=200)
weights = [0.1, 0.9]
cs = np.cumsum(weights)
def calculated_weights(x):
return sum(cs < x)
vectorized_calculated_weights = np.vectorize(calculated_weights)
x = vectorized_calculated_weights(x)
#@ 3
# compute the fraction of values in array x that are 1.
# (expression)
len(x[x == 1]) / len(x)
#@ 4
# Flip the weighted coin of problem 1 200 times, compute the fraction
# of values that are 1, and repeat this entire process 100 times to
# get an array of length 100. Assign this array to variable y1.
# (assignment to y1)
def t200():
    return np.random.choice(2, 200, p=[0.1, 0.9])
# the mean of a 0/1 array is exactly the fraction of 1's; calling t200()
# once per experiment avoids mixing three independent random draws
y1 = np.array([t200().mean() for i in range(100)])
#@ 5
# plot a histogram of y1 using matplotlib
# (produce a plot)
plt.hist(y1)
plt.title("Fraction of 1's for 200 biased coin tosses a 100 times")
plt.xlabel("Fraction of 1's in a given attempt (of 200 tosses)")
plt.ylabel("frequency")
#@ 6
# compute a NumPy array y2 that is just like y1, except that in creating y2
# we do 1000 coin flips in each experiment, not 200.
# (assignment to y2)
def t1000():
    return np.random.choice(2, 1000, p=[0.1, 0.9])
y2 = np.array([t1000().mean() for i in range(100)])
#@ 7
# plot histograms for y1 and y2, with the histogram for y1 above
# the plot for y2. Our lecture notes show how to do this; see
# the 'multiple subplots' slide. Use matplotlib. In both histograms,
# let the x axis values range from 0.85 to 0.95. Please study
# the two histograms and think about why they are different.
# Assume y1 and y2 are defined.
# (produce a plot)
fig, ax = plt.subplots(2)
fig.suptitle("Histograms for Y1 and Y2")
ax[0].hist(y1)
ax[1].hist(y2)
ax[0].set_xlim([0.85, 0.95])
ax[1].set_xlim([0.85, 0.95])
#
# Part 2 - simulate the answer to the question
#
#@ 8
# Simulate the overall occurrence of measles among 10,000 people,
# based on the assumption that each person has a 1% chance of
# having measles.
# Compute a NumPy array x of length 10,000, where each value is
# either 0 or 1. Each of the 10,000 values should be found by
# "flipping a 0/1 coin" that is weighted 99% to 0. Approximately
# 99% of the values in x should be 0, and the others should be one.
# (assignment to x)
x = np.random.choice(2, 10000, p=[0.99, 0.01])
#@ 9
# Simulate the measles test results on the people without measles,
# based on the assumption that the measles test gives the right
# answer about 95% of the time on people without measles.
# Create an array y0, which is as long as the number of 0's in
# array x, by flipping a 0/1 coin that is weighted 95% to 0.
# Assume x is defined.
# (assignment to y0)
y0 = np.random.choice(2, len(x[x==0]), p=[0.95, 0.05])
#@ 10
# Simulate the measles test results on the people with measles,
# based on the assumption that the measles test gives the right
# answer about 98% of the time on people with measles.
# Create an array y1, which is as long as the number of 1's in
# array x, by flipping a 0/1 coin that is weighted 98% to 1.
# Assume x is defined.
# (assignment to y1)
y1 = np.random.choice(2, len(x[x==1]), p=[0.02, 0.98])
#@ 11
# Collect the measles-free people among those who tested positive.
# Compute a vector pos_no_meas that is all 0's, and is as long as the
# number of 1's in y0.
# Assume y0 is defined.
# (assignment to pos_no_meas)
pos_no_meas = np.zeros(len(y0[y0==1]))
#@ 12
# Collect the measles-infected people among those who tested positive.
# Compute a vector pos_with_meas that is all 1's, and is as long as
# the number of 1's in y1.
# Assume y1 is defined.
# (assignment to pos_with_meas)
pos_with_meas = np.ones(len(y1[y1==1]))
#@ 13
# Collect information about all people who tested positive.
# Concatenate arrays pos_no_meas and pos_with_meas, and assign
# the result to array 'tested_pos'. A 0 in in this array means
# no measles; a 1 means measles.
# Assume pos_no_meas and pos_with_meas are defined.
# (assignment to tested_pos)
tested_pos = np.concatenate((pos_no_meas, pos_with_meas))
#@ 14
# Estimate the probability of having measles if you've tested
# positive for measles. Compute the fraction of values in
# tested_pos that are 1, and assign the result to
# variable 'p'.
# Assume tested_pos is defined.
# (assignment to p)
p = len(tested_pos[tested_pos == 1]) / len(tested_pos)
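# (Editorial note: with prob_cond = 0.01, sensitivity 0.98 and specificity
# 0.95, the exact answer is 0.98*0.01 / (0.98*0.01 + 0.05*0.99), roughly
# 0.165, so the simulated p should land in that neighborhood.)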
#@ 15
# Package up your code into a function 'prob_cond_given_pos'. This
# function will return the probability of having a condition, based
# on certain probabilities.
# The function should have the following parameters:
# prob_cond - probability of a condition (above you used 0.01)
# prob_pos_given_cond - probability of testing positive given condition (you used 0.98)
# prob_neg_given_no_cond - probability of testing negative given no condition (you used 0.95)
# The function must return the probability of having the condition.
#
# Your function should return a slightly different value every time.
# When you run prob_cond_given_pos(0.01, 0.98, 0.95), you should get an answer
# similar to the value of p you just computed.
#
# Here is the output from tests I ran with my code:
# test 1:
# np.array([prob_cond_given_pos(0.5, 0.9, 0.8) for i in range(1000)]).mean()
# output: 0.8180582615720287
# test 2:
# np.array([prob_cond_given_pos(0.3, 0.8, 0.7) for i in range(1000)]).mean()
# output: 0.5334712339397902
# test 3:
# np.array([prob_cond_given_pos(0.5, 0.9, 0.8) for i in range(100)]).std()
# output: 0.00550051982001144
#
## I provided the function header. You should fill out the function body,
# including the return statement.
# (define a function)
def prob_cond_given_pos(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond):
x = np.random.choice(2, 10000, p=[1-prob_cond, prob_cond])
y0 = np.random.choice(2, len(x[x==0]), p=[prob_neg_given_no_cond, 1-prob_neg_given_no_cond])
y1 = np.random.choice(2, len(x[x==1]), p=[1-prob_pos_given_cond, prob_pos_given_cond])
pos_no_meas = np.zeros(len(y0[y0==1]))
pos_with_meas = np.ones(len(y1[y1==1]))
tested_pos = np.concatenate((pos_no_meas, pos_with_meas))
return len(tested_pos[tested_pos == 1]) / len(tested_pos)
#
# Part 3 - compute the answer using Bayes' Law
#
#@ 16
# Write a function 'prob_cond_given_pos_bayes'. This function
# will take the same parameters as prob_cond_given_pos, but will
# use Bayes' Law to compute the result.
#
# Here is some output from my code:
# test1:
# prob_cond_given_pos_bayes(0.5, 0.9, 0.8)
# output: 0.8181...
# test 2:
# prob_cond_given_pos_bayes(0.3, 0.8, 0.7)
# output: 0.5333...
#
# I provided the function header. You should fill out the function body,
# including the return statement.
# (define a function)
def prob_cond_given_pos_bayes(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond):
return (prob_pos_given_cond*prob_cond) / ((prob_pos_given_cond*prob_cond)+(1-prob_neg_given_no_cond)*(1-prob_cond))
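# Derivation sketch: by Bayes' Law,
#   P(cond | +) = P(+ | cond) P(cond) / P(+),  where
#   P(+) = P(+ | cond) P(cond) + P(+ | no cond) (1 - P(cond))
# and P(+ | no cond) = 1 - prob_neg_given_no_cond.
# Sanity check: prob_cond_given_pos_bayes(0.01, 0.98, 0.95) is about 0.165.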
#@ 17
# How does the probability of having a condition given you
# tested positive for it change based on how rare the
# condition is?
# Produce a plot showing the probability of having measles
# given you tested positive for measles. Compute
# prob_cond_given_pos_bayes(x, 0.98, 0.95) for x ranging
# from 0.001 to 0.10 (x is the probability of having the
# condition). Use at least 100 values of x.
# Plot the results as a scatter plot, with x on the x axis
# and probability on the y axis. Label the x and y axes
# appropriately. Use matplotlib.
# Assume function prob_cond_given_pos_bayes() is defined.
# (produce a plot)
#x = np.arange(0.001, 0.1, ((0.1-0.001)/100))
x = np.linspace(0.001, 0.1, 100)
plt.scatter(x, prob_cond_given_pos_bayes(x, 0.98, 0.95))
plt.xlabel("Probability of condition (%)")
plt.ylabel("Probability of condition if tested positive (%)")
|
normal
|
{
"blob_id": "076d9f0c14a8070993039bbda2ffe4d52c8d2273",
"index": 1512,
"step-1": "<mask token>\n\n\ndef t200():\n return np.random.choice(2, 200, p=[0.1, 0.9])\n\n\n<mask token>\n\n\ndef t1000():\n return np.random.choice(2, 1000, p=[0.1, 0.9])\n\n\n<mask token>\n\n\ndef prob_cond_given_pos(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond\n ):\n x = np.random.choice(2, 10000, p=[1 - prob_cond, prob_cond])\n y0 = np.random.choice(2, len(x[x == 0]), p=[prob_neg_given_no_cond, 1 -\n prob_neg_given_no_cond])\n y1 = np.random.choice(2, len(x[x == 1]), p=[1 - prob_pos_given_cond,\n prob_pos_given_cond])\n pos_no_meas = np.zeros(len(y0[y0 == 1]))\n pos_with_meas = np.ones(len(y1[y1 == 1]))\n tested_pos = np.concatenate((pos_no_meas, pos_with_meas))\n return len(tested_pos[tested_pos == 1]) / len(tested_pos)\n\n\ndef prob_cond_given_pos_bayes(prob_cond, prob_pos_given_cond,\n prob_neg_given_no_cond):\n return prob_pos_given_cond * prob_cond / (prob_pos_given_cond *\n prob_cond + (1 - prob_neg_given_no_cond) * (1 - prob_cond))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef calculated_weights(x):\n return sum(cs < x)\n\n\n<mask token>\nlen(x[x == 1]) / len(x)\n\n\ndef t200():\n return np.random.choice(2, 200, p=[0.1, 0.9])\n\n\n<mask token>\nplt.hist(y1)\nplt.title(\"Fraction of 1's for 200 biased coin tosses a 100 times\")\nplt.xlabel(\"Fraction of 1's in a given attempt (of 200 tosses)\")\nplt.ylabel('frequency')\n\n\ndef t1000():\n return np.random.choice(2, 1000, p=[0.1, 0.9])\n\n\n<mask token>\nfig.suptitle('Histograms for Y1 and Y2')\nax[0].hist(y1)\nax[1].hist(y2)\nax[0].set_xlim([0.85, 0.95])\nax[1].set_xlim([0.85, 0.95])\n<mask token>\n\n\ndef prob_cond_given_pos(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond\n ):\n x = np.random.choice(2, 10000, p=[1 - prob_cond, prob_cond])\n y0 = np.random.choice(2, len(x[x == 0]), p=[prob_neg_given_no_cond, 1 -\n prob_neg_given_no_cond])\n y1 = np.random.choice(2, len(x[x == 1]), p=[1 - prob_pos_given_cond,\n prob_pos_given_cond])\n pos_no_meas = np.zeros(len(y0[y0 == 1]))\n pos_with_meas = np.ones(len(y1[y1 == 1]))\n tested_pos = np.concatenate((pos_no_meas, pos_with_meas))\n return len(tested_pos[tested_pos == 1]) / len(tested_pos)\n\n\ndef prob_cond_given_pos_bayes(prob_cond, prob_pos_given_cond,\n prob_neg_given_no_cond):\n return prob_pos_given_cond * prob_cond / (prob_pos_given_cond *\n prob_cond + (1 - prob_neg_given_no_cond) * (1 - prob_cond))\n\n\n<mask token>\nplt.scatter(x, prob_cond_given_pos_bayes(x, 0.98, 0.95))\nplt.xlabel('Probability of condition (%)')\nplt.ylabel('Probability of condition if tested positive (%)')\n",
"step-3": "<mask token>\nx = np.random.choice(2, 200, p=[0.1, 0.9])\nx = np.random.sample(size=200)\nweights = [0.1, 0.9]\ncs = np.cumsum(weights)\n\n\ndef calculated_weights(x):\n return sum(cs < x)\n\n\nvectroized_calculated_weights = np.vectorize(calculated_weights)\nx = vectroized_calculated_weights(x)\nlen(x[x == 1]) / len(x)\n\n\ndef t200():\n return np.random.choice(2, 200, p=[0.1, 0.9])\n\n\ny1 = np.array([(len(t200()[t200() == 1]) / len(t200())) for i in range(100)])\nplt.hist(y1)\nplt.title(\"Fraction of 1's for 200 biased coin tosses a 100 times\")\nplt.xlabel(\"Fraction of 1's in a given attempt (of 200 tosses)\")\nplt.ylabel('frequency')\n\n\ndef t1000():\n return np.random.choice(2, 1000, p=[0.1, 0.9])\n\n\ny2 = np.array([(len(t1000()[t1000() == 1]) / len(t1000())) for i in range(100)]\n )\nfig, ax = plt.subplots(2)\nfig.suptitle('Histograms for Y1 and Y2')\nax[0].hist(y1)\nax[1].hist(y2)\nax[0].set_xlim([0.85, 0.95])\nax[1].set_xlim([0.85, 0.95])\nx = np.random.choice(2, 10000, p=[0.99, 0.01])\ny0 = np.random.choice(2, len(x[x == 0]), p=[0.95, 0.05])\ny1 = np.random.choice(2, len(x[x == 1]), p=[0.02, 0.98])\npos_no_meas = np.zeros(len(y0[y0 == 1]))\npos_with_meas = np.ones(len(y1[y1 == 1]))\ntested_pos = np.concatenate((pos_no_meas, pos_with_meas))\np = len(tested_pos[tested_pos == 1]) / len(tested_pos)\n\n\ndef prob_cond_given_pos(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond\n ):\n x = np.random.choice(2, 10000, p=[1 - prob_cond, prob_cond])\n y0 = np.random.choice(2, len(x[x == 0]), p=[prob_neg_given_no_cond, 1 -\n prob_neg_given_no_cond])\n y1 = np.random.choice(2, len(x[x == 1]), p=[1 - prob_pos_given_cond,\n prob_pos_given_cond])\n pos_no_meas = np.zeros(len(y0[y0 == 1]))\n pos_with_meas = np.ones(len(y1[y1 == 1]))\n tested_pos = np.concatenate((pos_no_meas, pos_with_meas))\n return len(tested_pos[tested_pos == 1]) / len(tested_pos)\n\n\ndef prob_cond_given_pos_bayes(prob_cond, prob_pos_given_cond,\n prob_neg_given_no_cond):\n return prob_pos_given_cond * prob_cond / (prob_pos_given_cond *\n prob_cond + (1 - prob_neg_given_no_cond) * (1 - prob_cond))\n\n\nx = np.linspace(0.001, 0.1, 100)\nplt.scatter(x, prob_cond_given_pos_bayes(x, 0.98, 0.95))\nplt.xlabel('Probability of condition (%)')\nplt.ylabel('Probability of condition if tested positive (%)')\n",
"step-4": "<mask token>\nimport numpy as np\nimport matplotlib.pyplot as plt\nx = np.random.choice(2, 200, p=[0.1, 0.9])\nx = np.random.sample(size=200)\nweights = [0.1, 0.9]\ncs = np.cumsum(weights)\n\n\ndef calculated_weights(x):\n return sum(cs < x)\n\n\nvectroized_calculated_weights = np.vectorize(calculated_weights)\nx = vectroized_calculated_weights(x)\nlen(x[x == 1]) / len(x)\n\n\ndef t200():\n return np.random.choice(2, 200, p=[0.1, 0.9])\n\n\ny1 = np.array([(len(t200()[t200() == 1]) / len(t200())) for i in range(100)])\nplt.hist(y1)\nplt.title(\"Fraction of 1's for 200 biased coin tosses a 100 times\")\nplt.xlabel(\"Fraction of 1's in a given attempt (of 200 tosses)\")\nplt.ylabel('frequency')\n\n\ndef t1000():\n return np.random.choice(2, 1000, p=[0.1, 0.9])\n\n\ny2 = np.array([(len(t1000()[t1000() == 1]) / len(t1000())) for i in range(100)]\n )\nfig, ax = plt.subplots(2)\nfig.suptitle('Histograms for Y1 and Y2')\nax[0].hist(y1)\nax[1].hist(y2)\nax[0].set_xlim([0.85, 0.95])\nax[1].set_xlim([0.85, 0.95])\nx = np.random.choice(2, 10000, p=[0.99, 0.01])\ny0 = np.random.choice(2, len(x[x == 0]), p=[0.95, 0.05])\ny1 = np.random.choice(2, len(x[x == 1]), p=[0.02, 0.98])\npos_no_meas = np.zeros(len(y0[y0 == 1]))\npos_with_meas = np.ones(len(y1[y1 == 1]))\ntested_pos = np.concatenate((pos_no_meas, pos_with_meas))\np = len(tested_pos[tested_pos == 1]) / len(tested_pos)\n\n\ndef prob_cond_given_pos(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond\n ):\n x = np.random.choice(2, 10000, p=[1 - prob_cond, prob_cond])\n y0 = np.random.choice(2, len(x[x == 0]), p=[prob_neg_given_no_cond, 1 -\n prob_neg_given_no_cond])\n y1 = np.random.choice(2, len(x[x == 1]), p=[1 - prob_pos_given_cond,\n prob_pos_given_cond])\n pos_no_meas = np.zeros(len(y0[y0 == 1]))\n pos_with_meas = np.ones(len(y1[y1 == 1]))\n tested_pos = np.concatenate((pos_no_meas, pos_with_meas))\n return len(tested_pos[tested_pos == 1]) / len(tested_pos)\n\n\ndef prob_cond_given_pos_bayes(prob_cond, prob_pos_given_cond,\n prob_neg_given_no_cond):\n return prob_pos_given_cond * prob_cond / (prob_pos_given_cond *\n prob_cond + (1 - prob_neg_given_no_cond) * (1 - prob_cond))\n\n\nx = np.linspace(0.001, 0.1, 100)\nplt.scatter(x, prob_cond_given_pos_bayes(x, 0.98, 0.95))\nplt.xlabel('Probability of condition (%)')\nplt.ylabel('Probability of condition if tested positive (%)')\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\n\r\nCST 383, measles simulation homework\r\n\r\n# Here's a question. Suppose 1% of people have measles, that the\r\n# test for measles if 98% accurate if you do have measles, and 98%\r\n# accurate if you don't have measles. Then what is the probability\r\n# that you have measles, given that you have tested positive for them?\r\n#\r\n# Try guessing an answer before you start on this assignment.\r\n#\r\n# In this homework we will use simulation to estimate the answer,\r\n# and we'll also compute the answer using Bayes' Law. There\r\n# are three parts below:\r\n# 1. Warm up by simulating some coin flips.\r\n# 2. Use simulation to answer the question above.\r\n# 3. Use Bayes' Law to answer the question without simulation.\r\n\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\n\r\n# Instructions: \r\n# Problems start with #@ and then give a number. Enter your\r\n# Python code after each problem. Do not use any variables\r\n# in your answer except for the ones that the problem says\r\n# you can assume are defined.\r\n\r\n\r\n#\r\n# Part 1: warmup\r\n#\r\n\r\n#@ 1\r\n# Simulate flipping a coin 200 times that has a 90% chance of\r\n# landing heads. Store your result in a NumPy array x of length\r\n# 200 that contains only 0 or 1, where 1 represents heads.\r\n# Use np.random.choice(). \r\n# (assignment to x)\r\nx = np.random.choice(2, 200, p=[0.1, 0.9])\r\n\r\n#@ 2\r\n# Repeat the problem above, but this time use np.random.sample(),\r\n# which gives values between 0 and 1. Obviously you will need to do\r\n# further processing to turn the output of sample() into your\r\n# array x. This will take a little thought.\r\n# (assignment to x)\r\nx = np.random.sample(size=200)\r\n\r\nweights = [0.1, 0.9]\r\ncs = np.cumsum(weights)\r\n\r\ndef calculated_weights(x):\r\n return sum(cs < x)\r\n\r\nvectroized_calculated_weights = np.vectorize(calculated_weights)\r\nx = vectroized_calculated_weights(x)\r\n\r\n#@ 3\r\n# compute the fraction of values in array x that are 1.\r\n# (expression)\r\nlen(x[x == 1]) / len(x)\r\n\r\n\r\n#@ 4\r\n# Flip the weighted coin of problem 1 200 times, compute the fraction\r\n# of values that are 1, and repeat this entire process 100 times to\r\n# get an array of length 100. Assign this array to variable y1.\r\n# (assignment to y1)\r\ndef t200():\r\n return np.random.choice(2, 200, p=[0.1, 0.9])\r\n\r\ny1 = np.array([len(t200()[t200()==1])/len(t200()) for i in range(100)])\r\n\r\n#@ 5\r\n# plot a histogram of y1 using matplotlib\r\n# (produce a plot)\r\nplt.hist(y1)\r\nplt.title(\"Fraction of 1's for 200 biased coin tosses a 100 times\")\r\nplt.xlabel(\"Fraction of 1's in a given attempt (of 200 tosses)\")\r\nplt.ylabel(\"frequency\")\r\n\r\n#@ 6\r\n# compute a NumPy array y2 that is just like y1, except that in creating y2\r\n# we do 1000 coin flips in each experiment, not 200.\r\n# (assignment to y2)\r\ndef t1000():\r\n return np.random.choice(2, 1000, p=[0.1, 0.9])\r\n\r\ny2 = np.array([len(t1000()[t1000()==1])/len(t1000()) for i in range(100)])\r\n\r\n#@ 7\r\n# plot histograms for y1 and y2, with the histogram for y1 above \r\n# the plot for y2. Our lecture notes show how to do this; see\r\n# the 'multiple subplots' slide. Use matplotlib. In both histograms, \r\n# let the x axis values range from 0.85 to 0.95. 
Please study\r\n# the two histograms and think about why they are different.\r\n# Assume y1 and y2 are defined.\r\n# (produce a plot)\r\n\r\nfig, ax = plt.subplots(2)\r\nfig.suptitle(\"Histograms for Y1 and Y2\")\r\nax[0].hist(y1)\r\nax[1].hist(y2)\r\nax[0].set_xlim([0.85, 0.95])\r\nax[1].set_xlim([0.85, 0.95])\r\n\r\n#\r\n# Part 2 - simulate the answer to the question\r\n#\r\n\r\n#@ 8\r\n# Simulate the overall occurrence of measles among 10,000 people,\r\n# based on the assumption that each person has a 0.01% chance of\r\n# having measles. \r\n# Compute a NumPy array x of length 10,000, where each value is \r\n# either 0 or 1. Each of the 10,000 values should be found by \r\n# \"flipping a 0/1 coin\" that is weighted 99% to 0. Approximately \r\n# 99% of the values in x should be 0, and the others should be one.\r\n# (assignment to x)\r\nx = np.random.choice(2, 10000, p=[0.99, 0.01])\r\n\r\n#@ 9\r\n# Simulate the measles test results on the people without measles,\r\n# based on the assumption that the measles test gives the right\r\n# answer about 95% of the time on people without measles.\r\n# Create an array y0, which is as long as the number of 0's in\r\n# array x, by flipping a 0/1 coin that is weighted 95% to 0.\r\n# Assume x is defined.\r\n# (assignment to y0)\r\ny0 = np.random.choice(2, len(x[x==0]), p=[0.95, 0.05])\r\n\r\n\r\n#@ 10\r\n# Simulate the measles test results on the people with measles,\r\n# based on the assumption that the measles test gives the right\r\n# answer about 98% of the time on people with measles.\r\n# Create an array y1, which is as long as the number of 1's in\r\n# array x, by flipping a 0/1 coin that is weighted 98% to 1.\r\n# Assume x is defined.\r\n# (assignment to y1)\r\ny1 = np.random.choice(2, len(x[x==1]), p=[0.02, 0.98])\r\n\r\n\r\n#@ 11\r\n# Collect the measles-free people among those who tested positive.\r\n# Compute a vector pos_no_meas that is all 0's, and is as long as the\r\n# number of 1's in y0.\r\n# Assume y0 is defined.\r\n# (assignment to pos_no_meas)\r\npos_no_meas = np.zeros(len(y0[y0==1]))\r\n\r\n#@ 12\r\n# Collect the measles-infected people among those who tested positive.\r\n# Compute a vector pos_with_meas that is all 1's, and is as long as\r\n# the number of 1's in y1.\r\n# Assume y1 is defined.\r\n# (assignment to pos_with_meas)\r\npos_with_meas = np.ones(len(y1[y1==1]))\r\n\r\n#@ 13\r\n# Collect information about all people who tested positive.\r\n# Concatenate arrays pos_no_meas and pos_with_meas, and assign\r\n# the result to array 'tested_pos'. A 0 in in this array means \r\n# no measles; a 1 means measles.\r\n# Assume pos_no_meas and pos_with_meas are defined.\r\n# (assignment to tested_pos)\r\ntested_pos = np.concatenate((pos_no_meas, pos_with_meas))\r\n\r\n#@ 14\r\n# Estimate the probability of having measles if you've tested\r\n# positive for measles. Compute the fraction of values in \r\n# tested_positive that are 1, and assign the result to \r\n# variable 'p'.\r\n# Assume tested_pos is defined.\r\n# (assignment to p) \r\np = len(tested_pos[tested_pos == 1]) / len(tested_pos)\r\n\r\n\r\n#@ 15\r\n# Package up your code into a function 'prob_cond_given_pos'. 
This\r\n# function will return the probability of having a condition, based\r\n# on certain probabilities.\r\n# The function should have the following parameters:\r\n# prob_cond - probability of a condition (above you used 0.01)\r\n# prob_pos_given_cond - probability of testing positive given condition (you used 0.98)\r\n# prob_neg_given_no_cond - probability of testing negative given no condition (you used 0.95)\r\n# The function must return the probability of having the condition.\r\n#\r\n# Your function should return a slightly different value every time.\r\n# When you run prob_cond_given_pos(0.01, 0.98, 0.95), you should get an answer\r\n# similar to the value of p you just computed.\r\n#\r\n# Here is the output from tests I ran with my code:\r\n# test 1:\r\n# np.array([prob_cond_given_pos(0.5, 0.9, 0.8) for i in range(1000)]).mean()\r\n# output: 0.8180582615720287\r\n# test 2:\r\n# np.array([prob_cond_given_pos(0.3, 0.8, 0.7) for i in range(1000)]).mean()\r\n# output: 0.5334712339397902\r\n# test 3:\r\n# np.array([prob_cond_given_pos(0.5, 0.9, 0.8) for i in range(100)]).std()\r\n# output: 0.00550051982001144\r\n#\r\n## I provided the function header. You should fill out the function body,\r\n# including the return statement.\r\n# (define a function)\r\n\r\ndef prob_cond_given_pos(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond):\r\n x = np.random.choice(2, 10000, p=[1-prob_cond, prob_cond])\r\n y0 = np.random.choice(2, len(x[x==0]), p=[prob_neg_given_no_cond, 1-prob_neg_given_no_cond])\r\n y1 = np.random.choice(2, len(x[x==1]), p=[1-prob_pos_given_cond, prob_pos_given_cond])\r\n pos_no_meas = np.zeros(len(y0[y0==1]))\r\n pos_with_meas = np.ones(len(y1[y1==1]))\r\n tested_pos = np.concatenate((pos_no_meas, pos_with_meas))\r\n return len(tested_pos[tested_pos == 1]) / len(tested_pos)\r\n\r\n#\r\n# Part 3 - compute the answer using Bayes' Law\r\n#\r\n\r\n#@ 16\r\n# Write a function 'prob_cond_given_pos_bayes'. This function\r\n# will take the same parameters as prob_cond_given_pos, but will\r\n# use Bayes' Law to compute the result.\r\n#\r\n# Here is some output from my code:\r\n# test1:\r\n# prob_cond_given_pos_bayes(0.5, 0.9, 0.8)\r\n# output: 0.1818...\r\n# test 2:\r\n# prob_cond_given_pos_bayes(0.3, 0.8, 0.7) \r\n# output: 0.5333...\r\n#\r\n# I provided the function header. You should fill out the function body,\r\n# including the return statement.\r\n# (define a function)\r\n\r\ndef prob_cond_given_pos_bayes(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond):\r\n return (prob_pos_given_cond*prob_cond) / ((prob_pos_given_cond*prob_cond)+(1-prob_neg_given_no_cond)*(1-prob_cond))\r\n\r\n#@ 17\r\n# How does the probability of having a condition given you\r\n# tested positive for it change based on how rare the \r\n# condition is? \r\n# Produce a histogram showing the probability of having measles\r\n# given you tested positive for measles. Compute \r\n# prob_cond_given_pos_bayes(x, 0.98, 0.95) for x ranging\r\n# from 0.001 to 0.10 (x is the probability of having the \r\n# condition). Use at least 100 values of x.\r\n# Plot the results as a scatter plot, with x on the x axis\r\n# and probability on the y axis. Label the x and y axes\r\n# appropriately. 
Use matplotlib.\r\n# Assume function prob_cond_given_pos_bayes() is defined.\r\n# (produce a plot)\r\n#x = np.arange(0.001, 0.1, ((0.1-0.001)/100))\r\nx = np.linspace(0.001, 0.1, 100)\r\nplt.scatter(x, prob_cond_given_pos_bayes(x, 0.98, 0.95))\r\nplt.xlabel(\"Probability of condition (%)\")\r\nplt.ylabel(\"Probability of condition if tested positive (%)\")",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
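A quick closed-form sanity check of the Bayes computation in the record above (plain Python; the probabilities are the ones stated in the homework, so no simulation is needed):

def bayes_pos(prob_cond, prob_pos_given_cond, prob_neg_given_no_cond):
    # P(condition | positive test) via Bayes' Law
    true_pos = prob_pos_given_cond * prob_cond
    false_pos = (1 - prob_neg_given_no_cond) * (1 - prob_cond)
    return true_pos / (true_pos + false_pos)

print(bayes_pos(0.01, 0.98, 0.98))  # ~0.331 -- the 98%/98% question posed in the header
print(bayes_pos(0.01, 0.98, 0.95))  # ~0.165 -- the 98%/95% test simulated in the code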
# Import this.
|
normal
|
{
"blob_id": "1f69bcd204c9be26756d964f4deb61296e40ff10",
"index": 9658,
"step-1": "# Import this.\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
from app.request import send_tor_signal
from app.utils.session_utils import generate_user_keys
from app.utils.gen_ddg_bangs import gen_bangs_json
from flask import Flask
from flask_session import Session
import json
import os
from stem import Signal
app = Flask(__name__, static_folder=os.path.dirname(
os.path.abspath(__file__)) + '/static')
app.user_elements = {}
app.default_key_set = generate_user_keys()
app.no_cookie_ips = []
app.config['SECRET_KEY'] = os.urandom(32)
app.config['SESSION_TYPE'] = 'filesystem'
app.config['VERSION_NUMBER'] = '0.3.1'
app.config['APP_ROOT'] = os.getenv(
'APP_ROOT',
os.path.dirname(os.path.abspath(__file__)))
app.config['LANGUAGES'] = json.load(open(
os.path.join(app.config['APP_ROOT'], 'misc/languages.json')))
app.config['COUNTRIES'] = json.load(open(
os.path.join(app.config['APP_ROOT'], 'misc/countries.json')))
app.config['STATIC_FOLDER'] = os.getenv(
'STATIC_FOLDER',
os.path.join(app.config['APP_ROOT'], 'static'))
app.config['CONFIG_PATH'] = os.getenv(
'CONFIG_VOLUME',
os.path.join(app.config['STATIC_FOLDER'], 'config'))
app.config['DEFAULT_CONFIG'] = os.path.join(
app.config['CONFIG_PATH'],
'config.json')
app.config['SESSION_FILE_DIR'] = os.path.join(
app.config['CONFIG_PATH'],
'session')
app.config['BANG_PATH'] = os.getenv(
'CONFIG_VOLUME',
os.path.join(app.config['STATIC_FOLDER'], 'bangs'))
app.config['BANG_FILE'] = os.path.join(
app.config['BANG_PATH'],
'bangs.json')
if not os.path.exists(app.config['CONFIG_PATH']):
os.makedirs(app.config['CONFIG_PATH'])
if not os.path.exists(app.config['SESSION_FILE_DIR']):
os.makedirs(app.config['SESSION_FILE_DIR'])
# Generate DDG bang filter, and create path if it doesn't exist yet
if not os.path.exists(app.config['BANG_PATH']):
os.makedirs(app.config['BANG_PATH'])
if not os.path.exists(app.config['BANG_FILE']):
gen_bangs_json(app.config['BANG_FILE'])
Session(app)
# Attempt to acquire tor identity, to determine if Tor config is available
send_tor_signal(Signal.HEARTBEAT)
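# Routes are imported last, after the app object is fully configured,
# to avoid a circular import between this module and app.routes.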
from app import routes # noqa
|
normal
|
{
"blob_id": "26fb607623fda333c37e254470ca6d07708671a8",
"index": 5877,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif not os.path.exists(app.config['CONFIG_PATH']):\n os.makedirs(app.config['CONFIG_PATH'])\nif not os.path.exists(app.config['SESSION_FILE_DIR']):\n os.makedirs(app.config['SESSION_FILE_DIR'])\nif not os.path.exists(app.config['BANG_PATH']):\n os.makedirs(app.config['BANG_PATH'])\nif not os.path.exists(app.config['BANG_FILE']):\n gen_bangs_json(app.config['BANG_FILE'])\nSession(app)\nsend_tor_signal(Signal.HEARTBEAT)\n<mask token>\n",
"step-3": "<mask token>\napp = Flask(__name__, static_folder=os.path.dirname(os.path.abspath(\n __file__)) + '/static')\napp.user_elements = {}\napp.default_key_set = generate_user_keys()\napp.no_cookie_ips = []\napp.config['SECRET_KEY'] = os.urandom(32)\napp.config['SESSION_TYPE'] = 'filesystem'\napp.config['VERSION_NUMBER'] = '0.3.1'\napp.config['APP_ROOT'] = os.getenv('APP_ROOT', os.path.dirname(os.path.\n abspath(__file__)))\napp.config['LANGUAGES'] = json.load(open(os.path.join(app.config['APP_ROOT'\n ], 'misc/languages.json')))\napp.config['COUNTRIES'] = json.load(open(os.path.join(app.config['APP_ROOT'\n ], 'misc/countries.json')))\napp.config['STATIC_FOLDER'] = os.getenv('STATIC_FOLDER', os.path.join(app.\n config['APP_ROOT'], 'static'))\napp.config['CONFIG_PATH'] = os.getenv('CONFIG_VOLUME', os.path.join(app.\n config['STATIC_FOLDER'], 'config'))\napp.config['DEFAULT_CONFIG'] = os.path.join(app.config['CONFIG_PATH'],\n 'config.json')\napp.config['SESSION_FILE_DIR'] = os.path.join(app.config['CONFIG_PATH'],\n 'session')\napp.config['BANG_PATH'] = os.getenv('CONFIG_VOLUME', os.path.join(app.\n config['STATIC_FOLDER'], 'bangs'))\napp.config['BANG_FILE'] = os.path.join(app.config['BANG_PATH'], 'bangs.json')\nif not os.path.exists(app.config['CONFIG_PATH']):\n os.makedirs(app.config['CONFIG_PATH'])\nif not os.path.exists(app.config['SESSION_FILE_DIR']):\n os.makedirs(app.config['SESSION_FILE_DIR'])\nif not os.path.exists(app.config['BANG_PATH']):\n os.makedirs(app.config['BANG_PATH'])\nif not os.path.exists(app.config['BANG_FILE']):\n gen_bangs_json(app.config['BANG_FILE'])\nSession(app)\nsend_tor_signal(Signal.HEARTBEAT)\n<mask token>\n",
"step-4": "from app.request import send_tor_signal\nfrom app.utils.session_utils import generate_user_keys\nfrom app.utils.gen_ddg_bangs import gen_bangs_json\nfrom flask import Flask\nfrom flask_session import Session\nimport json\nimport os\nfrom stem import Signal\napp = Flask(__name__, static_folder=os.path.dirname(os.path.abspath(\n __file__)) + '/static')\napp.user_elements = {}\napp.default_key_set = generate_user_keys()\napp.no_cookie_ips = []\napp.config['SECRET_KEY'] = os.urandom(32)\napp.config['SESSION_TYPE'] = 'filesystem'\napp.config['VERSION_NUMBER'] = '0.3.1'\napp.config['APP_ROOT'] = os.getenv('APP_ROOT', os.path.dirname(os.path.\n abspath(__file__)))\napp.config['LANGUAGES'] = json.load(open(os.path.join(app.config['APP_ROOT'\n ], 'misc/languages.json')))\napp.config['COUNTRIES'] = json.load(open(os.path.join(app.config['APP_ROOT'\n ], 'misc/countries.json')))\napp.config['STATIC_FOLDER'] = os.getenv('STATIC_FOLDER', os.path.join(app.\n config['APP_ROOT'], 'static'))\napp.config['CONFIG_PATH'] = os.getenv('CONFIG_VOLUME', os.path.join(app.\n config['STATIC_FOLDER'], 'config'))\napp.config['DEFAULT_CONFIG'] = os.path.join(app.config['CONFIG_PATH'],\n 'config.json')\napp.config['SESSION_FILE_DIR'] = os.path.join(app.config['CONFIG_PATH'],\n 'session')\napp.config['BANG_PATH'] = os.getenv('CONFIG_VOLUME', os.path.join(app.\n config['STATIC_FOLDER'], 'bangs'))\napp.config['BANG_FILE'] = os.path.join(app.config['BANG_PATH'], 'bangs.json')\nif not os.path.exists(app.config['CONFIG_PATH']):\n os.makedirs(app.config['CONFIG_PATH'])\nif not os.path.exists(app.config['SESSION_FILE_DIR']):\n os.makedirs(app.config['SESSION_FILE_DIR'])\nif not os.path.exists(app.config['BANG_PATH']):\n os.makedirs(app.config['BANG_PATH'])\nif not os.path.exists(app.config['BANG_FILE']):\n gen_bangs_json(app.config['BANG_FILE'])\nSession(app)\nsend_tor_signal(Signal.HEARTBEAT)\nfrom app import routes\n",
"step-5": "from app.request import send_tor_signal\r\nfrom app.utils.session_utils import generate_user_keys\r\nfrom app.utils.gen_ddg_bangs import gen_bangs_json\r\nfrom flask import Flask\r\nfrom flask_session import Session\r\nimport json\r\nimport os\r\nfrom stem import Signal\r\n\r\napp = Flask(__name__, static_folder=os.path.dirname(\r\n os.path.abspath(__file__)) + '/static')\r\napp.user_elements = {}\r\napp.default_key_set = generate_user_keys()\r\napp.no_cookie_ips = []\r\napp.config['SECRET_KEY'] = os.urandom(32)\r\napp.config['SESSION_TYPE'] = 'filesystem'\r\napp.config['VERSION_NUMBER'] = '0.3.1'\r\napp.config['APP_ROOT'] = os.getenv(\r\n 'APP_ROOT',\r\n os.path.dirname(os.path.abspath(__file__)))\r\napp.config['LANGUAGES'] = json.load(open(\r\n os.path.join(app.config['APP_ROOT'], 'misc/languages.json')))\r\napp.config['COUNTRIES'] = json.load(open(\r\n os.path.join(app.config['APP_ROOT'], 'misc/countries.json')))\r\napp.config['STATIC_FOLDER'] = os.getenv(\r\n 'STATIC_FOLDER',\r\n os.path.join(app.config['APP_ROOT'], 'static'))\r\napp.config['CONFIG_PATH'] = os.getenv(\r\n 'CONFIG_VOLUME',\r\n os.path.join(app.config['STATIC_FOLDER'], 'config'))\r\napp.config['DEFAULT_CONFIG'] = os.path.join(\r\n app.config['CONFIG_PATH'],\r\n 'config.json')\r\napp.config['SESSION_FILE_DIR'] = os.path.join(\r\n app.config['CONFIG_PATH'],\r\n 'session')\r\napp.config['BANG_PATH'] = os.getenv(\r\n 'CONFIG_VOLUME',\r\n os.path.join(app.config['STATIC_FOLDER'], 'bangs'))\r\napp.config['BANG_FILE'] = os.path.join(\r\n app.config['BANG_PATH'],\r\n 'bangs.json')\r\n\r\nif not os.path.exists(app.config['CONFIG_PATH']):\r\n os.makedirs(app.config['CONFIG_PATH'])\r\n\r\nif not os.path.exists(app.config['SESSION_FILE_DIR']):\r\n os.makedirs(app.config['SESSION_FILE_DIR'])\r\n\r\n# Generate DDG bang filter, and create path if it doesn't exist yet\r\nif not os.path.exists(app.config['BANG_PATH']):\r\n os.makedirs(app.config['BANG_PATH'])\r\nif not os.path.exists(app.config['BANG_FILE']):\r\n gen_bangs_json(app.config['BANG_FILE'])\r\n\r\nSession(app)\r\n\r\n# Attempt to acquire tor identity, to determine if Tor config is available\r\nsend_tor_signal(Signal.HEARTBEAT)\r\n\r\nfrom app import routes # noqa\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
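A minimal sketch of the os.getenv fallback pattern the Flask snippet above relies on (the CONFIG_VOLUME variable name is taken from the snippet; the override path here is just an illustration):

import os

# With CONFIG_VOLUME unset, the app falls back to <static>/config;
# setting it relocates both the config dir and the bangs dir.
os.environ['CONFIG_VOLUME'] = '/tmp/whoogle-config'  # hypothetical override
config_path = os.getenv('CONFIG_VOLUME', os.path.join('app', 'static', 'config'))
print(config_path)  # -> /tmp/whoogle-config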
from splinter import Browser
from datetime import datetime, timedelta
import os
import urllib.request  # Python 3 home of urlretrieve (the original used the Python 2 urllib API)
import cv2
import numpy as np
from PIL import Image
import imutils
import csv
class Scraper():
start_date = datetime(2018, 1, 8)
url = 'http://spaceweather.com/'
def scrape(self):
self.browser = Browser('firefox')
self.browser.driver.set_page_load_timeout(60)
self.browser.visit(self.url)
for day in self.get_days():
self.scrape_day(day)
def scrape_day(self, day):
self.browser.select('month', day.strftime('%m'))
self.browser.select('day', day.strftime('%d'))
self.browser.select('year', day.strftime('%Y'))
button = self.browser.find_by_name('view')
button.click()
text = self.browser.find_by_css('.solarWindText')[4].text
number = int(text.split(' ')[2].strip())
link = self.browser.find_link_by_partial_href('images{}/'.format(day.strftime('%Y')))['href']
folder_name = "data/{}{}{}".format(day.strftime('%Y'), day.strftime('%m'), day.strftime('%d'))
image_name = "{}/image.gif".format(folder_name)
txt_name = "{}/data.txt".format(folder_name)
os.mkdir(folder_name)
        urllib.request.urlretrieve(link, image_name)
img = Image.open(image_name)
img.save("{}/image.png".format(folder_name), 'png', optimize=True, quality=70)
txt_file = open(txt_name, 'w')
txt_file.write(str(number))
txt_file.close()
print("Downloaded data for {}, sunspots: {}".format(day.strftime('%m/%d/%Y'), number))
def get_days(self):
days = []
for i in range(0, 8):
base = self.start_date + timedelta(days=7 * i)
first = base
second = base + timedelta(days=2)
third = base + timedelta(days=4)
days.append(first)
days.append(second)
days.append(third)
return days
class Entry():
folder = None
date = None
sunspots = -1
image_path = None
counted_sunspots = 0
sections = [0, 0, 0, 0]
def nothing(self, *arg):
pass
def __init__(self, folder, date, sunspots, image_path):
self.folder = folder
self.date = date
self.sunspots = sunspots
self.image_path = image_path
def process(self):
frame = cv2.imread(self.image_path)
height, width, channels = frame.shape
frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)
hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)
colorLow = np.array([0,90,80])
colorHigh = np.array([10,255,255])
mask = cv2.inRange(hsv, colorLow, colorHigh)
result = cv2.bitwise_and(frame, frame, mask=mask)
image_edged = cv2.Canny(mask, 50, 100)
image_edged = cv2.dilate(image_edged, None, iterations=1)
image_edged = cv2.erode(image_edged, None, iterations=1)
cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        cnts = imutils.grab_contours(cnts)  # normalizes findContours' return shape across OpenCV 2/3/4
image_contours = cv2.bitwise_not(result)
self.counted_sunspots = 0
self.sections = [0, 0, 0, 0]
        # integer division so the cv2 drawing calls below receive ints (Python 3)
        section_1_start, section_1_end = 0, height // 4
        section_2_start, section_2_end = height // 4, height // 4 * 2
        section_3_start, section_3_end = height // 4 * 2, height // 4 * 3
        section_4_start, section_4_end = height // 4 * 3, height // 4 * 4
cv2.line(image_contours, (0, section_1_end), (width, section_1_end), (0, 0, 0), 5)
cv2.line(image_contours, (0, section_2_end), (width, section_2_end), (0, 0, 0), 10)
cv2.line(image_contours, (0, section_3_end), (width, section_3_end), (0, 0, 0), 5)
        cv2.circle(image_contours, (width // 2, height // 2), width // 2, (0, 0, 0), 5)
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50), font, 2, (0, 0, 0), 2, cv2.LINE_AA)
cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)
for c in cnts:
if cv2.contourArea(c) < 5:
continue
(x,y),radius = cv2.minEnclosingCircle(c)
x = int(x)
y = int(y)
radius = int(radius)
cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)
self.counted_sunspots = self.counted_sunspots + 1
if y >= section_1_start and y <= section_1_end:
#cv2.putText(image_contours, '1', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)
self.sections[0] = self.sections[0] + 1
elif y >= section_2_start and y <= section_2_end:
#cv2.putText(image_contours, '2', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)
self.sections[1] = self.sections[1] + 1
elif y >= section_3_start and y <= section_3_end:
#cv2.putText(image_contours, '3', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)
self.sections[2] = self.sections[2] + 1
elif y >= section_4_start and y <= section_4_end:
#cv2.putText(image_contours, '4', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)
self.sections[3] = self.sections[3] + 1
print('Counted sunspots: {}'.format(self.counted_sunspots))
print(self.sections)
cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
colorLow = np.array([0,0,90])
colorHigh = np.array([0,0,255])
mask = cv2.inRange(hsv, colorLow, colorHigh)
image_contours[mask > 0] = (0, 0, 0)
vis = np.concatenate((frame, image_contours), axis=1)
cv2.imwrite('out/images/{}.png'.format(self.folder), vis)
class Processor():
entries = []
def load(self):
folders = os.listdir("data")
for folder in folders:
year = int(folder[:4])
month = int(folder[4:6])
day = int(folder[6:8])
date = datetime(year, month, day)
image_name = "data/{}/image.png".format(folder)
txt_name = "data/{}/data.txt".format(folder)
txt_file = open(txt_name, 'r')
content = txt_file.readlines()
txt_file.close()
number = int(content[0])
print(folder)
entry = Entry(folder, date, number, image_name)
entry.process()
self.entries.append(entry)
self.entries.sort(key=lambda x: x.date, reverse=False)
def compute(self):
for section in range(0, 4):
total = 0
for entry in self.entries:
total += entry.sections[section]
average = float(total) / float(len(self.entries))
print('-------[Section {}]-------'.format(section + 1))
print('Total: {}'.format(total))
print('Average: {}'.format(average))
total = 0
sections_data = [["date", "section_1", "section_2", "section_3", "section_4"]]
numbers_data = [["date", "reported", "visible"]]
for entry in self.entries:
total += entry.counted_sunspots
sections_data.append([entry.date.strftime("%Y/%m/%d")] + entry.sections)
numbers_data.append([entry.date.strftime("%Y/%m/%d")] + [entry.sunspots, entry.counted_sunspots])
average = float(total) / float(len(self.entries))
print('---------[TOTAL]---------')
print('Total: {}'.format(total))
print('Average: {}'.format(average))
csv_file = open('out/sections.csv', 'w')
writer = csv.writer(csv_file)
writer.writerows(sections_data)
csv_file.close()
csv_file = open('out/numbers.csv', 'w')
writer = csv.writer(csv_file)
writer.writerows(numbers_data)
csv_file.close()
scraper = Scraper()
scraper.scrape()
processor = Processor()
processor.load()
processor.compute()
|
normal
|
{
"blob_id": "c55991e738c89ee09dabd79d514e710e0fcbac85",
"index": 422,
"step-1": "<mask token>\n\n\nclass Scraper:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n 
cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Scraper:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, 
cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Scraper:\n <mask token>\n <mask token>\n <mask token>\n\n def scrape_day(self, day):\n self.browser.select('month', day.strftime('%m'))\n self.browser.select('day', day.strftime('%d'))\n self.browser.select('year', day.strftime('%Y'))\n button = self.browser.find_by_name('view')\n button.click()\n text = self.browser.find_by_css('.solarWindText')[4].text\n number = int(text.split(' ')[2].strip())\n link = self.browser.find_link_by_partial_href('images{}/'.format(\n day.strftime('%Y')))['href']\n folder_name = 'data/{}{}{}'.format(day.strftime('%Y'), day.strftime\n ('%m'), day.strftime('%d'))\n image_name = '{}/image.gif'.format(folder_name)\n txt_name = '{}/data.txt'.format(folder_name)\n os.mkdir(folder_name)\n urllib.urlretrieve(link, image_name)\n img = Image.open(image_name)\n img.save('{}/image.png'.format(folder_name), 'png', optimize=True,\n quality=70)\n txt_file = open(txt_name, 'w')\n txt_file.write(str(number))\n txt_file.close()\n print('Downloaded data for {}, sunspots: {}'.format(day.strftime(\n '%m/%d/%Y'), number))\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n 
self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\n<mask token>\n",
"step-4": "from splinter import Browser\nfrom time import sleep\nfrom datetime import datetime, timedelta\nimport os, sys\nimport urllib\nimport cv2\nimport numpy as np\nfrom PIL import Image\nimport imutils\nimport csv\n\n\nclass Scraper:\n start_date = datetime(2018, 1, 8)\n url = 'http://spaceweather.com/'\n\n def scrape(self):\n self.browser = Browser('firefox')\n self.browser.driver.set_page_load_timeout(60)\n self.browser.visit(self.url)\n for day in self.get_days():\n self.scrape_day(day)\n\n def scrape_day(self, day):\n self.browser.select('month', day.strftime('%m'))\n self.browser.select('day', day.strftime('%d'))\n self.browser.select('year', day.strftime('%Y'))\n button = self.browser.find_by_name('view')\n button.click()\n text = self.browser.find_by_css('.solarWindText')[4].text\n number = int(text.split(' ')[2].strip())\n link = self.browser.find_link_by_partial_href('images{}/'.format(\n day.strftime('%Y')))['href']\n folder_name = 'data/{}{}{}'.format(day.strftime('%Y'), day.strftime\n ('%m'), day.strftime('%d'))\n image_name = '{}/image.gif'.format(folder_name)\n txt_name = '{}/data.txt'.format(folder_name)\n os.mkdir(folder_name)\n urllib.urlretrieve(link, image_name)\n img = Image.open(image_name)\n img.save('{}/image.png'.format(folder_name), 'png', optimize=True,\n quality=70)\n txt_file = open(txt_name, 'w')\n txt_file.write(str(number))\n txt_file.close()\n print('Downloaded data for {}, sunspots: {}'.format(day.strftime(\n '%m/%d/%Y'), number))\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b 
%d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\nscraper = Scraper()\nscraper.scrape()\nprocessor = Processor()\nprocessor.load()\nprocessor.compute()\n",
"step-5": "from splinter import Browser\nfrom time import sleep\nfrom datetime import datetime, timedelta\nimport os, sys\nimport urllib\nimport cv2\nimport numpy as np\nfrom PIL import Image\nimport imutils\nimport csv\n\nclass Scraper():\n start_date = datetime(2018, 1, 8)\n url = 'http://spaceweather.com/'\n\n def scrape(self):\n self.browser = Browser('firefox')\n self.browser.driver.set_page_load_timeout(60)\n self.browser.visit(self.url)\n for day in self.get_days():\n self.scrape_day(day)\n\n def scrape_day(self, day):\n self.browser.select('month', day.strftime('%m'))\n self.browser.select('day', day.strftime('%d'))\n self.browser.select('year', day.strftime('%Y'))\n button = self.browser.find_by_name('view')\n button.click()\n text = self.browser.find_by_css('.solarWindText')[4].text\n number = int(text.split(' ')[2].strip())\n link = self.browser.find_link_by_partial_href('images{}/'.format(day.strftime('%Y')))['href']\n folder_name = \"data/{}{}{}\".format(day.strftime('%Y'), day.strftime('%m'), day.strftime('%d'))\n image_name = \"{}/image.gif\".format(folder_name)\n txt_name = \"{}/data.txt\".format(folder_name)\n os.mkdir(folder_name)\n urllib.urlretrieve(link, image_name)\n img = Image.open(image_name)\n img.save(\"{}/image.png\".format(folder_name), 'png', optimize=True, quality=70)\n txt_file = open(txt_name, 'w')\n txt_file.write(str(number))\n txt_file.close()\n print(\"Downloaded data for {}, sunspots: {}\".format(day.strftime('%m/%d/%Y'), number))\n\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\nclass Entry():\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n \n colorLow = np.array([0,90,80])\n colorHigh = np.array([10,255,255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height/4\n section_2_start, section_2_end = height/4, height/4 * 2\n section_3_start, section_3_end = height/4 * 2, height/4 * 3\n section_4_start, section_4_end = height/4 * 3, height/4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end), (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end), (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end), (0, 0, 0), 5)\n cv2.circle(image_contours, (width/2, height/2), width/2, (0, 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50), font, 2, (0, 0, 
0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x,y),radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n #cv2.putText(image_contours, '1', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n #cv2.putText(image_contours, '2', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n #cv2.putText(image_contours, '3', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n #cv2.putText(image_contours, '4', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n\n colorLow = np.array([0,0,90])\n colorHigh = np.array([0,0,255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = (0, 0, 0)\n vis = np.concatenate((frame, image_contours), axis=1)\n\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\nclass Processor():\n entries = []\n \n def load(self):\n folders = os.listdir(\"data\")\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = \"data/{}/image.png\".format(folder)\n txt_name = \"data/{}/data.txt\".format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [[\"date\", \"section_1\", \"section_2\", \"section_3\", \"section_4\"]]\n numbers_data = [[\"date\", \"reported\", \"visible\"]]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime(\"%Y/%m/%d\")] + entry.sections)\n numbers_data.append([entry.date.strftime(\"%Y/%m/%d\")] + [entry.sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = 
csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\nscraper = Scraper()\nscraper.scrape()\nprocessor = Processor()\nprocessor.load()\nprocessor.compute()",
"step-ids": [
10,
11,
12,
17,
18
]
}
|
[
10,
11,
12,
17,
18
] |
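A self-contained sketch of the HSV-mask + contour pipeline the scraper uses to count sunspots, run on a synthetic image instead of scraped data (assumes opencv-python and numpy are installed; OpenCV 4 return shape):

import cv2
import numpy as np

img = np.zeros((200, 200, 3), dtype=np.uint8)
cv2.circle(img, (50, 50), 10, (0, 0, 255), -1)   # two red "spots" (BGR)
cv2.circle(img, (150, 120), 8, (0, 0, 255), -1)

hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv, np.array([0, 90, 80]), np.array([10, 255, 255]))
contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
print(len(contours))  # -> 2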
class Solution(object):
def sortArrayByParityII(self, A):
"""
:type A: List[int]
:rtype: List[int]
"""
i = 0
for j in range(1, len(A), 2):
if A[j] % 2 == 1:
continue
else:
while i + 2 < len(A) and A[i] % 2 == 0:
i += 2
A[i], A[j] = A[j], A[i]
i += 2
return A
|
normal
|
{
"blob_id": "429af603bf8f1c003799c3d94c0ce9a2c2f80dfc",
"index": 3835,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def sortArrayByParityII(self, A):\n \"\"\"\n :type A: List[int]\n :rtype: List[int]\n \"\"\"\n i = 0\n for j in range(1, len(A), 2):\n if A[j] % 2 == 1:\n continue\n else:\n while i + 2 < len(A) and A[i] % 2 == 0:\n i += 2\n A[i], A[j] = A[j], A[i]\n i += 2\n return A\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
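Usage sketch for the Solution class above (a hypothetical input; any array with equal counts of even and odd values works):

s = Solution()
print(s.sortArrayByParityII([4, 2, 5, 7]))  # -> [4, 5, 2, 7]: even values at even indices, odd at odd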
'''
This program simulates leveling a DnD character, showing their final HP and stats.
'''
import argparse
import csv
import json
import re
import time
from openpyxl import load_workbook
from pandas import DataFrame
from src import classes, util
def import_race_data(file_path):
'''
    This method imports data from the input CSV and returns a dictionary containing
    all of the data, formatted by race and subrace
Arguments:
:param import_data: (str) The filepath to the data
Returns:
dict: The dictionary of all of the data
'''
retval = {}
# Open csv file and read in all data
with open(file_path) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
race = row['Race']
subrace = row['Subrace']
if(subrace):
if(race in retval):
if('Subraces' not in retval[race]):
retval[race]['Subraces'] = {}
retval[race]['Subraces'][subrace] = row
else:
retval[race] = {'Subraces':{}}
retval[race]['Subraces'][subrace] = row
else:
retval[race] = row
return retval
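# Example of the returned shape (hypothetical rows, for illustration):
#   {'Human': {'Race': 'Human', 'Subrace': '', ...},
#    'Elf': {'Subraces': {'High Elf': {'Race': 'Elf', 'Subrace': 'High Elf', ...}}}}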
def update_mode(args):
'''
This method is the main method for running this program in Update mode.
    Update mode takes in a specifically formatted XLSX file and outputs a JSON
file containing all of the data for races and subraces needed by the
program in run mode
Arguments:
:param args: (dict) A dictionary containing the needed arguments
Returns:
        bool: Whether or not the update completed successfully
'''
# Lets first open the workbook
try:
workbook = load_workbook(args['xlsx_file'])
except:
return False
# Now turn the Race sheet into a dataframe
df = DataFrame()
for name in workbook.sheetnames:
if('Race' in name):
df = DataFrame(workbook[name].values)
# If we find nothing, return failure
if(df.empty):
return False
# Lets remove the title row
df.drop(0, axis=0, inplace=True)
df.reset_index(inplace=True, drop=True)
    # Now let's get the headers, find the last column, and remove this row
    end_col = (df.iloc[0, :].values == None).argmax()
    df.drop(df.columns[end_col:], axis=1, inplace=True)
df.columns = list(df.iloc[0, :])
df.drop(0, axis=0, inplace=True)
df.reset_index(inplace=True, drop=True)
    # Now let's resize this dataframe to only contain the information we want.
    # We first scan down the rows to find the first blank cell, which marks
    # the end of the rows.
end_row = (df.iloc[:, 0].values == None).argmax()
df.drop(df[end_row:].index, axis=0, inplace=True)
# Now let's get the race names and source names
hyperlink_re = re.compile(r'(?<=,")(.+)(?=")')
df['Race'] = df['Race'].apply(
lambda x: x if hyperlink_re.search(x) is None else hyperlink_re.search(x).group(1)
)
df['Source'] = df['Source'].apply(
lambda x: x if hyperlink_re.search(x) is None else hyperlink_re.search(x).group(1)
)
    # TODO: validate that the stat fields are integers (not yet implemented)
# Loop through dataframe so we can assemble the json in the format we want
data = {}
asi_re = re.compile(r'ASI: ([+-]\d) \(x(\d)\)(?:\s{1}\((.+)\))?')
for index, row in df.iterrows():
# First lets index this record into the correct spot in the array
row = dict(row)
race = row['Race']
subrace = row['Subrace']
if(subrace):
if(race in data):
if('Subraces' not in data[race]):
data[race]['Subraces'] = {}
data[race]['Subraces'][subrace] = row
else:
data[race] = {'Subraces':{}}
data[race]['Subraces'][subrace] = row
else:
data[race] = row
# Now that we have added this row, check if there are any special ASI rules to note
if(row['Additional'] is not None):
matches = asi_re.search(row['Additional'])
if(matches):
# We found something
asi = {'size': matches.group(1), 'number': matches.group(2)}
# Check if we have restrictions
if(matches.group(3)):
# We either can put the point into a number of options, or not
# into one stat
if('-' in matches.group(3)):
# We cannot use this stat
asi['not_allowed'] = matches.group(3).split('-')[1]
if('|' in matches.group(3)):
# We can only use one or the other
asi['allowed'] = [x.capitalize() for x in matches.group(3).split(' | ')]
# Now add this to the row of data
if(subrace):
data[race]['Subraces'][subrace]['ASI'] = asi
else:
data[race]['ASI'] = asi
# Done! Let's dump this file
with open('race_data.json', 'w') as fp:
json.dump(data, fp, indent=2)
return True
def run_mode(args):
'''
This method is the main method for running this program in Run mode.
This mode goes through the character simulation
Arguments:
:param args: (dict) A dictionary containing the needed arguments
'''
char = classes.Character(
"Human", None, ['Str','Dex','Con','Int','Wis','Cha'],
classes.StatSelection.ROLL_4D6_DROP_ONE, classes.HPSelection.ROLL_HP,
classes.ASISelection.STRICT_FOCUS
)
print(char.id)
print(char.stats)
char = classes.Character(
"Human", "Variant", ['Str','Dex','Con','Int','Wis','Cha'],
classes.StatSelection.ROLL_3D6, classes.HPSelection.ROLL_HP,
classes.ASISelection.FOCUS_ODD_TO_EVEN
)
print(char.id)
print(char.stats)
if __name__ == "__main__":
# Setup argument parsers and parse arguments
main_parser = argparse.ArgumentParser(description='Character Simulator')
subparsers = main_parser.add_subparsers(help='Mode Help')
update_parser = subparsers.add_parser('update', help='Update Help')
update_parser.add_argument('xlsx_file', type=str, help='Path to the .xlsx race file')
run_parser = subparsers.add_parser('run', help='Run Help')
args = vars(main_parser.parse_args())
# If we are in update mode, update the json file
if('xlsx_file' in args):
update_mode(args)
else:
run_mode(args)
|
normal
|
{
"blob_id": "022c8d6c31ad5494b03bfe93d17396eac25b011e",
"index": 8706,
"step-1": "<mask token>\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n print(char.id)\n print(char.stats)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef import_race_data(file_path):\n \"\"\"\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n \"\"\"\n retval = {}\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in retval:\n if 'Subraces' not in retval[race]:\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces': {}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n return retval\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, 
classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n print(char.id)\n print(char.stats)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef import_race_data(file_path):\n \"\"\"\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n \"\"\"\n retval = {}\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in retval:\n if 'Subraces' not in retval[race]:\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces': {}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n return retval\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, 
classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n print(char.id)\n print(char.stats)\n\n\nif __name__ == '__main__':\n main_parser = argparse.ArgumentParser(description='Character Simulator')\n subparsers = main_parser.add_subparsers(help='Mode Help')\n update_parser = subparsers.add_parser('update', help='Update Help')\n update_parser.add_argument('xlsx_file', type=str, help=\n 'Path to the .xlsx race file')\n run_parser = subparsers.add_parser('run', help='Run Help')\n args = vars(main_parser.parse_args())\n if 'xlsx_file' in args:\n update_mode(args)\n else:\n run_mode(args)\n",
"step-4": "<mask token>\nimport argparse\nimport csv\nimport json\nimport re\nimport time\nfrom openpyxl import load_workbook\nfrom pandas import DataFrame\nfrom src import classes, util\n\n\ndef import_race_data(file_path):\n \"\"\"\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n \"\"\"\n retval = {}\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in retval:\n if 'Subraces' not in retval[race]:\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces': {}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n return retval\n\n\ndef update_mode(args):\n \"\"\"\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n \"\"\"\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n df = DataFrame()\n for name in workbook.sheetnames:\n if 'Race' in name:\n df = DataFrame(workbook[name].values)\n if df.empty:\n return False\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n hyperlink_re = re.compile('(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n df['Source'] = df['Source'].apply(lambda x: x if hyperlink_re.search(x) is\n None else hyperlink_re.search(x).group(1))\n data = {}\n asi_re = re.compile('ASI: ([+-]\\\\d) \\\\(x(\\\\d)\\\\)(?:\\\\s{1}\\\\((.+)\\\\))?')\n for index, row in df.iterrows():\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n if subrace:\n if race in data:\n if 'Subraces' not in data[race]:\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces': {}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n if row['Additional'] is not None:\n matches = asi_re.search(row['Additional'])\n if matches:\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n if matches.group(3):\n if '-' in matches.group(3):\n asi['not_allowed'] = matches.group(3).split('-')[1]\n if '|' in matches.group(3):\n asi['allowed'] = [x.capitalize() for x in matches.\n group(3).split(' | ')]\n if subrace:\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n return True\n\n\ndef run_mode(args):\n \"\"\"\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n \"\"\"\n char = 
classes.Character('Human', None, ['Str', 'Dex', 'Con', 'Int',\n 'Wis', 'Cha'], classes.StatSelection.ROLL_4D6_DROP_ONE, classes.\n HPSelection.ROLL_HP, classes.ASISelection.STRICT_FOCUS)\n print(char.id)\n print(char.stats)\n char = classes.Character('Human', 'Variant', ['Str', 'Dex', 'Con',\n 'Int', 'Wis', 'Cha'], classes.StatSelection.ROLL_3D6, classes.\n HPSelection.ROLL_HP, classes.ASISelection.FOCUS_ODD_TO_EVEN)\n print(char.id)\n print(char.stats)\n\n\nif __name__ == '__main__':\n main_parser = argparse.ArgumentParser(description='Character Simulator')\n subparsers = main_parser.add_subparsers(help='Mode Help')\n update_parser = subparsers.add_parser('update', help='Update Help')\n update_parser.add_argument('xlsx_file', type=str, help=\n 'Path to the .xlsx race file')\n run_parser = subparsers.add_parser('run', help='Run Help')\n args = vars(main_parser.parse_args())\n if 'xlsx_file' in args:\n update_mode(args)\n else:\n run_mode(args)\n",
"step-5": "'''\nThis program will simulate leveling a DnD character, showing their ending HP, and stats.\n'''\nimport argparse\nimport csv\nimport json\nimport re\nimport time\nfrom openpyxl import load_workbook\nfrom pandas import DataFrame\nfrom src import classes, util\n\n\ndef import_race_data(file_path):\n '''\n This method imports data from the inputed CSV and returns a dictionary containing\n all of the data formated by race and subrace\n\n Arguments:\n :param import_data: (str) The filepath to the data\n\n Returns:\n dict: The dictionary of all of the data\n '''\n retval = {}\n\n # Open csv file and read in all data\n with open(file_path) as csv_file:\n reader = csv.DictReader(csv_file)\n for row in reader:\n race = row['Race']\n subrace = row['Subrace']\n\n if(subrace):\n if(race in retval):\n if('Subraces' not in retval[race]):\n retval[race]['Subraces'] = {}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = {'Subraces':{}}\n retval[race]['Subraces'][subrace] = row\n else:\n retval[race] = row\n\n return retval\n\ndef update_mode(args):\n '''\n This method is the main method for running this program in Update mode.\n\n Update mode takes in a specifically formated XLSX file and outputs a JSON\n file containing all of the data for races and subraces needed by the\n program in run mode\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n\n Returns:\n bool: Whether or not the update completed successfully or not\n '''\n # Lets first open the workbook\n try:\n workbook = load_workbook(args['xlsx_file'])\n except:\n return False\n\n # Now turn the Race sheet into a dataframe\n df = DataFrame()\n for name in workbook.sheetnames:\n if('Race' in name):\n df = DataFrame(workbook[name].values)\n\n # If we find nothing, return failure\n if(df.empty):\n return False\n\n # Lets remove the title row\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n\n # Now lets get the headers, find the last column, and remove this row\n end_col = (df.iloc[0, :].values == None).argmax()\n df.drop(df.iloc[:, end_col:], axis=1, inplace=True)\n df.columns = list(df.iloc[0, :])\n df.drop(0, axis=0, inplace=True)\n df.reset_index(inplace=True, drop=True)\n\n # Now lets resize this dataframe to only contain the information we want\n # We first scroll down the rows to find the first blank cell, that is the\n # end of the rows\n end_row = (df.iloc[:, 0].values == None).argmax()\n df.drop(df[end_row:].index, axis=0, inplace=True)\n\n # Now let's get the race names and source names\n hyperlink_re = re.compile(r'(?<=,\")(.+)(?=\")')\n df['Race'] = df['Race'].apply(\n lambda x: x if hyperlink_re.search(x) is None else hyperlink_re.search(x).group(1)\n )\n df['Source'] = df['Source'].apply(\n lambda x: x if hyperlink_re.search(x) is None else hyperlink_re.search(x).group(1)\n )\n\n # Now make sure the stat fields are correct integers\n\n # Loop through dataframe so we can assemble the json in the format we want\n data = {}\n asi_re = re.compile(r'ASI: ([+-]\\d) \\(x(\\d)\\)(?:\\s{1}\\((.+)\\))?')\n for index, row in df.iterrows():\n # First lets index this record into the correct spot in the array\n row = dict(row)\n race = row['Race']\n subrace = row['Subrace']\n\n if(subrace):\n if(race in data):\n if('Subraces' not in data[race]):\n data[race]['Subraces'] = {}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = {'Subraces':{}}\n data[race]['Subraces'][subrace] = row\n else:\n data[race] = row\n\n # Now that we have added this row, 
check if there are any special ASI rules to note\n if(row['Additional'] is not None):\n matches = asi_re.search(row['Additional'])\n if(matches):\n # We found something\n asi = {'size': matches.group(1), 'number': matches.group(2)}\n\n # Check if we have restrictions\n if(matches.group(3)):\n # We either can put the point into a number of options, or not\n # into one stat\n if('-' in matches.group(3)):\n # We cannot use this stat\n asi['not_allowed'] = matches.group(3).split('-')[1]\n\n if('|' in matches.group(3)):\n # We can only use one or the other\n asi['allowed'] = [x.capitalize() for x in matches.group(3).split(' | ')]\n \n # Now add this to the row of data\n if(subrace):\n data[race]['Subraces'][subrace]['ASI'] = asi\n else:\n data[race]['ASI'] = asi\n\n # Done! Let's dump this file\n with open('race_data.json', 'w') as fp:\n json.dump(data, fp, indent=2)\n\n return True\n\ndef run_mode(args):\n '''\n This method is the main method for running this program in Run mode.\n\n This mode goes through the character simulation\n\n Arguments:\n :param args: (dict) A dictionary containing the needed arguments\n '''\n char = classes.Character(\n \"Human\", None, ['Str','Dex','Con','Int','Wis','Cha'], \n classes.StatSelection.ROLL_4D6_DROP_ONE, classes.HPSelection.ROLL_HP,\n classes.ASISelection.STRICT_FOCUS\n )\n print(char.id)\n print(char.stats)\n char = classes.Character(\n \"Human\", \"Variant\", ['Str','Dex','Con','Int','Wis','Cha'], \n classes.StatSelection.ROLL_3D6, classes.HPSelection.ROLL_HP,\n classes.ASISelection.FOCUS_ODD_TO_EVEN\n )\n print(char.id)\n print(char.stats)\n\n\nif __name__ == \"__main__\":\n # Setup argument parsers and parse arguments\n main_parser = argparse.ArgumentParser(description='Character Simulator')\n subparsers = main_parser.add_subparsers(help='Mode Help')\n\n update_parser = subparsers.add_parser('update', help='Update Help')\n update_parser.add_argument('xlsx_file', type=str, help='Path to the .xlsx race file')\n\n run_parser = subparsers.add_parser('run', help='Run Help')\n\n args = vars(main_parser.parse_args())\n\n # If we are in update mode, update the json file\n if('xlsx_file' in args):\n update_mode(args)\n else:\n run_mode(args)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
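To make the ASI parsing in update_mode concrete, here is the same regex run against a made-up 'Additional' cell value ('ASI: +1 (x2) (Str | Dex)' is an assumption for illustration, not a row from the real spreadsheet):

import re

asi_re = re.compile(r'ASI: ([+-]\d) \(x(\d)\)(?:\s{1}\((.+)\))?')
m = asi_re.search('ASI: +1 (x2) (Str | Dex)')
print(m.group(1), m.group(2))                             # +1 2
print([x.capitalize() for x in m.group(3).split(' | ')])  # ['Str', 'Dex']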
from collections import namedtuple
from os import getenv
from pathlib import Path
TMP = getenv("TMP", "/tmp")
PYBITES_FAKER_DIR = Path(getenv("PYBITES_FAKER_DIR", TMP))
CACHE_FILENAME = "pybites-fake-data.pkl"
FAKE_DATA_CACHE = PYBITES_FAKER_DIR / CACHE_FILENAME
BITE_FEED = "https://codechalleng.es/api/bites/"
BLOG_FEED = "https://pybit.es/feeds/all.rss.xml"
Bite = namedtuple("Bite", "number title level")
Article = namedtuple("Article", "author title tags")
|
normal
|
{
"blob_id": "7336b8dec95d23cbcebbff2a813bbbd5575ba58f",
"index": 2327,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nTMP = getenv('TMP', '/tmp')\nPYBITES_FAKER_DIR = Path(getenv('PYBITES_FAKER_DIR', TMP))\nCACHE_FILENAME = 'pybites-fake-data.pkl'\nFAKE_DATA_CACHE = PYBITES_FAKER_DIR / CACHE_FILENAME\nBITE_FEED = 'https://codechalleng.es/api/bites/'\nBLOG_FEED = 'https://pybit.es/feeds/all.rss.xml'\nBite = namedtuple('Bite', 'number title level')\nArticle = namedtuple('Article', 'author title tags')\n",
"step-3": "from collections import namedtuple\nfrom os import getenv\nfrom pathlib import Path\nTMP = getenv('TMP', '/tmp')\nPYBITES_FAKER_DIR = Path(getenv('PYBITES_FAKER_DIR', TMP))\nCACHE_FILENAME = 'pybites-fake-data.pkl'\nFAKE_DATA_CACHE = PYBITES_FAKER_DIR / CACHE_FILENAME\nBITE_FEED = 'https://codechalleng.es/api/bites/'\nBLOG_FEED = 'https://pybit.es/feeds/all.rss.xml'\nBite = namedtuple('Bite', 'number title level')\nArticle = namedtuple('Article', 'author title tags')\n",
"step-4": "from collections import namedtuple\nfrom os import getenv\nfrom pathlib import Path\n\nTMP = getenv(\"TMP\", \"/tmp\")\nPYBITES_FAKER_DIR = Path(getenv(\"PYBITES_FAKER_DIR\", TMP))\nCACHE_FILENAME = \"pybites-fake-data.pkl\"\nFAKE_DATA_CACHE = PYBITES_FAKER_DIR / CACHE_FILENAME\nBITE_FEED = \"https://codechalleng.es/api/bites/\"\nBLOG_FEED = \"https://pybit.es/feeds/all.rss.xml\"\n\nBite = namedtuple(\"Bite\", \"number title level\")\nArticle = namedtuple(\"Article\", \"author title tags\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
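The two namedtuples above are plain value objects; a short self-contained sketch with made-up field values:

from collections import namedtuple

Bite = namedtuple("Bite", "number title level")
Article = namedtuple("Article", "author title tags")

bite = Bite(number=101, title="f-strings", level="Beginner")
article = Article(author="PyBites", title="My first Bite", tags=["python"])
print(bite.number, bite.level)  # 101 Beginner
print(article._asdict())        # field -> value mapping of the Article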
# -*- coding: utf-8 -*-
"""
测试如何使用python的pymongo模块操作MongoDB
@author: hch
@date : 2020/10/8
"""
import logging
import time
import traceback
from pprint import pprint
from pymongo import MongoClient
from pymongo.cursor import Cursor
from pymongo.results import DeleteResult, InsertOneResult, UpdateResult
class MongoTest:
client = None
try:
client = MongoClient('mongodb://root:root@localhost:27017/test?authSource=admin')
print('init mongo client:', client)
except Exception as e:
# traceback.print_exc()
logging.exception(e)
@classmethod
def get_connection(cls) -> MongoClient:
return cls.client or MongoClient('mongodb://root:root@localhost:27017/test?authSource=admin')
@classmethod
def insert(cls, db: str, collection: str, data: dict) -> InsertOneResult:
return cls.client.get_database(db).get_collection(collection).insert_one(data)
@classmethod
def find(cls, db: str, collection: str, condition: dict) -> Cursor:
return cls.client.get_database(db).get_collection(collection).find(condition)
@classmethod
def delete(cls, db: str, collection: str, condition: dict) -> DeleteResult:
return cls.client.get_database(db).get_collection(collection).delete_one(condition)
@classmethod
def update(cls, db: str, collection: str, condition: dict, update: dict) -> UpdateResult:
return cls.client.get_database(db).get_collection(collection).update_one(condition, update)
if __name__ == '__main__':
# client = MongoTest.get_connection()
# client = MongoClient('mongodb://root@localhost:27017/test?authSource=admin')
# print(client.test.__class__) # <class 'pymongo.database.Database'>
# print(client.test.inventory.__class__) # <class 'pymongo.collection.Collection'>
# client.test.inventory.insert_one(
# {
# "item": "pymongo",
# "qty": 100,
# "tags": ["cotton"],
# "size": {"h": 28, "w": 35.5, "uom": "cm"}
# }
# )
# MongoTest.insert('test', 'inventory',
# {
# "item": "pymongo" + time.strftime('%Y%m%d%H%M%S', time.localtime()),
# "qty": 100,
# "tags": ["cotton"],
# "size": {"h": 28, "w": 35.5, "uom": "cm"}
# }
# )
for result in MongoTest.find('test', 'inventory', {}):
pprint(result)
MongoTest.delete('test', 'inventory', {'item': 'pymongo20201008204049'})
MongoTest.update('test', 'inventory', {"item": "pymongo"},
{"$set": {"size.uom": "cm", "status": "P"},
"$currentDate": {"lastModified": True}})
|
normal
|
{
"blob_id": "b46fe26f1a3c9e93e735b752e54132bd95408251",
"index": 2451,
"step-1": "<mask token>\n\n\nclass MongoTest:\n <mask token>\n try:\n client = MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n print('init mongo client:', client)\n except Exception as e:\n logging.exception(e)\n\n @classmethod\n def get_connection(cls) ->MongoClient:\n return cls.client or MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n\n @classmethod\n def insert(cls, db: str, collection: str, data: dict) ->InsertOneResult:\n return cls.client.get_database(db).get_collection(collection\n ).insert_one(data)\n\n @classmethod\n def find(cls, db: str, collection: str, condition: dict) ->Cursor:\n return cls.client.get_database(db).get_collection(collection).find(\n condition)\n\n @classmethod\n def delete(cls, db: str, collection: str, condition: dict) ->DeleteResult:\n return cls.client.get_database(db).get_collection(collection\n ).delete_one(condition)\n\n @classmethod\n def update(cls, db: str, collection: str, condition: dict, update: dict\n ) ->UpdateResult:\n return cls.client.get_database(db).get_collection(collection\n ).update_one(condition, update)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MongoTest:\n client = None\n try:\n client = MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n print('init mongo client:', client)\n except Exception as e:\n logging.exception(e)\n\n @classmethod\n def get_connection(cls) ->MongoClient:\n return cls.client or MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n\n @classmethod\n def insert(cls, db: str, collection: str, data: dict) ->InsertOneResult:\n return cls.client.get_database(db).get_collection(collection\n ).insert_one(data)\n\n @classmethod\n def find(cls, db: str, collection: str, condition: dict) ->Cursor:\n return cls.client.get_database(db).get_collection(collection).find(\n condition)\n\n @classmethod\n def delete(cls, db: str, collection: str, condition: dict) ->DeleteResult:\n return cls.client.get_database(db).get_collection(collection\n ).delete_one(condition)\n\n @classmethod\n def update(cls, db: str, collection: str, condition: dict, update: dict\n ) ->UpdateResult:\n return cls.client.get_database(db).get_collection(collection\n ).update_one(condition, update)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MongoTest:\n client = None\n try:\n client = MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n print('init mongo client:', client)\n except Exception as e:\n logging.exception(e)\n\n @classmethod\n def get_connection(cls) ->MongoClient:\n return cls.client or MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n\n @classmethod\n def insert(cls, db: str, collection: str, data: dict) ->InsertOneResult:\n return cls.client.get_database(db).get_collection(collection\n ).insert_one(data)\n\n @classmethod\n def find(cls, db: str, collection: str, condition: dict) ->Cursor:\n return cls.client.get_database(db).get_collection(collection).find(\n condition)\n\n @classmethod\n def delete(cls, db: str, collection: str, condition: dict) ->DeleteResult:\n return cls.client.get_database(db).get_collection(collection\n ).delete_one(condition)\n\n @classmethod\n def update(cls, db: str, collection: str, condition: dict, update: dict\n ) ->UpdateResult:\n return cls.client.get_database(db).get_collection(collection\n ).update_one(condition, update)\n\n\nif __name__ == '__main__':\n for result in MongoTest.find('test', 'inventory', {}):\n pprint(result)\n MongoTest.delete('test', 'inventory', {'item': 'pymongo20201008204049'})\n MongoTest.update('test', 'inventory', {'item': 'pymongo'}, {'$set': {\n 'size.uom': 'cm', 'status': 'P'}, '$currentDate': {'lastModified': \n True}})\n",
"step-4": "<mask token>\nimport logging\nimport time\nimport traceback\nfrom pprint import pprint\nfrom pymongo import MongoClient\nfrom pymongo.cursor import Cursor\nfrom pymongo.results import DeleteResult, InsertOneResult, UpdateResult\n\n\nclass MongoTest:\n client = None\n try:\n client = MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n print('init mongo client:', client)\n except Exception as e:\n logging.exception(e)\n\n @classmethod\n def get_connection(cls) ->MongoClient:\n return cls.client or MongoClient(\n 'mongodb://root:root@localhost:27017/test?authSource=admin')\n\n @classmethod\n def insert(cls, db: str, collection: str, data: dict) ->InsertOneResult:\n return cls.client.get_database(db).get_collection(collection\n ).insert_one(data)\n\n @classmethod\n def find(cls, db: str, collection: str, condition: dict) ->Cursor:\n return cls.client.get_database(db).get_collection(collection).find(\n condition)\n\n @classmethod\n def delete(cls, db: str, collection: str, condition: dict) ->DeleteResult:\n return cls.client.get_database(db).get_collection(collection\n ).delete_one(condition)\n\n @classmethod\n def update(cls, db: str, collection: str, condition: dict, update: dict\n ) ->UpdateResult:\n return cls.client.get_database(db).get_collection(collection\n ).update_one(condition, update)\n\n\nif __name__ == '__main__':\n for result in MongoTest.find('test', 'inventory', {}):\n pprint(result)\n MongoTest.delete('test', 'inventory', {'item': 'pymongo20201008204049'})\n MongoTest.update('test', 'inventory', {'item': 'pymongo'}, {'$set': {\n 'size.uom': 'cm', 'status': 'P'}, '$currentDate': {'lastModified': \n True}})\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\n测试如何使用python的pymongo模块操作MongoDB\n\n@author: hch\n@date : 2020/10/8\n\"\"\"\nimport logging\nimport time\nimport traceback\nfrom pprint import pprint\n\nfrom pymongo import MongoClient\nfrom pymongo.cursor import Cursor\nfrom pymongo.results import DeleteResult, InsertOneResult, UpdateResult\n\n\nclass MongoTest:\n client = None\n\n try:\n client = MongoClient('mongodb://root:root@localhost:27017/test?authSource=admin')\n print('init mongo client:', client)\n except Exception as e:\n # traceback.print_exc()\n logging.exception(e)\n\n @classmethod\n def get_connection(cls) -> MongoClient:\n return cls.client or MongoClient('mongodb://root:root@localhost:27017/test?authSource=admin')\n\n @classmethod\n def insert(cls, db: str, collection: str, data: dict) -> InsertOneResult:\n return cls.client.get_database(db).get_collection(collection).insert_one(data)\n\n @classmethod\n def find(cls, db: str, collection: str, condition: dict) -> Cursor:\n return cls.client.get_database(db).get_collection(collection).find(condition)\n\n @classmethod\n def delete(cls, db: str, collection: str, condition: dict) -> DeleteResult:\n return cls.client.get_database(db).get_collection(collection).delete_one(condition)\n\n @classmethod\n def update(cls, db: str, collection: str, condition: dict, update: dict) -> UpdateResult:\n return cls.client.get_database(db).get_collection(collection).update_one(condition, update)\n\n\nif __name__ == '__main__':\n # client = MongoTest.get_connection()\n # client = MongoClient('mongodb://root@localhost:27017/test?authSource=admin')\n # print(client.test.__class__) # <class 'pymongo.database.Database'>\n # print(client.test.inventory.__class__) # <class 'pymongo.collection.Collection'>\n\n # client.test.inventory.insert_one(\n # {\n # \"item\": \"pymongo\",\n # \"qty\": 100,\n # \"tags\": [\"cotton\"],\n # \"size\": {\"h\": 28, \"w\": 35.5, \"uom\": \"cm\"}\n # }\n # )\n\n # MongoTest.insert('test', 'inventory',\n # {\n # \"item\": \"pymongo\" + time.strftime('%Y%m%d%H%M%S', time.localtime()),\n # \"qty\": 100,\n # \"tags\": [\"cotton\"],\n # \"size\": {\"h\": 28, \"w\": 35.5, \"uom\": \"cm\"}\n # }\n # )\n for result in MongoTest.find('test', 'inventory', {}):\n pprint(result)\n MongoTest.delete('test', 'inventory', {'item': 'pymongo20201008204049'})\n MongoTest.update('test', 'inventory', {\"item\": \"pymongo\"},\n {\"$set\": {\"size.uom\": \"cm\", \"status\": \"P\"},\n \"$currentDate\": {\"lastModified\": True}})\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
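The pymongo result objects returned by the helpers above carry useful counters; a sketch that assumes a reachable MongoDB with the same credentials as the class:

ins = MongoTest.insert('test', 'inventory', {'item': 'demo', 'qty': 1})
print(ins.inserted_id)  # ObjectId of the new document

res = MongoTest.update('test', 'inventory', {'item': 'demo'},
                       {'$set': {'status': 'P'}})
print(res.matched_count, res.modified_count)  # 1 1 if the document was changed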
from django.db import models
from django.utils import timezone
from pprint import pprint
class Cast(models.Model):
name = models.CharField(max_length=50, blank=True, null=True)
image = models.ImageField(upload_to='cast', blank=True, null=True)
description = models.CharField(max_length=400, blank=True, null=True)
def __str__(self):
return self.name
class Issue(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
image = models.ImageField(upload_to='issues', blank=True, null=True)
issue_number = models.IntegerField(blank=True, null=True)
def __str__(self):
return self.title
class Comic(models.Model):
MAX_PAGES_PER_ISSUE = 1000
    sort_number = models.IntegerField(blank=True, null=True)
    page_number = models.IntegerField(blank=True, null=True)
    last_page = models.IntegerField(default=1)
    title = models.CharField(max_length=200, blank=True, null=True)
    issue = models.ForeignKey(Issue, blank=True, null=True, on_delete=models.DO_NOTHING)
    image = models.ImageField(upload_to='comics', blank=True, null=True)
    date_added = models.DateTimeField(
        help_text="Posted on: ",
        default=timezone.now, null=True, blank=True
    )
    cast_members = models.ManyToManyField(Cast, related_name="comics", blank=True)
class Meta:
ordering = ['-sort_number', '-date_added']
def __str__(self):
return self.title
@staticmethod
def sortOrder(page_number):
# TODO: ADD ISSUE 3 LOGIC WHEN WE GET THERE
if int(page_number) < 33:
issue_num = 1
else:
issue_num = 2
# print('ISSUE NUM: ', issue_num)
order = issue_num * Comic.MAX_PAGES_PER_ISSUE + int(page_number)
# print ('SORT ORDER: ', order)
return order
def save(self, *args, **kwargs):
self.sort_number = Comic.sortOrder(self.page_number)
super(Comic, self).save(*args, **kwargs) # Call the "real" save() method.
class ComicManager(models.Model):
last_page = models.IntegerField(default=1)
class Meta:
verbose_name_plural = ("Comic Manager")
def __str__(self):
return str(self.last_page)
def save(self, *args, **kwargs):
super(ComicManager, self).save(*args, **kwargs)
# TODO - automate this so that anytime a comic is saved it checks last page status and runs here
# update all Comic instances to have this last page
comics = Comic.objects.all()
for comic in comics:
if comic.last_page < self.last_page:
comic.last_page = self.last_page
comic.save()
class HeaderImage(models.Model):
title = models.CharField(max_length=100, blank=True, null=True)
image = models.ImageField(upload_to='images', blank=True, null=True)
class Meta:
        verbose_name_plural = 'Header Images'
def __str__(self):
return self.title
|
normal
|
{
"blob_id": "45dc9d362a2ddfd408f93452bda0b7338057ca81",
"index": 8322,
"step-1": "<mask token>\n\n\nclass Comic(models.Model):\n MAX_PAGES_PER_ISSUE = 1000\n sort_number = models.IntegerField(blank=True, null=True)\n page_number = models.IntegerField(blank=True, null=True)\n last_page = models.IntegerField(default=1)\n title = models.CharField(max_length=200, blank=True, null=True)\n issue = models.ForeignKey(Issue, blank=True, null=True, on_delete=\n models.DO_NOTHING)\n image = models.ImageField(upload_to='comics', blank=True, null=True)\n date_added = models.DateTimeField(help_text='Posted on: ', default=\n timezone.now, null=True, blank=True)\n cast_members = models.ManyToManyField(Cast, related_name='comics',\n blank=True)\n\n\n class Meta:\n ordering = ['-sort_number', '-date_added']\n\n def __str__(self):\n return self.title\n\n @staticmethod\n def sortOrder(page_number):\n if int(page_number) < 33:\n issue_num = 1\n else:\n issue_num = 2\n order = issue_num * Comic.MAX_PAGES_PER_ISSUE + int(page_number)\n return order\n\n def save(self, *args, **kwargs):\n self.sort_number = Comic.sortOrder(self.page_number)\n super(Comic, self).save(*args, **kwargs)\n\n\nclass ComicManager(models.Model):\n last_page = models.IntegerField(default=1)\n\n\n class Meta:\n verbose_name_plural = 'Comic Manager'\n\n def __str__(self):\n return str(self.last_page)\n\n def save(self, *args, **kwargs):\n super(ComicManager, self).save(*args, **kwargs)\n comics = Comic.objects.all()\n for comic in comics:\n if comic.last_page < self.last_page:\n comic.last_page = self.last_page\n comic.save()\n\n\nclass HeaderImage(models.Model):\n title = models.CharField(max_length=100, blank=True, null=True)\n image = models.ImageField(upload_to='images', blank=True, null=True)\n\n\n class Meta:\n verbose_name_plural = 'Header Images'\n\n def __str__(self):\n return self.title\n",
"step-2": "<mask token>\n\n\nclass Issue(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Comic(models.Model):\n MAX_PAGES_PER_ISSUE = 1000\n sort_number = models.IntegerField(blank=True, null=True)\n page_number = models.IntegerField(blank=True, null=True)\n last_page = models.IntegerField(default=1)\n title = models.CharField(max_length=200, blank=True, null=True)\n issue = models.ForeignKey(Issue, blank=True, null=True, on_delete=\n models.DO_NOTHING)\n image = models.ImageField(upload_to='comics', blank=True, null=True)\n date_added = models.DateTimeField(help_text='Posted on: ', default=\n timezone.now, null=True, blank=True)\n cast_members = models.ManyToManyField(Cast, related_name='comics',\n blank=True)\n\n\n class Meta:\n ordering = ['-sort_number', '-date_added']\n\n def __str__(self):\n return self.title\n\n @staticmethod\n def sortOrder(page_number):\n if int(page_number) < 33:\n issue_num = 1\n else:\n issue_num = 2\n order = issue_num * Comic.MAX_PAGES_PER_ISSUE + int(page_number)\n return order\n\n def save(self, *args, **kwargs):\n self.sort_number = Comic.sortOrder(self.page_number)\n super(Comic, self).save(*args, **kwargs)\n\n\nclass ComicManager(models.Model):\n last_page = models.IntegerField(default=1)\n\n\n class Meta:\n verbose_name_plural = 'Comic Manager'\n\n def __str__(self):\n return str(self.last_page)\n\n def save(self, *args, **kwargs):\n super(ComicManager, self).save(*args, **kwargs)\n comics = Comic.objects.all()\n for comic in comics:\n if comic.last_page < self.last_page:\n comic.last_page = self.last_page\n comic.save()\n\n\nclass HeaderImage(models.Model):\n title = models.CharField(max_length=100, blank=True, null=True)\n image = models.ImageField(upload_to='images', blank=True, null=True)\n\n\n class Meta:\n verbose_name_plural = 'Header Images'\n\n def __str__(self):\n return self.title\n",
"step-3": "<mask token>\n\n\nclass Issue(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.title\n\n\nclass Comic(models.Model):\n MAX_PAGES_PER_ISSUE = 1000\n sort_number = models.IntegerField(blank=True, null=True)\n page_number = models.IntegerField(blank=True, null=True)\n last_page = models.IntegerField(default=1)\n title = models.CharField(max_length=200, blank=True, null=True)\n issue = models.ForeignKey(Issue, blank=True, null=True, on_delete=\n models.DO_NOTHING)\n image = models.ImageField(upload_to='comics', blank=True, null=True)\n date_added = models.DateTimeField(help_text='Posted on: ', default=\n timezone.now, null=True, blank=True)\n cast_members = models.ManyToManyField(Cast, related_name='comics',\n blank=True)\n\n\n class Meta:\n ordering = ['-sort_number', '-date_added']\n\n def __str__(self):\n return self.title\n\n @staticmethod\n def sortOrder(page_number):\n if int(page_number) < 33:\n issue_num = 1\n else:\n issue_num = 2\n order = issue_num * Comic.MAX_PAGES_PER_ISSUE + int(page_number)\n return order\n\n def save(self, *args, **kwargs):\n self.sort_number = Comic.sortOrder(self.page_number)\n super(Comic, self).save(*args, **kwargs)\n\n\nclass ComicManager(models.Model):\n last_page = models.IntegerField(default=1)\n\n\n class Meta:\n verbose_name_plural = 'Comic Manager'\n\n def __str__(self):\n return str(self.last_page)\n\n def save(self, *args, **kwargs):\n super(ComicManager, self).save(*args, **kwargs)\n comics = Comic.objects.all()\n for comic in comics:\n if comic.last_page < self.last_page:\n comic.last_page = self.last_page\n comic.save()\n\n\nclass HeaderImage(models.Model):\n title = models.CharField(max_length=100, blank=True, null=True)\n image = models.ImageField(upload_to='images', blank=True, null=True)\n\n\n class Meta:\n verbose_name_plural = 'Header Images'\n\n def __str__(self):\n return self.title\n",
"step-4": "from django.db import models\nfrom django.utils import timezone\nfrom pprint import pprint\n\n\nclass Cast(models.Model):\n name = models.CharField(max_length=50, blank=True, null=True)\n image = models.ImageField(upload_to='cast', blank=True, null=True)\n description = models.CharField(max_length=400, blank=True, null=True)\n\n def __str__(self):\n return self.name\n\n\nclass Issue(models.Model):\n title = models.CharField(max_length=200, blank=True, null=True)\n image = models.ImageField(upload_to='issues', blank=True, null=True)\n issue_number = models.IntegerField(blank=True, null=True)\n\n def __str__(self):\n return self.title\n\n\nclass Comic(models.Model):\n MAX_PAGES_PER_ISSUE = 1000\n sort_number = models.IntegerField(blank=True, null=True)\n page_number = models.IntegerField(blank=True, null=True)\n last_page = models.IntegerField(default=1)\n title = models.CharField(max_length=200, blank=True, null=True)\n issue = models.ForeignKey(Issue, blank=True, null=True, on_delete=\n models.DO_NOTHING)\n image = models.ImageField(upload_to='comics', blank=True, null=True)\n date_added = models.DateTimeField(help_text='Posted on: ', default=\n timezone.now, null=True, blank=True)\n cast_members = models.ManyToManyField(Cast, related_name='comics',\n blank=True)\n\n\n class Meta:\n ordering = ['-sort_number', '-date_added']\n\n def __str__(self):\n return self.title\n\n @staticmethod\n def sortOrder(page_number):\n if int(page_number) < 33:\n issue_num = 1\n else:\n issue_num = 2\n order = issue_num * Comic.MAX_PAGES_PER_ISSUE + int(page_number)\n return order\n\n def save(self, *args, **kwargs):\n self.sort_number = Comic.sortOrder(self.page_number)\n super(Comic, self).save(*args, **kwargs)\n\n\nclass ComicManager(models.Model):\n last_page = models.IntegerField(default=1)\n\n\n class Meta:\n verbose_name_plural = 'Comic Manager'\n\n def __str__(self):\n return str(self.last_page)\n\n def save(self, *args, **kwargs):\n super(ComicManager, self).save(*args, **kwargs)\n comics = Comic.objects.all()\n for comic in comics:\n if comic.last_page < self.last_page:\n comic.last_page = self.last_page\n comic.save()\n\n\nclass HeaderImage(models.Model):\n title = models.CharField(max_length=100, blank=True, null=True)\n image = models.ImageField(upload_to='images', blank=True, null=True)\n\n\n class Meta:\n verbose_name_plural = 'Header Images'\n\n def __str__(self):\n return self.title\n",
"step-5": "from django.db import models\nfrom django.utils import timezone\nfrom pprint import pprint\n\nclass Cast(models.Model):\n name = models.CharField(max_length=50, blank=True, null=True)\n image = models.ImageField(upload_to='cast', blank=True, null=True)\n description = models.CharField(max_length=400, blank=True, null=True)\n\n def __str__(self):\n return self.name\n\n\nclass Issue(models.Model):\n title = models.CharField(max_length=200, blank=True, null=True)\n image = models.ImageField(upload_to='issues', blank=True, null=True)\n issue_number = models.IntegerField(blank=True, null=True)\n\n def __str__(self):\n return self.title\n\n\nclass Comic(models.Model):\n MAX_PAGES_PER_ISSUE = 1000\n sort_number = models.IntegerField(blank=True, null=True)\n page_number = models.IntegerField(blank=True, null=True )\n last_page = models.IntegerField(default=1)\n title = models.CharField(max_length=200, blank=True, null=True)\n issue = models.ForeignKey(Issue, blank=True, null=True, on_delete=models.DO_NOTHING)\n image = models.ImageField(upload_to='comics', blank=True, null=True)\n date_added = models.DateTimeField(\n help_text=\"Posted on: \",\n default = timezone.now, null=True, blank=True \n )\n cast_members = models.ManyToManyField(Cast, related_name=\"comics\", blank=True)\n\n class Meta:\n ordering = ['-sort_number', '-date_added']\n\n def __str__(self):\n return self.title\n\n @staticmethod\n def sortOrder(page_number):\n # TODO: ADD ISSUE 3 LOGIC WHEN WE GET THERE\n if int(page_number) < 33:\n issue_num = 1\n else:\n issue_num = 2\n # print('ISSUE NUM: ', issue_num)\n order = issue_num * Comic.MAX_PAGES_PER_ISSUE + int(page_number) \n # print ('SORT ORDER: ', order)\n return order\n\n def save(self, *args, **kwargs):\n self.sort_number = Comic.sortOrder(self.page_number)\n super(Comic, self).save(*args, **kwargs) # Call the \"real\" save() method.\n\n\n\nclass ComicManager(models.Model):\n last_page = models.IntegerField(default=1)\n\n class Meta:\n verbose_name_plural = (\"Comic Manager\")\n\n def __str__(self):\n return str(self.last_page)\n\n def save(self, *args, **kwargs):\n super(ComicManager, self).save(*args, **kwargs)\n # TODO - automate this so that anytime a comic is saved it checks last page status and runs here\n # update all Comic instances to have this last page\n comics = Comic.objects.all()\n for comic in comics:\n if comic.last_page < self.last_page:\n comic.last_page = self.last_page\n comic.save()\n\n\n\nclass HeaderImage(models.Model):\n title = models.CharField(max_length=100, blank=True, null=True)\n image = models.ImageField(upload_to='images', blank=True, null=True)\n\n class Meta: \n verbose_name_plural = ('Header Images')\n\n def __str__(self):\n return self.title\n\n",
"step-ids": [
12,
13,
14,
19,
20
]
}
|
[
12,
13,
14,
19,
20
] |
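Comic.sortOrder packs (issue, page) into one sortable integer, so pages from later issues always sort after earlier ones; since it is a staticmethod it can be checked directly (assuming Django settings are configured so the model imports):

print(Comic.sortOrder(10))  # 1 * 1000 + 10 = 1010 (issue 1)
print(Comic.sortOrder(40))  # 2 * 1000 + 40 = 2040 (issue 2)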
import boto3

region = 'us-east-2'
ec2 = boto3.resource('ec2', region)

ImageId = 'ami-07efac79022b86107'
KeyName = 'aws_keypair'
InstanceType = 't2.micro'

# Launch between 1 and 5 instances under the 'Test-ec2-pro' instance profile.
instances = ec2.create_instances(
    ImageId=ImageId,
    MinCount=1,
    MaxCount=5,
    KeyName=KeyName,
    InstanceType=InstanceType,
    IamInstanceProfile={
        'Name': 'Test-ec2-pro',
    }
)
|
normal
|
{
"blob_id": "b7606befe123c4fb6840a1bc62e43e6721edfcc3",
"index": 5005,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nregion = 'us-east-2'\nec2 = boto3.resource('ec2', region)\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\ninstances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5,\n KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name':\n 'Test-ec2-pro'})\n",
"step-3": "import boto3\nimport json\nregion = 'us-east-2'\nec2 = boto3.resource('ec2', region)\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\ninstances = ec2.create_instances(ImageId=ImageId, MinCount=1, MaxCount=5,\n KeyName=KeyName, InstanceType=InstanceType, IamInstanceProfile={'Name':\n 'Test-ec2-pro'})\n",
"step-4": "import boto3\nimport json\n\nregion = 'us-east-2'\n\nec2 = boto3.resource('ec2',region)\n\nImageId = 'ami-07efac79022b86107'\nKeyName = 'aws_keypair'\nInstanceType = 't2.micro'\n#IamInstanceProfile =\ninstances = ec2.create_instances(\n ImageId =ImageId,\n MinCount = 1,\n MaxCount = 5,\n KeyName = KeyName,\n InstanceType = InstanceType,\n IamInstanceProfile = {\n 'Name' : 'Test-ec2-pro',\n\n }\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
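create_instances returns a list of boto3 Instance resources, which can be inspected or waited on; a short sketch (instance IDs will vary per launch):

for instance in instances:
    print(instance.id)             # e.g. i-0abc123def456...
    instance.wait_until_running()  # block until EC2 reports the 'running' state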
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-26 20:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Cbrowser', '0002_links_l_title'),
]
operations = [
migrations.AddField(
model_name='student',
name='dp',
field=models.CharField(default='https://thebenclark.files.wordpress.com/2014/03/facebook-default-no-profile-pic.jpg', max_length=1000),
),
migrations.AddField(
model_name='student',
name='gpa',
field=models.IntegerField(default=0),
),
]
|
normal
|
{
"blob_id": "ffd11d49f8499b4bfec8f17d07b66d899dd23d2e",
"index": 6924,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Cbrowser', '0002_links_l_title')]\n operations = [migrations.AddField(model_name='student', name='dp',\n field=models.CharField(default=\n 'https://thebenclark.files.wordpress.com/2014/03/facebook-default-no-profile-pic.jpg'\n , max_length=1000)), migrations.AddField(model_name='student', name\n ='gpa', field=models.IntegerField(default=0))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('Cbrowser', '0002_links_l_title')]\n operations = [migrations.AddField(model_name='student', name='dp',\n field=models.CharField(default=\n 'https://thebenclark.files.wordpress.com/2014/03/facebook-default-no-profile-pic.jpg'\n , max_length=1000)), migrations.AddField(model_name='student', name\n ='gpa', field=models.IntegerField(default=0))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-02-26 20:13\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('Cbrowser', '0002_links_l_title'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='student',\n name='dp',\n field=models.CharField(default='https://thebenclark.files.wordpress.com/2014/03/facebook-default-no-profile-pic.jpg', max_length=1000),\n ),\n migrations.AddField(\n model_name='student',\n name='gpa',\n field=models.IntegerField(default=0),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
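Once this migration is applied (python manage.py migrate Cbrowser), newly created Student rows pick up the two defaults added above; a sketch with a hypothetical Student import and field set:

from Cbrowser.models import Student  # model assumed from the app above

student = Student.objects.create()   # other required fields omitted for brevity
print(student.gpa)  # 0
print(student.dp)   # the default avatar URL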
import math

def hipotenusa(a, b):
    return math.sqrt(a * a + b * b)

def main():
    # input() returns a string in Python 3, so convert before doing math
    cateto1 = float(input('enter one leg: '))
    cateto2 = float(input('enter the other leg: '))
    print('the hypotenuse is:', hipotenusa(cateto1, cateto2))

main()
|
normal
|
{
"blob_id": "50ae2b4c6d51451031fc31ebbc43c820da54d827",
"index": 7898,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n cateto1 = input('dime un cateto')\n cateto2 = input('dime el otro cateto')\n print('la hipotenusa es: '), hipotenusa(cateto1, cateto2)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hipotenusa(a, b):\n return math.sqrt(a * a + b * b)\n\n\ndef main():\n cateto1 = input('dime un cateto')\n cateto2 = input('dime el otro cateto')\n print('la hipotenusa es: '), hipotenusa(cateto1, cateto2)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef hipotenusa(a, b):\n return math.sqrt(a * a + b * b)\n\n\ndef main():\n cateto1 = input('dime un cateto')\n cateto2 = input('dime el otro cateto')\n print('la hipotenusa es: '), hipotenusa(cateto1, cateto2)\n\n\nmain()\n",
"step-5": "import math\r\ndef hipotenusa(a,b):\r\n return math.sqrt((a*a)+(b*b))\r\n\r\ndef main():\r\n cateto1=input('dime un cateto')\r\n cateto2=input('dime el otro cateto')\r\n print ('la hipotenusa es: '),hipotenusa(cateto1,cateto2)\r\n\r\nmain()\r\n",
"step-ids": [
0,
1,
2,
3,
5
]
}
|
[
0,
1,
2,
3,
5
] |
import jiml.cli


def write_file(path, text):
    path.write_text(text)
    return path


def test_argparse(tmp_path):
tmpl = write_file(tmp_path / 't.yaml', 'key: {{ var }}')
inp = write_file(tmp_path / 'i.json', '{"var": "Hello!"}')
out = tmp_path / 'o.json'
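    # run the CLI in-process: parse_args consumes argv-style tokens and main
    # presumably renders the template against the JSON input, writing to out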
jiml.cli.main(jiml.cli.parse_args(
'-t', str(tmpl),
'-i', str(inp),
'-o', str(out),
))
|
normal
|
{
"blob_id": "700d35f9e941fe9325821a377ec1ca1c245ddaec",
"index": 176,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef write_file(path, text):\n path.write_text(text)\n return path\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef write_file(path, text):\n path.write_text(text)\n return path\n\n\ndef test_argparse(tmp_path):\n tmpl = write_file(tmp_path / 't.yaml', 'key: {{ var }}')\n inp = write_file(tmp_path / 'i.json', '{\"var\": \"Hello!\"}')\n out = tmp_path / 'o.json'\n jiml.cli.main(jiml.cli.parse_args('-t', str(tmpl), '-i', str(inp), '-o',\n str(out)))\n",
"step-4": "import jiml.cli\n\n\ndef write_file(path, text):\n path.write_text(text)\n return path\n\n\ndef test_argparse(tmp_path):\n tmpl = write_file(tmp_path / 't.yaml', 'key: {{ var }}')\n inp = write_file(tmp_path / 'i.json', '{\"var\": \"Hello!\"}')\n out = tmp_path / 'o.json'\n jiml.cli.main(jiml.cli.parse_args('-t', str(tmpl), '-i', str(inp), '-o',\n str(out)))\n",
"step-5": "import jiml.cli\n\n\ndef write_file(path, text):\n path.write_text(text)\n return path\n\n\ndef test_argparse(tmp_path):\n tmpl = write_file(tmp_path / 't.yaml', 'key: {{ var }}')\n inp = write_file(tmp_path / 'i.json', '{\"var\": \"Hello!\"}')\n out = tmp_path / 'o.json'\n\n jiml.cli.main(jiml.cli.parse_args(\n '-t', str(tmpl),\n '-i', str(inp),\n '-o', str(out),\n ))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import unittest
from unittest.mock import mock_open, patch, call

import compare_apple_music_and_spotify as music_compare
class get_apple_music_data(unittest.TestCase):
def test_open_file(self):
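        # mock_open stands in for builtins.open, so the parser never touches disk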
with patch("builtins.open", mock_open(read_data="data")) as mock_file:
apple_music_data_parser = music_compare.AppleMusicDataParser()
apple_music_data_parser.create("/apple_music")
assert open("/apple_music").read() == "data"
mock_file.assert_called_with("/apple_music")
def test_save_one_artist_from_line(self):
with patch("builtins.open", mock_open(read_data="""<key>Sort Artist</key><string>Drew Goddard</string>""")):
apple_music_data_parser = music_compare.AppleMusicDataParser()
apple_music_data_parser.create("/apple_music")
self.assertEqual("Drew Goddard", apple_music_data_parser.one_song_and_artist.get('Artist'))
def test_save_one_song(self):
with patch("builtins.open",
mock_open(read_data="""<key>Sort Name</key><string>The Cabin In the Woods</string>""")):
apple_music_data_parser = music_compare.AppleMusicDataParser()
apple_music_data_parser.create("/apple_music")
self.assertEqual("The Cabin In the Woods", apple_music_data_parser.one_song_and_artist.get('Song'))
def test_save_one_song_and_artist(self):
with patch("builtins.open", mock_open(read_data="""<key>Sort Artist</key><string>Drew Goddard</string>
<key>Sort Name</key><string>The Cabin In the Woods</string>""")):
apple_music_data_parser = music_compare.AppleMusicDataParser()
apple_music_data_parser.create("/apple_music")
self.assertEqual([{'Artist': "Drew Goddard", 'Song': "The Cabin In the Woods"}],
apple_music_data_parser.all_songs_and_artists)
def test_save_several_songs_and_artists(self):
with patch("builtins.open", mock_open(read_data='''<key>Sort Name</key><string>The Cabin In the Woods</string>
<key>Sort Artist</key><string>Drew Goddard</string>
<key>Sort Name</key><string>Pulp Fiction</string>
<key>Sort Artist</key><string>Quentin Tarantino</string>''')):
apple_music_data_parser = music_compare.AppleMusicDataParser()
apple_music_data_parser.create("/apple_music")
self.assertEqual([{'Artist': "Drew Goddard", 'Song': "The Cabin In the Woods"},
{'Artist': "Quentin Tarantino", 'Song': "Pulp Fiction"}],
apple_music_data_parser.all_songs_and_artists)
class spotify_data_parser(unittest.TestCase):
    def test_open_file_and_return_formated_data_split_by_coma(self):
        with patch("builtins.open", mock_open(read_data="split,by,")) as mock_file:
            result = music_compare.spotify_data_parser().read_file("/test_path")
            # assert on the mock while the patch is active; the real builtin
            # open is restored as soon as the with block exits
            mock_file.assert_called_once_with("/test_path", "r", newline='')
        # the second argument to assertTrue is only a failure message, so this
        # merely checks that read_file returned something truthy
        self.assertTrue(result, "_csv.DictReader")
def test_no_artist_found_on_line(self):
lines_csv_dict_reader_formated = {
"not found": "not important",
}
        result = music_compare.spotify_data_parser().is_artist(lines_csv_dict_reader_formated)
        self.assertEqual(False, result)
def test_artist_found_on_line(self):
lines_csv_dict_reader_formated = {
"Artist Name": "Avenged Sevenfold",
}
        result = music_compare.spotify_data_parser().is_artist(lines_csv_dict_reader_formated)
        self.assertEqual(True, result)
def test_song_not_found_on_line(self):
lines_csv_dict_reader_formated = {
"not found": "Nightmare",
}
        result = music_compare.spotify_data_parser().is_song(lines_csv_dict_reader_formated)
        self.assertEqual(False, result)
def test_song_found_on_line(self):
lines_csv_dict_reader_formated = {
"Track Name": "Nightmare",
}
        result = music_compare.spotify_data_parser().is_song(lines_csv_dict_reader_formated)
        self.assertEqual(True, result)
def test_dont_save_if_artist_not_found(self):
lines_csv_dict_reader_formated = {
"not found": "not important",
}
        parser = music_compare.spotify_data_parser()
        parser.save_artist(lines_csv_dict_reader_formated)
        # assert on the same instance that handled the line; the original
        # checked a brand-new parser, which is empty regardless of save_artist
        self.assertEqual({}, parser.one_song_and_artist)
def test_save_if_artist_found(self):
lines_csv_dict_reader_formated = {
"Artist Name": "test_artist",
}
self.spotify_data_parser = music_compare.spotify_data_parser()
self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)
self.assertEqual('test_artist', self.spotify_data_parser.one_song_and_artist.get('Artist'))
def test_dont_save_if_song_not_found(self):
lines_csv_dict_reader_formated = {
"not found": "not important",
}
        parser = music_compare.spotify_data_parser()
        parser.save_song(lines_csv_dict_reader_formated)
        # assert on the same instance that handled the line; the original
        # checked a brand-new parser, which is empty regardless of save_song
        self.assertEqual({}, parser.one_song_and_artist)
def test_save_if_song_found(self):
lines_csv_dict_reader_formated = {
"Track Name": "test_song",
}
self.spotify_data_parser = music_compare.spotify_data_parser()
self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)
        self.assertEqual('test_song', self.spotify_data_parser.one_song_and_artist.get('Song'))
def test_combine_song_found_and_NOT_artist(self):
lines_csv_dict_reader_formated = {
"Name": "test_song",
"Artist": "test_artist"
}
self.spotify_data_parser = music_compare.spotify_data_parser()
self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)
self.spotify_data_parser.combine_song_and_artist()
self.assertEqual([], self.spotify_data_parser.all_songs_and_artists)
def test_combine_song_and_artist_if_found(self):
lines_csv_dict_reader_formated = {
"Track Name": "test_song",
"Artist Name": "test_artist"
}
self.spotify_data_parser = music_compare.spotify_data_parser()
self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)
self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)
self.spotify_data_parser.combine_song_and_artist()
self.assertEqual([{'Artist': 'test_artist', 'Song': 'test_song'}],
self.spotify_data_parser.all_songs_and_artists)
def test_combine_several_songs_and_artists(self):
with patch("builtins.open", mock_open(read_data='''Spotify URI,Track Name,Artist Name,Album Name,Disc Number,Track Number,Track Duration (ms),Added By,Added At
"spotify:track:4UEo1b0wWrtHMC8bVqPiH8","Nightmare","Avenged Sevenfold","Nightmare","1","1","374453","spotify:user:","2010-10-17T20:18:40Z"
"spotify:track:1d5UuboIPRMD4HaU3yycKC","Somewhere I Belong","Linkin Park","Meteora (Bonus Edition)","1","3","213933","spotify:user:","2010-10-17T20:24:25Z"''')):
self.spotify_data_parser = music_compare.spotify_data_parser()
self.spotify_data_parser.create("/test_path")
self.assertEqual([{'Artist': 'Avenged Sevenfold', 'Song': 'Nightmare'},
{'Artist': 'Linkin Park', 'Song': 'Somewhere I Belong'}],
self.spotify_data_parser.all_songs_and_artists)
class apple_music_and_spotify_comparer(unittest.TestCase):
def setUp(self):
self.comparer = music_compare.apple_music_and_spotify_comparer()
@patch.object(music_compare.spotify_data_parser, 'create')
@patch.object(music_compare.AppleMusicDataParser, 'create')
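    # stacked @patch decorators inject mocks bottom-up: the closest decorator
    # (AppleMusicDataParser.create) maps to the first extra argument, apple_music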
def test_save_data_from_spotify_and_apple_music_in_class(self, apple_music, spotify):
test = music_compare.apple_music_and_spotify_comparer()
spotify.return_value = [{'Artist': 'test_artist1', 'Song': 'test_song1'}]
apple_music.return_value = [{'Artist': 'test_artist2', 'Song': 'test_song2'}]
test.save_data_locally("/spotify", "/apple_music")
self.assertEqual([{'Artist': 'test_artist1', 'Song': 'test_song1'}], test.spotify_lib)
self.assertEqual([{'Artist': 'test_artist2', 'Song': 'test_song2'}], test.apple_music_lib)
@patch.object(music_compare.spotify_data_parser, 'create')
@patch.object(music_compare.AppleMusicDataParser, 'create')
def test_print_song_and_artist_when_song_not_found_in_apple_music(self, apple_music, spotify):
spotify.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'}]
apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'}]
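        # only the song absent from the apple_music list should be reported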
with patch("builtins.print") as mock_print:
self.comparer.find_matches("/spotify", "/apple_music")
mock_print.assert_has_calls(
[call('following songs not found in apple_music:'),
call('test_song_no_match by artist test_artist_no_match')])
@patch.object(music_compare.spotify_data_parser, 'create')
@patch.object(music_compare.AppleMusicDataParser, 'create')
def test_print_song_and_artist_when_song_not_found_in_spotify(self, apple_music, spotify):
spotify.return_value = [{'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'}]
apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'}]
with patch("builtins.print") as mock_print:
self.comparer.find_matches("/spotify", "/apple_music")
mock_print.assert_has_calls([call('following songs not found in spotify:'),
call('test_song by artist test_artist'),
call()])
@patch.object(music_compare.spotify_data_parser, 'create')
@patch.object(music_compare.AppleMusicDataParser, 'create')
def test_print_several_songs_and_artists_when_song_not_found_in_apple_music(self, apple_music, spotify):
spotify.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'},
{'Artist': 'test_artist_no_match2', 'Song': 'test_song_no_match2'},
{'Artist': 'test_artist2', 'Song': 'test_song2'}]
apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist2', 'Song': 'test_song2'}]
with patch("builtins.print") as mock_print:
self.comparer.find_matches("/spotify", "/apple_music")
self.assertEqual(3, mock_print.call_count)
mock_print.assert_has_calls(
[call('following songs not found in apple_music:'),
call('test_song_no_match by artist test_artist_no_match'),
call('test_song_no_match2 by artist test_artist_no_match2')],
any_order=False)
@patch.object(music_compare.spotify_data_parser, 'create')
@patch.object(music_compare.AppleMusicDataParser, 'create')
def test_print_several_songs_and_artists_when_song_not_found_in_spotify(self, apple_music, spotify):
apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'},
{'Artist': 'test_artist_no_match2', 'Song': 'test_song_no_match2'},
{'Artist': 'test_artist2', 'Song': 'test_song2'}]
spotify.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist2', 'Song': 'test_song2'}]
with patch("builtins.print") as mock_print:
self.comparer.find_matches("/spotify", "/apple_music")
self.assertEqual(4, mock_print.call_count)
mock_print.assert_has_calls(
[call('following songs not found in spotify:'),
call('test_song_no_match by artist test_artist_no_match'),
call('test_song_no_match2 by artist test_artist_no_match2'),
call()],
any_order=False)
@patch.object(music_compare.spotify_data_parser, 'create')
@patch.object(music_compare.AppleMusicDataParser, 'create')
def test_print_several_songs_and_artists_when_some_songs_missing_in_spotify_and_in_apple_music(self, apple_music,
spotify):
apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist_only_apple_music', 'Song': 'test_song_only_apple_music'}]
spotify.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},
{'Artist': 'test_artist_only_spotify', 'Song': 'test_song_only_spotify'}]
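        # one exclusive song per service, so both "not found" sections should print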
with patch("builtins.print") as mock_print:
self.comparer.find_matches("/spotify", "/apple_music")
self.assertEqual(5, mock_print.call_count)
mock_print.assert_has_calls([call("following songs not found in spotify:"),
call('test_song_only_apple_music by artist test_artist_only_apple_music'),
call(),
call("following songs not found in apple_music:"),
call('test_song_only_spotify by artist test_artist_only_spotify')
])
|
normal
|
{
"blob_id": "eec08b3fdd4beb7d88ac0dc6d2e8776cf54fda35",
"index": 2727,
"step-1": "<mask token>\n\n\nclass spotify_data_parser(unittest.TestCase):\n\n def test_open_file_and_return_formated_data_split_by_coma(self):\n with patch('builtins.open', mock_open(read_data='split,by,')):\n result = music_compare.spotify_data_parser().read_file('/test_path'\n )\n open.assert_called_once_with('/test_path', 'r', newline='')\n self.assertTrue(result, '_csv.DictReader')\n\n def test_no_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'Avenged Sevenfold'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, result)\n\n def test_song_not_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_song_found_on_line(self):\n lines_csv_dict_reader_formated = {'Track Name': 'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, result)\n\n def test_dont_save_if_artist_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n\n def test_save_if_artist_found(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.assertEqual('test_artist', self.spotify_data_parser.\n one_song_and_artist.get('Artist'))\n\n def test_dont_save_if_song_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_song(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n <mask token>\n\n def test_combine_song_found_and_NOT_artist(self):\n lines_csv_dict_reader_formated = {'Name': 'test_song', 'Artist':\n 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([], self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_song_and_artist_if_found(self):\n lines_csv_dict_reader_formated = {'Track Name': 'test_song',\n 'Artist Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([{'Artist': 'test_artist', 'Song': 'test_song'}],\n self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_several_songs_and_artists(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"Spotify URI,Track Name,Artist Name,Album Name,Disc Number,Track Number,Track Duration (ms),Added By,Added At\n\"spotify:track:4UEo1b0wWrtHMC8bVqPiH8\",\"Nightmare\",\"Avenged 
Sevenfold\",\"Nightmare\",\"1\",\"1\",\"374453\",\"spotify:user:\",\"2010-10-17T20:18:40Z\"\n\"spotify:track:1d5UuboIPRMD4HaU3yycKC\",\"Somewhere I Belong\",\"Linkin Park\",\"Meteora (Bonus Edition)\",\"1\",\"3\",\"213933\",\"spotify:user:\",\"2010-10-17T20:24:25Z\\\"\"\"\"\n )):\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.create('/test_path')\n self.assertEqual([{'Artist': 'Avenged Sevenfold', 'Song':\n 'Nightmare'}, {'Artist': 'Linkin Park', 'Song':\n 'Somewhere I Belong'}], self.spotify_data_parser.\n all_songs_and_artists)\n\n\nclass apple_music_and_spotify_comparer(unittest.TestCase):\n\n def setUp(self):\n self.comparer = music_compare.apple_music_and_spotify_comparer()\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_save_data_from_spotify_and_apple_music_in_class(self,\n apple_music, spotify):\n test = music_compare.apple_music_and_spotify_comparer()\n spotify.return_value = [{'Artist': 'test_artist1', 'Song':\n 'test_song1'}]\n apple_music.return_value = [{'Artist': 'test_artist2', 'Song':\n 'test_song2'}]\n test.save_data_locally('/spotify', '/apple_music')\n self.assertEqual([{'Artist': 'test_artist1', 'Song': 'test_song1'}],\n test.spotify_lib)\n self.assertEqual([{'Artist': 'test_artist2', 'Song': 'test_song2'}],\n test.apple_music_lib)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_apple_music(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match')])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_spotify(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song by artist test_artist'), call()])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_apple_music(\n self, apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n 
self.assertEqual(3, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2')],\n any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_spotify(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(4, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2'),\n call()], any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_some_songs_missing_in_spotify_and_in_apple_music(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_apple_music', 'Song':\n 'test_song_only_apple_music'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_spotify', 'Song':\n 'test_song_only_spotify'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(5, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song_only_apple_music by artist test_artist_only_apple_music'\n ), call(), call('following songs not found in apple_music:'\n ), call(\n 'test_song_only_spotify by artist test_artist_only_spotify')])\n",
"step-2": "<mask token>\n\n\nclass get_apple_music_data(unittest.TestCase):\n <mask token>\n <mask token>\n\n def test_save_one_song(self):\n with patch('builtins.open', mock_open(read_data=\n '<key>Sort Name</key><string>The Cabin In the Woods</string>')):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual('The Cabin In the Woods',\n apple_music_data_parser.one_song_and_artist.get('Song'))\n <mask token>\n <mask token>\n\n\nclass spotify_data_parser(unittest.TestCase):\n\n def test_open_file_and_return_formated_data_split_by_coma(self):\n with patch('builtins.open', mock_open(read_data='split,by,')):\n result = music_compare.spotify_data_parser().read_file('/test_path'\n )\n open.assert_called_once_with('/test_path', 'r', newline='')\n self.assertTrue(result, '_csv.DictReader')\n\n def test_no_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'Avenged Sevenfold'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, result)\n\n def test_song_not_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_song_found_on_line(self):\n lines_csv_dict_reader_formated = {'Track Name': 'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, result)\n\n def test_dont_save_if_artist_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n\n def test_save_if_artist_found(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.assertEqual('test_artist', self.spotify_data_parser.\n one_song_and_artist.get('Artist'))\n\n def test_dont_save_if_song_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_song(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n\n def test_save_if_song_found(self):\n lines_csv_dict_reader_formated = {'Track Name': 'test_song'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.assertEqual('test_song', self.spotify_data_parser.\n one_song_and_artist.get('Song'))\n\n def test_combine_song_found_and_NOT_artist(self):\n lines_csv_dict_reader_formated = {'Name': 'test_song', 'Artist':\n 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([], self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_song_and_artist_if_found(self):\n lines_csv_dict_reader_formated = {'Track Name': 'test_song',\n 'Artist 
Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([{'Artist': 'test_artist', 'Song': 'test_song'}],\n self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_several_songs_and_artists(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"Spotify URI,Track Name,Artist Name,Album Name,Disc Number,Track Number,Track Duration (ms),Added By,Added At\n\"spotify:track:4UEo1b0wWrtHMC8bVqPiH8\",\"Nightmare\",\"Avenged Sevenfold\",\"Nightmare\",\"1\",\"1\",\"374453\",\"spotify:user:\",\"2010-10-17T20:18:40Z\"\n\"spotify:track:1d5UuboIPRMD4HaU3yycKC\",\"Somewhere I Belong\",\"Linkin Park\",\"Meteora (Bonus Edition)\",\"1\",\"3\",\"213933\",\"spotify:user:\",\"2010-10-17T20:24:25Z\\\"\"\"\"\n )):\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.create('/test_path')\n self.assertEqual([{'Artist': 'Avenged Sevenfold', 'Song':\n 'Nightmare'}, {'Artist': 'Linkin Park', 'Song':\n 'Somewhere I Belong'}], self.spotify_data_parser.\n all_songs_and_artists)\n\n\nclass apple_music_and_spotify_comparer(unittest.TestCase):\n\n def setUp(self):\n self.comparer = music_compare.apple_music_and_spotify_comparer()\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_save_data_from_spotify_and_apple_music_in_class(self,\n apple_music, spotify):\n test = music_compare.apple_music_and_spotify_comparer()\n spotify.return_value = [{'Artist': 'test_artist1', 'Song':\n 'test_song1'}]\n apple_music.return_value = [{'Artist': 'test_artist2', 'Song':\n 'test_song2'}]\n test.save_data_locally('/spotify', '/apple_music')\n self.assertEqual([{'Artist': 'test_artist1', 'Song': 'test_song1'}],\n test.spotify_lib)\n self.assertEqual([{'Artist': 'test_artist2', 'Song': 'test_song2'}],\n test.apple_music_lib)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_apple_music(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match')])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_spotify(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song by artist test_artist'), call()])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n 
@patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_apple_music(\n self, apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(3, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2')],\n any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_spotify(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(4, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2'),\n call()], any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_some_songs_missing_in_spotify_and_in_apple_music(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_apple_music', 'Song':\n 'test_song_only_apple_music'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_spotify', 'Song':\n 'test_song_only_spotify'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(5, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song_only_apple_music by artist test_artist_only_apple_music'\n ), call(), call('following songs not found in apple_music:'\n ), call(\n 'test_song_only_spotify by artist test_artist_only_spotify')])\n",
"step-3": "<mask token>\n\n\nclass get_apple_music_data(unittest.TestCase):\n\n def test_open_file(self):\n with patch('builtins.open', mock_open(read_data='data')) as mock_file:\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n assert open('/apple_music').read() == 'data'\n mock_file.assert_called_with('/apple_music')\n\n def test_save_one_artist_from_line(self):\n with patch('builtins.open', mock_open(read_data=\n '<key>Sort Artist</key><string>Drew Goddard</string>')):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual('Drew Goddard', apple_music_data_parser.\n one_song_and_artist.get('Artist'))\n\n def test_save_one_song(self):\n with patch('builtins.open', mock_open(read_data=\n '<key>Sort Name</key><string>The Cabin In the Woods</string>')):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual('The Cabin In the Woods',\n apple_music_data_parser.one_song_and_artist.get('Song'))\n\n def test_save_one_song_and_artist(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"<key>Sort Artist</key><string>Drew Goddard</string>\n <key>Sort Name</key><string>The Cabin In the Woods</string>\"\"\"\n )):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual([{'Artist': 'Drew Goddard', 'Song':\n 'The Cabin In the Woods'}], apple_music_data_parser.\n all_songs_and_artists)\n\n def test_save_several_songs_and_artists(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"<key>Sort Name</key><string>The Cabin In the Woods</string>\n <key>Sort Artist</key><string>Drew Goddard</string>\n <key>Sort Name</key><string>Pulp Fiction</string>\n\t<key>Sort Artist</key><string>Quentin Tarantino</string>\"\"\"\n )):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual([{'Artist': 'Drew Goddard', 'Song':\n 'The Cabin In the Woods'}, {'Artist': 'Quentin Tarantino',\n 'Song': 'Pulp Fiction'}], apple_music_data_parser.\n all_songs_and_artists)\n\n\nclass spotify_data_parser(unittest.TestCase):\n\n def test_open_file_and_return_formated_data_split_by_coma(self):\n with patch('builtins.open', mock_open(read_data='split,by,')):\n result = music_compare.spotify_data_parser().read_file('/test_path'\n )\n open.assert_called_once_with('/test_path', 'r', newline='')\n self.assertTrue(result, '_csv.DictReader')\n\n def test_no_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'Avenged Sevenfold'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, result)\n\n def test_song_not_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_song_found_on_line(self):\n lines_csv_dict_reader_formated = {'Track Name': 'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, 
result)\n\n def test_dont_save_if_artist_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n\n def test_save_if_artist_found(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.assertEqual('test_artist', self.spotify_data_parser.\n one_song_and_artist.get('Artist'))\n\n def test_dont_save_if_song_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_song(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n\n def test_save_if_song_found(self):\n lines_csv_dict_reader_formated = {'Track Name': 'test_song'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.assertEqual('test_song', self.spotify_data_parser.\n one_song_and_artist.get('Song'))\n\n def test_combine_song_found_and_NOT_artist(self):\n lines_csv_dict_reader_formated = {'Name': 'test_song', 'Artist':\n 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([], self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_song_and_artist_if_found(self):\n lines_csv_dict_reader_formated = {'Track Name': 'test_song',\n 'Artist Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([{'Artist': 'test_artist', 'Song': 'test_song'}],\n self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_several_songs_and_artists(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"Spotify URI,Track Name,Artist Name,Album Name,Disc Number,Track Number,Track Duration (ms),Added By,Added At\n\"spotify:track:4UEo1b0wWrtHMC8bVqPiH8\",\"Nightmare\",\"Avenged Sevenfold\",\"Nightmare\",\"1\",\"1\",\"374453\",\"spotify:user:\",\"2010-10-17T20:18:40Z\"\n\"spotify:track:1d5UuboIPRMD4HaU3yycKC\",\"Somewhere I Belong\",\"Linkin Park\",\"Meteora (Bonus Edition)\",\"1\",\"3\",\"213933\",\"spotify:user:\",\"2010-10-17T20:24:25Z\\\"\"\"\"\n )):\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.create('/test_path')\n self.assertEqual([{'Artist': 'Avenged Sevenfold', 'Song':\n 'Nightmare'}, {'Artist': 'Linkin Park', 'Song':\n 'Somewhere I Belong'}], self.spotify_data_parser.\n all_songs_and_artists)\n\n\nclass apple_music_and_spotify_comparer(unittest.TestCase):\n\n def setUp(self):\n self.comparer = music_compare.apple_music_and_spotify_comparer()\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_save_data_from_spotify_and_apple_music_in_class(self,\n apple_music, spotify):\n test = music_compare.apple_music_and_spotify_comparer()\n spotify.return_value = [{'Artist': 'test_artist1', 'Song':\n 'test_song1'}]\n 
apple_music.return_value = [{'Artist': 'test_artist2', 'Song':\n 'test_song2'}]\n test.save_data_locally('/spotify', '/apple_music')\n self.assertEqual([{'Artist': 'test_artist1', 'Song': 'test_song1'}],\n test.spotify_lib)\n self.assertEqual([{'Artist': 'test_artist2', 'Song': 'test_song2'}],\n test.apple_music_lib)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_apple_music(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match')])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_spotify(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song by artist test_artist'), call()])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_apple_music(\n self, apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(3, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2')],\n any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_spotify(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(4, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following 
songs not found in spotify:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2'),\n call()], any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_some_songs_missing_in_spotify_and_in_apple_music(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_apple_music', 'Song':\n 'test_song_only_apple_music'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_spotify', 'Song':\n 'test_song_only_spotify'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(5, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song_only_apple_music by artist test_artist_only_apple_music'\n ), call(), call('following songs not found in apple_music:'\n ), call(\n 'test_song_only_spotify by artist test_artist_only_spotify')])\n",
"step-4": "import tempfile\nimport unittest\nfrom unittest.mock import mock_open, patch, MagicMock, call\nimport compare_apple_music_and_spotify as music_compare\n\n\nclass get_apple_music_data(unittest.TestCase):\n\n def test_open_file(self):\n with patch('builtins.open', mock_open(read_data='data')) as mock_file:\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n assert open('/apple_music').read() == 'data'\n mock_file.assert_called_with('/apple_music')\n\n def test_save_one_artist_from_line(self):\n with patch('builtins.open', mock_open(read_data=\n '<key>Sort Artist</key><string>Drew Goddard</string>')):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual('Drew Goddard', apple_music_data_parser.\n one_song_and_artist.get('Artist'))\n\n def test_save_one_song(self):\n with patch('builtins.open', mock_open(read_data=\n '<key>Sort Name</key><string>The Cabin In the Woods</string>')):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual('The Cabin In the Woods',\n apple_music_data_parser.one_song_and_artist.get('Song'))\n\n def test_save_one_song_and_artist(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"<key>Sort Artist</key><string>Drew Goddard</string>\n <key>Sort Name</key><string>The Cabin In the Woods</string>\"\"\"\n )):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual([{'Artist': 'Drew Goddard', 'Song':\n 'The Cabin In the Woods'}], apple_music_data_parser.\n all_songs_and_artists)\n\n def test_save_several_songs_and_artists(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"<key>Sort Name</key><string>The Cabin In the Woods</string>\n <key>Sort Artist</key><string>Drew Goddard</string>\n <key>Sort Name</key><string>Pulp Fiction</string>\n\t<key>Sort Artist</key><string>Quentin Tarantino</string>\"\"\"\n )):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create('/apple_music')\n self.assertEqual([{'Artist': 'Drew Goddard', 'Song':\n 'The Cabin In the Woods'}, {'Artist': 'Quentin Tarantino',\n 'Song': 'Pulp Fiction'}], apple_music_data_parser.\n all_songs_and_artists)\n\n\nclass spotify_data_parser(unittest.TestCase):\n\n def test_open_file_and_return_formated_data_split_by_coma(self):\n with patch('builtins.open', mock_open(read_data='split,by,')):\n result = music_compare.spotify_data_parser().read_file('/test_path'\n )\n open.assert_called_once_with('/test_path', 'r', newline='')\n self.assertTrue(result, '_csv.DictReader')\n\n def test_no_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'Avenged Sevenfold'}\n result = music_compare.spotify_data_parser().is_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, result)\n\n def test_song_not_found_on_line(self):\n lines_csv_dict_reader_formated = {'not found': 'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(False, result)\n\n def test_song_found_on_line(self):\n lines_csv_dict_reader_formated = {'Track Name': 
'Nightmare'}\n result = music_compare.spotify_data_parser().is_song(\n lines_csv_dict_reader_formated)\n self.assertEqual(True, result)\n\n def test_dont_save_if_artist_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_artist(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n\n def test_save_if_artist_found(self):\n lines_csv_dict_reader_formated = {'Artist Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.assertEqual('test_artist', self.spotify_data_parser.\n one_song_and_artist.get('Artist'))\n\n def test_dont_save_if_song_not_found(self):\n lines_csv_dict_reader_formated = {'not found': 'not important'}\n music_compare.spotify_data_parser().save_song(\n lines_csv_dict_reader_formated)\n self.assertEqual({}, music_compare.spotify_data_parser().\n one_song_and_artist)\n\n def test_save_if_song_found(self):\n lines_csv_dict_reader_formated = {'Track Name': 'test_song'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.assertEqual('test_song', self.spotify_data_parser.\n one_song_and_artist.get('Song'))\n\n def test_combine_song_found_and_NOT_artist(self):\n lines_csv_dict_reader_formated = {'Name': 'test_song', 'Artist':\n 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([], self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_song_and_artist_if_found(self):\n lines_csv_dict_reader_formated = {'Track Name': 'test_song',\n 'Artist Name': 'test_artist'}\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([{'Artist': 'test_artist', 'Song': 'test_song'}],\n self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_several_songs_and_artists(self):\n with patch('builtins.open', mock_open(read_data=\n \"\"\"Spotify URI,Track Name,Artist Name,Album Name,Disc Number,Track Number,Track Duration (ms),Added By,Added At\n\"spotify:track:4UEo1b0wWrtHMC8bVqPiH8\",\"Nightmare\",\"Avenged Sevenfold\",\"Nightmare\",\"1\",\"1\",\"374453\",\"spotify:user:\",\"2010-10-17T20:18:40Z\"\n\"spotify:track:1d5UuboIPRMD4HaU3yycKC\",\"Somewhere I Belong\",\"Linkin Park\",\"Meteora (Bonus Edition)\",\"1\",\"3\",\"213933\",\"spotify:user:\",\"2010-10-17T20:24:25Z\\\"\"\"\"\n )):\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.create('/test_path')\n self.assertEqual([{'Artist': 'Avenged Sevenfold', 'Song':\n 'Nightmare'}, {'Artist': 'Linkin Park', 'Song':\n 'Somewhere I Belong'}], self.spotify_data_parser.\n all_songs_and_artists)\n\n\nclass apple_music_and_spotify_comparer(unittest.TestCase):\n\n def setUp(self):\n self.comparer = music_compare.apple_music_and_spotify_comparer()\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_save_data_from_spotify_and_apple_music_in_class(self,\n apple_music, spotify):\n test = 
music_compare.apple_music_and_spotify_comparer()\n spotify.return_value = [{'Artist': 'test_artist1', 'Song':\n 'test_song1'}]\n apple_music.return_value = [{'Artist': 'test_artist2', 'Song':\n 'test_song2'}]\n test.save_data_locally('/spotify', '/apple_music')\n self.assertEqual([{'Artist': 'test_artist1', 'Song': 'test_song1'}],\n test.spotify_lib)\n self.assertEqual([{'Artist': 'test_artist2', 'Song': 'test_song2'}],\n test.apple_music_lib)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_apple_music(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match')])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_spotify(self,\n apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song by artist test_artist'), call()])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_apple_music(\n self, apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(3, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in apple_music:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2')],\n any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_spotify(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_no_match', 'Song':\n 'test_song_no_match'}, {'Artist': 'test_artist_no_match2',\n 'Song': 'test_song_no_match2'}, {'Artist': 'test_artist2',\n 'Song': 'test_song2'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch('builtins.print') as mock_print:\n 
self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(4, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song_no_match by artist test_artist_no_match'), call(\n 'test_song_no_match2 by artist test_artist_no_match2'),\n call()], any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_some_songs_missing_in_spotify_and_in_apple_music(\n self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_apple_music', 'Song':\n 'test_song_only_apple_music'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song':\n 'test_song'}, {'Artist': 'test_artist_only_spotify', 'Song':\n 'test_song_only_spotify'}]\n with patch('builtins.print') as mock_print:\n self.comparer.find_matches('/spotify', '/apple_music')\n self.assertEqual(5, mock_print.call_count)\n mock_print.assert_has_calls([call(\n 'following songs not found in spotify:'), call(\n 'test_song_only_apple_music by artist test_artist_only_apple_music'\n ), call(), call('following songs not found in apple_music:'\n ), call(\n 'test_song_only_spotify by artist test_artist_only_spotify')])\n",
"step-5": "import tempfile\nimport unittest\n\nfrom unittest.mock import mock_open, patch, MagicMock, call\nimport compare_apple_music_and_spotify as music_compare\n\n\nclass get_apple_music_data(unittest.TestCase):\n def test_open_file(self):\n with patch(\"builtins.open\", mock_open(read_data=\"data\")) as mock_file:\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create(\"/apple_music\")\n assert open(\"/apple_music\").read() == \"data\"\n mock_file.assert_called_with(\"/apple_music\")\n\n def test_save_one_artist_from_line(self):\n with patch(\"builtins.open\", mock_open(read_data=\"\"\"<key>Sort Artist</key><string>Drew Goddard</string>\"\"\")):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create(\"/apple_music\")\n self.assertEqual(\"Drew Goddard\", apple_music_data_parser.one_song_and_artist.get('Artist'))\n\n def test_save_one_song(self):\n with patch(\"builtins.open\",\n mock_open(read_data=\"\"\"<key>Sort Name</key><string>The Cabin In the Woods</string>\"\"\")):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create(\"/apple_music\")\n self.assertEqual(\"The Cabin In the Woods\", apple_music_data_parser.one_song_and_artist.get('Song'))\n\n def test_save_one_song_and_artist(self):\n with patch(\"builtins.open\", mock_open(read_data=\"\"\"<key>Sort Artist</key><string>Drew Goddard</string>\n <key>Sort Name</key><string>The Cabin In the Woods</string>\"\"\")):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create(\"/apple_music\")\n self.assertEqual([{'Artist': \"Drew Goddard\", 'Song': \"The Cabin In the Woods\"}],\n apple_music_data_parser.all_songs_and_artists)\n\n def test_save_several_songs_and_artists(self):\n with patch(\"builtins.open\", mock_open(read_data='''<key>Sort Name</key><string>The Cabin In the Woods</string>\n <key>Sort Artist</key><string>Drew Goddard</string>\n <key>Sort Name</key><string>Pulp Fiction</string>\n\t<key>Sort Artist</key><string>Quentin Tarantino</string>''')):\n apple_music_data_parser = music_compare.AppleMusicDataParser()\n apple_music_data_parser.create(\"/apple_music\")\n self.assertEqual([{'Artist': \"Drew Goddard\", 'Song': \"The Cabin In the Woods\"},\n {'Artist': \"Quentin Tarantino\", 'Song': \"Pulp Fiction\"}],\n apple_music_data_parser.all_songs_and_artists)\n\n\n\nclass spotify_data_parser(unittest.TestCase):\n\n def test_open_file_and_return_formated_data_split_by_coma(self):\n with patch(\"builtins.open\", mock_open(read_data=\"split,by,\")):\n result = music_compare.spotify_data_parser().read_file(\"/test_path\")\n open.assert_called_once_with(\"/test_path\", \"r\", newline='')\n self.assertTrue(result, \"_csv.DictReader\")\n\n def test_no_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {\n \"not found\": \"not important\",\n }\n result= music_compare.spotify_data_parser().is_artist(lines_csv_dict_reader_formated)\n self.assertEqual(False,result)\n\n def test_artist_found_on_line(self):\n lines_csv_dict_reader_formated = {\n \"Artist Name\": \"Avenged Sevenfold\",\n }\n result= music_compare.spotify_data_parser().is_artist(lines_csv_dict_reader_formated)\n self.assertEqual(True,result)\n\n def test_song_not_found_on_line(self):\n lines_csv_dict_reader_formated = {\n \"not found\": \"Nightmare\",\n }\n result= music_compare.spotify_data_parser().is_song(lines_csv_dict_reader_formated)\n self.assertEqual(False,result)\n\n def 
test_song_found_on_line(self):\n lines_csv_dict_reader_formated = {\n \"Track Name\": \"Nightmare\",\n }\n result= music_compare.spotify_data_parser().is_song(lines_csv_dict_reader_formated)\n self.assertEqual(True,result)\n\n def test_dont_save_if_artist_not_found(self):\n lines_csv_dict_reader_formated = {\n \"not found\": \"not important\",\n }\n music_compare.spotify_data_parser().save_artist(lines_csv_dict_reader_formated)\n self.assertEqual({},music_compare.spotify_data_parser().one_song_and_artist)\n\n def test_save_if_artist_found(self):\n lines_csv_dict_reader_formated = {\n \"Artist Name\": \"test_artist\",\n }\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n self.assertEqual('test_artist', self.spotify_data_parser.one_song_and_artist.get('Artist'))\n\n\n def test_dont_save_if_song_not_found(self):\n lines_csv_dict_reader_formated = {\n \"not found\": \"not important\",\n }\n music_compare.spotify_data_parser().save_song(lines_csv_dict_reader_formated)\n self.assertEqual({},music_compare.spotify_data_parser().one_song_and_artist)\n\n def test_save_if_song_found(self):\n lines_csv_dict_reader_formated = {\n \"Track Name\": \"test_song\",\n }\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.assertEqual('test_song', self.spotify_data_parser .one_song_and_artist.get('Song'))\n\n def test_combine_song_found_and_NOT_artist(self):\n lines_csv_dict_reader_formated = {\n \"Name\": \"test_song\",\n \"Artist\": \"test_artist\"\n }\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([], self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_song_and_artist_if_found(self):\n lines_csv_dict_reader_formated = {\n \"Track Name\": \"test_song\",\n \"Artist Name\": \"test_artist\"\n }\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.save_song(lines_csv_dict_reader_formated)\n self.spotify_data_parser.save_artist(lines_csv_dict_reader_formated)\n\n self.spotify_data_parser.combine_song_and_artist()\n self.assertEqual([{'Artist': 'test_artist', 'Song': 'test_song'}],\n self.spotify_data_parser.all_songs_and_artists)\n\n def test_combine_several_songs_and_artists(self):\n with patch(\"builtins.open\", mock_open(read_data='''Spotify URI,Track Name,Artist Name,Album Name,Disc Number,Track Number,Track Duration (ms),Added By,Added At\n\"spotify:track:4UEo1b0wWrtHMC8bVqPiH8\",\"Nightmare\",\"Avenged Sevenfold\",\"Nightmare\",\"1\",\"1\",\"374453\",\"spotify:user:\",\"2010-10-17T20:18:40Z\"\n\"spotify:track:1d5UuboIPRMD4HaU3yycKC\",\"Somewhere I Belong\",\"Linkin Park\",\"Meteora (Bonus Edition)\",\"1\",\"3\",\"213933\",\"spotify:user:\",\"2010-10-17T20:24:25Z\"''')):\n self.spotify_data_parser = music_compare.spotify_data_parser()\n self.spotify_data_parser.create(\"/test_path\")\n self.assertEqual([{'Artist': 'Avenged Sevenfold', 'Song': 'Nightmare'},\n {'Artist': 'Linkin Park', 'Song': 'Somewhere I Belong'}],\n self.spotify_data_parser.all_songs_and_artists)\n\n\nclass apple_music_and_spotify_comparer(unittest.TestCase):\n\n def setUp(self):\n self.comparer = music_compare.apple_music_and_spotify_comparer()\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n 
@patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_save_data_from_spotify_and_apple_music_in_class(self, apple_music, spotify):\n test = music_compare.apple_music_and_spotify_comparer()\n spotify.return_value = [{'Artist': 'test_artist1', 'Song': 'test_song1'}]\n apple_music.return_value = [{'Artist': 'test_artist2', 'Song': 'test_song2'}]\n test.save_data_locally(\"/spotify\", \"/apple_music\")\n self.assertEqual([{'Artist': 'test_artist1', 'Song': 'test_song1'}], test.spotify_lib)\n self.assertEqual([{'Artist': 'test_artist2', 'Song': 'test_song2'}], test.apple_music_lib)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_apple_music(self, apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},\n {'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'}]\n with patch(\"builtins.print\") as mock_print:\n self.comparer.find_matches(\"/spotify\", \"/apple_music\")\n mock_print.assert_has_calls(\n [call('following songs not found in apple_music:'),\n call('test_song_no_match by artist test_artist_no_match')])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_song_and_artist_when_song_not_found_in_spotify(self, apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},\n {'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'}]\n with patch(\"builtins.print\") as mock_print:\n self.comparer.find_matches(\"/spotify\", \"/apple_music\")\n mock_print.assert_has_calls([call('following songs not found in spotify:'),\n call('test_song by artist test_artist'),\n call()])\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_apple_music(self, apple_music, spotify):\n spotify.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},\n {'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'},\n {'Artist': 'test_artist_no_match2', 'Song': 'test_song_no_match2'},\n {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},\n {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch(\"builtins.print\") as mock_print:\n self.comparer.find_matches(\"/spotify\", \"/apple_music\")\n self.assertEqual(3, mock_print.call_count)\n mock_print.assert_has_calls(\n [call('following songs not found in apple_music:'),\n call('test_song_no_match by artist test_artist_no_match'),\n call('test_song_no_match2 by artist test_artist_no_match2')],\n any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_song_not_found_in_spotify(self, apple_music, spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},\n {'Artist': 'test_artist_no_match', 'Song': 'test_song_no_match'},\n {'Artist': 'test_artist_no_match2', 'Song': 'test_song_no_match2'},\n {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n spotify.return_value = [{'Artist': 'test_artist', 
'Song': 'test_song'},\n {'Artist': 'test_artist2', 'Song': 'test_song2'}]\n with patch(\"builtins.print\") as mock_print:\n self.comparer.find_matches(\"/spotify\", \"/apple_music\")\n self.assertEqual(4, mock_print.call_count)\n mock_print.assert_has_calls(\n [call('following songs not found in spotify:'),\n call('test_song_no_match by artist test_artist_no_match'),\n call('test_song_no_match2 by artist test_artist_no_match2'),\n call()],\n any_order=False)\n\n @patch.object(music_compare.spotify_data_parser, 'create')\n @patch.object(music_compare.AppleMusicDataParser, 'create')\n def test_print_several_songs_and_artists_when_some_songs_missing_in_spotify_and_in_apple_music(self, apple_music,\n spotify):\n apple_music.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},\n {'Artist': 'test_artist_only_apple_music', 'Song': 'test_song_only_apple_music'}]\n spotify.return_value = [{'Artist': 'test_artist', 'Song': 'test_song'},\n {'Artist': 'test_artist_only_spotify', 'Song': 'test_song_only_spotify'}]\n\n with patch(\"builtins.print\") as mock_print:\n self.comparer.find_matches(\"/spotify\", \"/apple_music\")\n self.assertEqual(5, mock_print.call_count)\n mock_print.assert_has_calls([call(\"following songs not found in spotify:\"),\n call('test_song_only_apple_music by artist test_artist_only_apple_music'),\n call(),\n call(\"following songs not found in apple_music:\"),\n call('test_song_only_spotify by artist test_artist_only_spotify')\n ])\n",
"step-ids": [
20,
23,
27,
28,
29
]
}
|
[
20,
23,
27,
28,
29
] |
# Copyright 2011 Isaku Yamahata
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for Block Device utility functions.
"""
from unittest import mock

from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import units

from nova import block_device
from nova.compute import api as compute_api
from nova import context
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import matchers
from nova.volume import cinder


class BlockDeviceTestCase(test.NoDBTestCase):
def setUp(self):
super(BlockDeviceTestCase, self).setUp()
BDM = block_device.BlockDeviceDict
self.new_mapping = [
BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'volume_size': 1,
'guest_format': 'swap',
'boot_index': -1}),
BDM({'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'source_type': 'blank',
'destination_type': 'local',
'volume_size': 10,
'delete_on_termination': True,
'boot_index': -1}),
BDM({'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'connection_info': "{'fake': 'connection_info'}",
'boot_index': 0}),
BDM({'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'volume_id': 'fake-volume-id-2',
'boot_index': -1}),
BDM({'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'}),
]
def test_properties(self):
root_device0 = '/dev/sda'
root_device1 = '/dev/sdb'
mappings = [{'virtual': 'root',
'device': root_device0}]
properties0 = {'mappings': mappings}
properties1 = {'mappings': mappings,
'root_device_name': root_device1}
self.assertIsNone(block_device.properties_root_device_name({}))
self.assertEqual(root_device0,
block_device.properties_root_device_name(properties0))
self.assertEqual(root_device1,
block_device.properties_root_device_name(properties1))
def test_ephemeral(self):
self.assertFalse(block_device.is_ephemeral('ephemeral'))
self.assertTrue(block_device.is_ephemeral('ephemeral0'))
self.assertTrue(block_device.is_ephemeral('ephemeral1'))
self.assertTrue(block_device.is_ephemeral('ephemeral11'))
self.assertFalse(block_device.is_ephemeral('root'))
self.assertFalse(block_device.is_ephemeral('swap'))
self.assertFalse(block_device.is_ephemeral('/dev/sda1'))
self.assertEqual(0, block_device.ephemeral_num('ephemeral0'))
self.assertEqual(1, block_device.ephemeral_num('ephemeral1'))
self.assertEqual(11, block_device.ephemeral_num('ephemeral11'))
self.assertFalse(block_device.is_swap_or_ephemeral('ephemeral'))
self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral0'))
self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral1'))
self.assertTrue(block_device.is_swap_or_ephemeral('swap'))
self.assertFalse(block_device.is_swap_or_ephemeral('root'))
self.assertFalse(block_device.is_swap_or_ephemeral('/dev/sda1'))
def test_mappings_prepend_dev(self):
mapping = [
{'virtual': 'ami', 'device': '/dev/sda'},
{'virtual': 'root', 'device': 'sda'},
{'virtual': 'ephemeral0', 'device': 'sdb'},
{'virtual': 'swap', 'device': 'sdc'},
{'virtual': 'ephemeral1', 'device': 'sdd'},
{'virtual': 'ephemeral2', 'device': 'sde'}]
expected = [
{'virtual': 'ami', 'device': '/dev/sda'},
{'virtual': 'root', 'device': 'sda'},
{'virtual': 'ephemeral0', 'device': '/dev/sdb'},
{'virtual': 'swap', 'device': '/dev/sdc'},
{'virtual': 'ephemeral1', 'device': '/dev/sdd'},
{'virtual': 'ephemeral2', 'device': '/dev/sde'}]
prepended = block_device.mappings_prepend_dev(mapping)
self.assertEqual(sorted(expected, key=lambda v: v['virtual']),
sorted(prepended, key=lambda v: v['virtual']))
def test_strip_dev(self):
self.assertEqual('sda', block_device.strip_dev('/dev/sda'))
self.assertEqual('sda', block_device.strip_dev('sda'))
self.assertIsNone(block_device.strip_dev(None))
def test_strip_prefix(self):
self.assertEqual('a', block_device.strip_prefix('/dev/sda'))
self.assertEqual('a', block_device.strip_prefix('a'))
self.assertEqual('a', block_device.strip_prefix('xvda'))
self.assertEqual('a', block_device.strip_prefix('vda'))
self.assertEqual('a', block_device.strip_prefix('hda'))
self.assertIsNone(block_device.strip_prefix(None))
def test_get_device_letter(self):
self.assertEqual('', block_device.get_device_letter(''))
self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))
self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))
self.assertEqual('d', block_device.get_device_letter('/dev/d'))
self.assertEqual('a', block_device.get_device_letter('a'))
self.assertEqual('b', block_device.get_device_letter('sdb2'))
self.assertEqual('c', block_device.get_device_letter('vdc'))
self.assertEqual('c', block_device.get_device_letter('hdc'))
self.assertIsNone(block_device.get_device_letter(None))
def test_generate_device_name(self):
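        # Device letters use a spreadsheet-column style encoding:
        # index 0 -> 'a', 25 -> 'z', 26 -> 'aa', 730 -> 'abc', so
        # arbitrarily large indices remain representable.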
expected = (
('vda', ("vd", 0)),
('vdaa', ("vd", 26)),
('vdabc', ("vd", 730)),
('vdidpok', ("vd", 4194304)),
('sdc', ("sd", 2)),
('sdaa', ("sd", 26)),
('sdiw', ("sd", 256)),
('hdzz', ("hd", 701))
)
for res, args in expected:
self.assertEqual(res, block_device.generate_device_name(*args))
def test_volume_in_mapping(self):
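        # The root device itself never counts as "in the mapping";
        # swap, ephemerals and attached volumes all do.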
swap = {'device_name': '/dev/sdb',
'swap_size': 1}
ephemerals = [{'num': 0,
'virtual_name': 'ephemeral0',
'device_name': '/dev/sdc1',
'size': 1},
{'num': 2,
'virtual_name': 'ephemeral2',
'device_name': '/dev/sdd',
'size': 1}]
block_device_mapping = [{'mount_device': '/dev/sde',
'device_path': 'fake_device'},
{'mount_device': '/dev/sdf',
'device_path': 'fake_device'}]
block_device_info = {
'root_device_name': '/dev/sda',
'swap': swap,
'ephemerals': ephemerals,
'block_device_mapping': block_device_mapping}
def _assert_volume_in_mapping(device_name, true_or_false):
in_mapping = block_device.volume_in_mapping(
device_name, block_device_info)
self.assertEqual(true_or_false, in_mapping)
_assert_volume_in_mapping('sda', False)
_assert_volume_in_mapping('sdb', True)
_assert_volume_in_mapping('sdc1', True)
_assert_volume_in_mapping('sdd', True)
_assert_volume_in_mapping('sde', True)
_assert_volume_in_mapping('sdf', True)
_assert_volume_in_mapping('sdg', False)
_assert_volume_in_mapping('sdh1', False)
def test_get_root_bdm(self):
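        # Only a mapping with boot_index == 0 qualifies as the root BDM;
        # missing, negative or non-zero boot indexes never match.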
root_bdm = {'device_name': 'vda', 'boot_index': 0}
bdms = [root_bdm,
{'device_name': 'vdb', 'boot_index': 1},
{'device_name': 'vdc', 'boot_index': -1},
{'device_name': 'vdd'}]
self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))
self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))
self.assertIsNone(block_device.get_root_bdm(bdms[1:]))
self.assertIsNone(block_device.get_root_bdm(bdms[2:]))
self.assertIsNone(block_device.get_root_bdm(bdms[3:]))
self.assertIsNone(block_device.get_root_bdm([]))
def test_get_bdm_ephemeral_disk_size(self):
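        # Only the blank/local mapping without guest_format='swap'
        # (id 2, 10 GB) counts towards the ephemeral disk size.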
size = block_device.get_bdm_ephemeral_disk_size(self.new_mapping)
self.assertEqual(10, size)
def test_get_bdm_swap_list(self):
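        # Only the mapping with guest_format='swap' (id 1) is returned.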
swap_list = block_device.get_bdm_swap_list(self.new_mapping)
self.assertEqual(1, len(swap_list))
self.assertEqual(1, swap_list[0].get('id'))
def test_get_bdm_local_disk_num(self):
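        # Both blank mappings (swap and ephemeral) have
        # destination_type='local', so the local disk count is 2.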
size = block_device.get_bdm_local_disk_num(self.new_mapping)
self.assertEqual(2, size)
def test_new_format_is_swap(self):
expected_results = [True, False, False, False, False]
for expected, bdm in zip(expected_results, self.new_mapping):
res = block_device.new_format_is_swap(bdm)
self.assertEqual(expected, res)
def test_new_format_is_ephemeral(self):
expected_results = [False, True, False, False, False]
for expected, bdm in zip(expected_results, self.new_mapping):
res = block_device.new_format_is_ephemeral(bdm)
self.assertEqual(expected, res)
def test_validate_device_name(self):
for value in [' ', 10, None, 'a' * 260]:
self.assertRaises(exception.InvalidBDMFormat,
block_device.validate_device_name,
value)
def test_validate_and_default_volume_size(self):
bdm = {}
for value in [-1, 'a', 2.5]:
bdm['volume_size'] = value
self.assertRaises(exception.InvalidBDMFormat,
block_device.validate_and_default_volume_size,
bdm)
def test_get_bdms_to_connect(self):
root_bdm = {'device_name': 'vda', 'boot_index': 0}
bdms = [root_bdm,
{'device_name': 'vdb', 'boot_index': 1},
{'device_name': 'vdc', 'boot_index': -1},
{'device_name': 'vde', 'boot_index': None},
{'device_name': 'vdd'}]
self.assertNotIn(root_bdm, block_device.get_bdms_to_connect(bdms,
exclude_root_mapping=True))
self.assertIn(root_bdm, block_device.get_bdms_to_connect(bdms))


class TestBlockDeviceDict(test.NoDBTestCase):
def setUp(self):
super(TestBlockDeviceDict, self).setUp()
BDM = block_device.BlockDeviceDict
self.api_mapping = [
{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': 'swap',
'boot_index': -1},
{'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'boot_index': -1},
{'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'source_type': 'volume',
'destination_type': 'volume',
'uuid': 'fake-volume-id-1',
'boot_index': 0},
{'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'uuid': 'fake-snapshot-id-1',
'boot_index': -1},
{'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'},
]
self.new_mapping = [
BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': 'swap',
'boot_index': -1}),
BDM({'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'boot_index': -1}),
BDM({'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'connection_info': "{'fake': 'connection_info'}",
'boot_index': 0}),
BDM({'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'volume_id': 'fake-volume-id-2',
'boot_index': -1}),
BDM({'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'}),
]
self.legacy_mapping = [
{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'delete_on_termination': True,
'virtual_name': 'swap'},
{'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'delete_on_termination': True,
'virtual_name': 'ephemeral0'},
{'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'volume_id': 'fake-volume-id-1',
'connection_info': "{'fake': 'connection_info'}"},
{'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'volume_id': 'fake-volume-id-2'},
{'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'},
]
self.new_mapping_source_image = [
BDM({'id': 6, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda3',
'source_type': 'image',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'volume_id': 'fake-volume-id-3',
'boot_index': -1}),
BDM({'id': 7, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda4',
'source_type': 'image',
'destination_type': 'local',
'connection_info': "{'fake': 'connection_info'}",
'image_id': 'fake-image-id-2',
'boot_index': -1}),
]
self.legacy_mapping_source_image = [
{'id': 6, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda3',
'connection_info': "{'fake': 'connection_info'}",
'volume_id': 'fake-volume-id-3'},
]
def test_init(self):
def fake_validate(obj, dct):
pass
self.stub_out('nova.block_device.BlockDeviceDict._fields',
set(['field1', 'field2']))
self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',
set(['db_field1', 'db_field2']))
self.stub_out('nova.block_device.BlockDeviceDict._validate',
fake_validate)
# Make sure db fields are not picked up if they are not
# in the original dict
dev_dict = block_device.BlockDeviceDict({'field1': 'foo',
'field2': 'bar',
'db_field1': 'baz'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
# Make sure all expected fields are defaulted
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
# Unless they are not meant to be
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},
do_not_default=set(['field2']))
self.assertIn('field1', dev_dict)
self.assertNotIn('field2', dev_dict)
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
# Passing kwargs to constructor works
dev_dict = block_device.BlockDeviceDict(field1='foo')
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
dev_dict = block_device.BlockDeviceDict(
{'field1': 'foo'}, field2='bar')
self.assertEqual('foo', dev_dict['field1'])
self.assertEqual('bar', dev_dict['field2'])
def test_init_prepend_dev_to_device_name(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance,
'device_name': 'vda',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vda', bdm_dict['device_name'])
bdm['device_name'] = '/dev/vdb'
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vdb', bdm_dict['device_name'])
bdm['device_name'] = None
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertIsNone(bdm_dict['device_name'])
def test_init_boolify_delete_on_termination(self):
# Make sure that when delete_on_termination is not passed it's
# still set to False and not None
bdm = {'id': 3, 'instance_uuid': uuids.instance,
'device_name': 'vda',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertFalse(bdm_dict['delete_on_termination'])
def test_validate(self):
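        # _validate runs on construction: unknown fields, a missing
        # source_type (unless no_device is set), malformed device names
        # and non-integer sizes or boot indexes are all rejected, while
        # valid string values are coerced to their proper types.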
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
{'bogus_field': 'lame_val'})
lame_bdm = dict(self.new_mapping[2])
del lame_bdm['source_type']
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_bdm)
lame_bdm['no_device'] = True
block_device.BlockDeviceDict(lame_bdm)
lame_dev_bdm = dict(self.new_mapping[2])
lame_dev_bdm['device_name'] = "not a valid name"
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_dev_bdm)
lame_dev_bdm['device_name'] = ""
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_dev_bdm)
cool_volume_size_bdm = dict(self.new_mapping[2])
cool_volume_size_bdm['volume_size'] = '42'
cool_volume_size_bdm = block_device.BlockDeviceDict(
cool_volume_size_bdm)
self.assertEqual(42, cool_volume_size_bdm['volume_size'])
lame_volume_size_bdm = dict(self.new_mapping[2])
lame_volume_size_bdm['volume_size'] = 'some_non_int_string'
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_volume_size_bdm)
truthy_bdm = dict(self.new_mapping[2])
truthy_bdm['delete_on_termination'] = '1'
truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)
self.assertTrue(truthy_bdm['delete_on_termination'])
verbose_bdm = dict(self.new_mapping[2])
verbose_bdm['boot_index'] = 'first'
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
verbose_bdm)
def test_from_legacy(self):
for legacy, new in zip(self.legacy_mapping, self.new_mapping):
self.assertThat(
block_device.BlockDeviceDict.from_legacy(legacy),
matchers.IsSubDictOf(new))
def test_from_legacy_mapping(self):
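        # Exercise legacy conversion with no image, with an image ref,
        # with an image ref plus an explicit root device, and with
        # no_root=True.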
def _get_image_bdms(bdms):
return [bdm for bdm in bdms if bdm['source_type'] == 'image']
def _get_bootable_bdms(bdms):
return [bdm for bdm in bdms
if (bdm['boot_index'] is not None and
bdm['boot_index'] >= 0)]
new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)
self.assertEqual(0, len(_get_image_bdms(new_no_img)))
for new, expected in zip(new_no_img, self.new_mapping):
self.assertThat(new, matchers.IsSubDictOf(expected))
new_with_img = block_device.from_legacy_mapping(
self.legacy_mapping, 'fake_image_ref')
image_bdms = _get_image_bdms(new_with_img)
boot_bdms = _get_bootable_bdms(new_with_img)
self.assertEqual(1, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, image_bdms[0]['boot_index'])
self.assertEqual('image', boot_bdms[0]['source_type'])
new_with_img_and_root = block_device.from_legacy_mapping(
self.legacy_mapping, 'fake_image_ref', 'sda1')
image_bdms = _get_image_bdms(new_with_img_and_root)
boot_bdms = _get_bootable_bdms(new_with_img_and_root)
self.assertEqual(0, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, boot_bdms[0]['boot_index'])
self.assertEqual('volume', boot_bdms[0]['source_type'])
new_no_root = block_device.from_legacy_mapping(
self.legacy_mapping, 'fake_image_ref', 'sda1', no_root=True)
self.assertEqual(0, len(_get_image_bdms(new_no_root)))
self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))
def test_from_api(self):
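        # API-created BDMs carry no connection_info yet, and a
        # snapshot-backed mapping carries no volume_id yet.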
for api, new in zip(self.api_mapping, self.new_mapping):
new['connection_info'] = None
if new['snapshot_id']:
new['volume_id'] = None
self.assertThat(
block_device.BlockDeviceDict.from_api(api, False),
matchers.IsSubDictOf(new))
def test_from_api_invalid_blank_id(self):
api_dict = {'id': 1,
'source_type': 'blank',
'destination_type': 'volume',
'uuid': 'fake-volume-id-1',
'delete_on_termination': True,
'boot_index': -1}
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api, api_dict,
False)
def test_from_api_invalid_source_to_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'uuid': 'fake-volume-id-1'}
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api, api_dict,
False)
def test_from_api_valid_source_to_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'uuid': 1,
'boot_index': 0}
retexp = block_device.BlockDeviceDict(
{'id': 1,
'source_type': 'image',
'image_id': 1,
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'boot_index': 0})
self.assertEqual(retexp,
block_device.BlockDeviceDict.from_api(api_dict, True))
def test_from_api_valid_source_to_local_mapping_with_string_bi(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'uuid': 1,
'boot_index': '0'}
retexp = block_device.BlockDeviceDict(
{'id': 1,
'source_type': 'image',
'image_id': 1,
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'boot_index': 0})
self.assertEqual(retexp,
block_device.BlockDeviceDict.from_api(api_dict, True))
def test_from_api_invalid_image_to_destination_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'uuid': 'fake-volume-id-1',
'volume_type': 'fake-lvm-1',
'boot_index': 1}
ex = self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api,
api_dict, False)
self.assertIn('Mapping image to local is not supported', str(ex))
def test_from_api_invalid_volume_type_to_destination_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'volume',
'destination_type': 'local',
'uuid': 'fake-volume-id-1',
'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api,
api_dict, False)
self.assertIn('Specifying a volume_type with destination_type=local '
'is not supported', str(ex))
def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(
self):
api_dict = {'id': 1,
'source_type': 'volume',
'destination_type': 'volume',
'uuid': 'fake-volume-id-1',
'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api,
api_dict, False)
self.assertIn('Specifying volume type to existing volume is '
'not supported', str(ex))
def test_image_mapping(self):
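        # get_image_mapping() must strip database bookkeeping and
        # connection fields before the BDM is embedded in image metadata.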
removed_fields = ['id', 'instance_uuid', 'connection_info',
'created_at', 'updated_at', 'deleted_at', 'deleted']
for bdm in self.new_mapping:
mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(
bdm).get_image_mapping()
for fld in removed_fields:
self.assertNotIn(fld, mapping_bdm)
def _test_snapshot_from_bdm(self, template):
snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)
self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])
self.assertEqual('snapshot', snapshot['source_type'])
self.assertEqual('volume', snapshot['destination_type'])
self.assertEqual(template.volume_size, snapshot['volume_size'])
self.assertEqual(template.delete_on_termination,
snapshot['delete_on_termination'])
self.assertEqual(template.device_name, snapshot['device_name'])
for key in ['disk_bus', 'device_type', 'boot_index']:
self.assertEqual(template[key], snapshot[key])
def test_snapshot_from_bdm(self):
for bdm in self.new_mapping:
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))
def test_snapshot_from_object(self):
for bdm in self.new_mapping[:-1]:
obj = objects.BlockDeviceMapping()
obj = objects.BlockDeviceMapping._from_db_object(
None, obj, fake_block_device.FakeDbBlockDeviceDict(
bdm))
self._test_snapshot_from_bdm(obj)


class GetBDMImageMetadataTestCase(test.NoDBTestCase):
def setUp(self):
super().setUp()
self.compute_api = compute_api.API()
self.context = context.RequestContext('fake', 'fake')
def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):
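        # Booting from a volume requires the volume to be marked
        # bootable; otherwise InvalidBDMVolumeNotBootable is raised.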
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': None,
'volume_id': '1',
'delete_on_termination': False,
}]
expected_meta = {
'min_disk': 0, 'min_ram': 0, 'properties': {}, 'size': 0,
'status': 'active',
}
def get_vol_data(*args, **kwargs):
return {'bootable': is_bootable}
with mock.patch.object(
self.compute_api.volume_api, 'get', side_effect=get_vol_data,
):
if not is_bootable:
self.assertRaises(
exception.InvalidBDMVolumeNotBootable,
block_device.get_bdm_image_metadata,
self.context,
self.compute_api.image_api,
self.compute_api.volume_api,
block_device_mapping)
else:
meta = block_device.get_bdm_image_metadata(
self.context, self.compute_api.image_api,
self.compute_api.volume_api, block_device_mapping)
self.assertEqual(expected_meta, meta)
def test_get_bdm_image_metadata__non_bootable(self):
self._test_get_bdm_image_metadata__bootable(False)
def test_get_bdm_image_metadata__bootable(self):
self._test_get_bdm_image_metadata__bootable(True)
def test_get_bdm_image_metadata__basic_property(self):
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': None,
'volume_id': '1',
'delete_on_termination': False,
}]
fake_volume = {
'volume_image_metadata': {
'min_ram': 256, 'min_disk': 128, 'foo': 'bar',
},
}
with mock.patch.object(
self.compute_api.volume_api, 'get', return_value=fake_volume,
):
meta = block_device.get_bdm_image_metadata(
self.context, self.compute_api.image_api,
self.compute_api.volume_api, block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
def test_get_bdm_image_metadata__snapshot_basic_property(self):
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': '2',
'volume_id': None,
'delete_on_termination': False,
}]
fake_volume = {
'volume_image_metadata': {
'min_ram': 256, 'min_disk': 128, 'foo': 'bar',
},
}
fake_snapshot = {'volume_id': '1'}
with test.nested(
mock.patch.object(
self.compute_api.volume_api, 'get',
return_value=fake_volume),
mock.patch.object(
self.compute_api.volume_api, 'get_snapshot',
return_value=fake_snapshot),
) as (volume_get, volume_get_snapshot):
meta = block_device.get_bdm_image_metadata(
self.context, self.compute_api.image_api,
self.compute_api.volume_api, block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
volume_get_snapshot.assert_called_once_with(
self.context, block_device_mapping[0]['snapshot_id'])
volume_get.assert_called_once_with(
self.context, fake_snapshot['volume_id'])
@mock.patch.object(
cinder.API, 'get',
side_effect=exception.CinderConnectionFailed(reason='error'))
def test_get_bdm_image_metadata__cinder_down(self, mock_get):
bdms = [
objects.BlockDeviceMapping(
**fake_block_device.FakeDbBlockDeviceDict({
'id': 1,
'volume_id': 1,
'source_type': 'volume',
'destination_type': 'volume',
'device_name': 'vda',
})
)
]
self.assertRaises(
exception.CinderConnectionFailed,
block_device.get_bdm_image_metadata,
self.context,
self.compute_api.image_api,
self.compute_api.volume_api,
bdms, legacy_bdm=True)


class GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):
def test_inherit_image_properties(self):
properties = {'fake_prop': 'fake_value'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(properties, image_meta['properties'])
def test_image_size(self):
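        # Cinder reports volume size in GiB, while image metadata
        # expects bytes, hence the units.Gi conversion.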
volume = {'size': 10}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(10 * units.Gi, image_meta['size'])
def test_image_status(self):
volume = {}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual('active', image_meta['status'])
def test_values_conversion(self):
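        # min_ram and min_disk arrive as strings in the volume image
        # metadata and must come back as ints.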
properties = {'min_ram': '5', 'min_disk': '7'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(5, image_meta['min_ram'])
self.assertEqual(7, image_meta['min_disk'])
def test_suppress_not_image_properties(self):
properties = {
'min_ram': '256', 'min_disk': '128', 'image_id': 'fake_id',
'image_name': 'fake_name', 'container_format': 'ami',
'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum',
}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual({}, image_meta['properties'])
self.assertEqual(0, image_meta['size'])
# volume's properties should not be touched
self.assertNotEqual({}, properties)
|
normal
|
{
"blob_id": "d56e313318635788ae5b3d3a3f767450ab2f2296",
"index": 4985,
"step-1": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, {'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 
'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n 
self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n 
BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n 
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', 
side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-2": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, {'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 
'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n 
self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def 
test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 
'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n 
self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-3": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n <mask token>\n\n def test_strip_dev(self):\n self.assertEqual('sda', block_device.strip_dev('/dev/sda'))\n self.assertEqual('sda', block_device.strip_dev('sda'))\n self.assertIsNone(block_device.strip_dev(None))\n <mask token>\n\n def test_get_device_letter(self):\n self.assertEqual('', block_device.get_device_letter(''))\n self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))\n self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))\n self.assertEqual('d', block_device.get_device_letter('/dev/d'))\n self.assertEqual('a', block_device.get_device_letter('a'))\n self.assertEqual('b', block_device.get_device_letter('sdb2'))\n self.assertEqual('c', block_device.get_device_letter('vdc'))\n self.assertEqual('c', block_device.get_device_letter('hdc'))\n self.assertIsNone(block_device.get_device_letter(None))\n\n def test_generate_device_name(self):\n expected = ('vda', ('vd', 0)), ('vdaa', ('vd', 26)), ('vdabc', (\n 'vd', 730)), ('vdidpok', ('vd', 4194304)), ('sdc', ('sd', 2)), (\n 'sdaa', ('sd', 26)), ('sdiw', ('sd', 256)), ('hdzz', ('hd', 701))\n for res, args in expected:\n self.assertEqual(res, block_device.generate_device_name(*args))\n <mask token>\n\n def test_get_root_bdm(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm, {'device_name': 'vdb', 'boot_index': 1}, {\n 'device_name': 'vdc', 'boot_index': -1}, {'device_name': 'vdd'}]\n self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))\n self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))\n self.assertIsNone(block_device.get_root_bdm(bdms[1:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[2:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[3:]))\n self.assertIsNone(block_device.get_root_bdm([]))\n <mask token>\n <mask token>\n\n def test_get_bdm_local_disk_num(self):\n size = block_device.get_bdm_local_disk_num(self.new_mapping)\n self.assertEqual(2, size)\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, 
{'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', 
dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def 
_get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n 
BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def 
test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 
'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-4": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n\n def test_mappings_prepend_dev(self):\n mapping = [{'virtual': 'ami', 'device': '/dev/sda'}, {'virtual':\n 'root', 'device': 'sda'}, {'virtual': 'ephemeral0', 'device':\n 'sdb'}, {'virtual': 'swap', 'device': 'sdc'}, {'virtual':\n 'ephemeral1', 'device': 'sdd'}, {'virtual': 'ephemeral2',\n 'device': 'sde'}]\n expected = [{'virtual': 'ami', 'device': '/dev/sda'}, {'virtual':\n 'root', 'device': 'sda'}, {'virtual': 'ephemeral0', 'device':\n '/dev/sdb'}, {'virtual': 'swap', 'device': '/dev/sdc'}, {\n 'virtual': 'ephemeral1', 'device': '/dev/sdd'}, {'virtual':\n 'ephemeral2', 'device': '/dev/sde'}]\n prepended = block_device.mappings_prepend_dev(mapping)\n self.assertEqual(sorted(expected, key=lambda v: v['virtual']),\n sorted(prepended, key=lambda v: v['virtual']))\n\n def test_strip_dev(self):\n self.assertEqual('sda', block_device.strip_dev('/dev/sda'))\n self.assertEqual('sda', block_device.strip_dev('sda'))\n self.assertIsNone(block_device.strip_dev(None))\n\n def test_strip_prefix(self):\n self.assertEqual('a', block_device.strip_prefix('/dev/sda'))\n self.assertEqual('a', block_device.strip_prefix('a'))\n self.assertEqual('a', block_device.strip_prefix('xvda'))\n self.assertEqual('a', block_device.strip_prefix('vda'))\n self.assertEqual('a', block_device.strip_prefix('hda'))\n self.assertIsNone(block_device.strip_prefix(None))\n\n def test_get_device_letter(self):\n self.assertEqual('', block_device.get_device_letter(''))\n self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))\n self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))\n self.assertEqual('d', block_device.get_device_letter('/dev/d'))\n self.assertEqual('a', block_device.get_device_letter('a'))\n self.assertEqual('b', block_device.get_device_letter('sdb2'))\n self.assertEqual('c', block_device.get_device_letter('vdc'))\n self.assertEqual('c', block_device.get_device_letter('hdc'))\n self.assertIsNone(block_device.get_device_letter(None))\n\n def test_generate_device_name(self):\n expected = ('vda', ('vd', 0)), ('vdaa', ('vd', 26)), ('vdabc', (\n 'vd', 730)), ('vdidpok', ('vd', 4194304)), ('sdc', ('sd', 2)), (\n 'sdaa', ('sd', 26)), ('sdiw', ('sd', 256)), ('hdzz', ('hd', 701))\n for res, args in expected:\n self.assertEqual(res, block_device.generate_device_name(*args))\n <mask token>\n\n def test_get_root_bdm(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm, {'device_name': 'vdb', 'boot_index': 1}, {\n 'device_name': 'vdc', 'boot_index': -1}, {'device_name': 'vdd'}]\n self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))\n self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))\n self.assertIsNone(block_device.get_root_bdm(bdms[1:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[2:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[3:]))\n self.assertIsNone(block_device.get_root_bdm([]))\n\n def test_get_bdm_ephemeral_disk_size(self):\n size = 
block_device.get_bdm_ephemeral_disk_size(self.new_mapping)\n self.assertEqual(10, size)\n <mask token>\n\n def test_get_bdm_local_disk_num(self):\n size = block_device.get_bdm_local_disk_num(self.new_mapping)\n self.assertEqual(2, size)\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, {'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 
'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, 
block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n 
def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n 
self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 
'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-5": "# Copyright 2011 Isaku Yamahata\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"\nTests for Block Device utility functions.\n\"\"\"\n\nfrom unittest import mock\n\nfrom oslo_utils.fixture import uuidsentinel as uuids\nfrom oslo_utils import units\n\nfrom nova import block_device\nfrom nova.compute import api as compute_api\nfrom nova import context\nfrom nova import exception\nfrom nova import objects\nfrom nova import test\nfrom nova.tests.unit import fake_block_device\nfrom nova.tests.unit import matchers\nfrom nova.volume import cinder\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n def setUp(self):\n super(BlockDeviceTestCase, self).setUp()\n BDM = block_device.BlockDeviceDict\n\n self.new_mapping = [\n BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'volume_size': 1,\n 'guest_format': 'swap',\n 'boot_index': -1}),\n BDM({'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'volume_size': 10,\n 'delete_on_termination': True,\n 'boot_index': -1}),\n BDM({'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'boot_index': 0}),\n BDM({'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'source_type': 'snapshot',\n 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'snapshot_id': 'fake-snapshot-id-1',\n 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}),\n BDM({'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'}),\n ]\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root',\n 'device': root_device0}]\n\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings,\n 'root_device_name': root_device1}\n\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0,\n block_device.properties_root_device_name(properties0))\n self.assertEqual(root_device1,\n block_device.properties_root_device_name(properties1))\n\n def test_ephemeral(self):\n self.assertFalse(block_device.is_ephemeral('ephemeral'))\n self.assertTrue(block_device.is_ephemeral('ephemeral0'))\n self.assertTrue(block_device.is_ephemeral('ephemeral1'))\n self.assertTrue(block_device.is_ephemeral('ephemeral11'))\n self.assertFalse(block_device.is_ephemeral('root'))\n self.assertFalse(block_device.is_ephemeral('swap'))\n self.assertFalse(block_device.is_ephemeral('/dev/sda1'))\n\n self.assertEqual(0, block_device.ephemeral_num('ephemeral0'))\n self.assertEqual(1, block_device.ephemeral_num('ephemeral1'))\n self.assertEqual(11, block_device.ephemeral_num('ephemeral11'))\n\n 
self.assertFalse(block_device.is_swap_or_ephemeral('ephemeral'))\n self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral0'))\n self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral1'))\n self.assertTrue(block_device.is_swap_or_ephemeral('swap'))\n self.assertFalse(block_device.is_swap_or_ephemeral('root'))\n self.assertFalse(block_device.is_swap_or_ephemeral('/dev/sda1'))\n\n def test_mappings_prepend_dev(self):\n mapping = [\n {'virtual': 'ami', 'device': '/dev/sda'},\n {'virtual': 'root', 'device': 'sda'},\n {'virtual': 'ephemeral0', 'device': 'sdb'},\n {'virtual': 'swap', 'device': 'sdc'},\n {'virtual': 'ephemeral1', 'device': 'sdd'},\n {'virtual': 'ephemeral2', 'device': 'sde'}]\n\n expected = [\n {'virtual': 'ami', 'device': '/dev/sda'},\n {'virtual': 'root', 'device': 'sda'},\n {'virtual': 'ephemeral0', 'device': '/dev/sdb'},\n {'virtual': 'swap', 'device': '/dev/sdc'},\n {'virtual': 'ephemeral1', 'device': '/dev/sdd'},\n {'virtual': 'ephemeral2', 'device': '/dev/sde'}]\n\n prepended = block_device.mappings_prepend_dev(mapping)\n self.assertEqual(sorted(expected, key=lambda v: v['virtual']),\n sorted(prepended, key=lambda v: v['virtual']))\n\n def test_strip_dev(self):\n self.assertEqual('sda', block_device.strip_dev('/dev/sda'))\n self.assertEqual('sda', block_device.strip_dev('sda'))\n self.assertIsNone(block_device.strip_dev(None))\n\n def test_strip_prefix(self):\n self.assertEqual('a', block_device.strip_prefix('/dev/sda'))\n self.assertEqual('a', block_device.strip_prefix('a'))\n self.assertEqual('a', block_device.strip_prefix('xvda'))\n self.assertEqual('a', block_device.strip_prefix('vda'))\n self.assertEqual('a', block_device.strip_prefix('hda'))\n self.assertIsNone(block_device.strip_prefix(None))\n\n def test_get_device_letter(self):\n self.assertEqual('', block_device.get_device_letter(''))\n self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))\n self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))\n self.assertEqual('d', block_device.get_device_letter('/dev/d'))\n self.assertEqual('a', block_device.get_device_letter('a'))\n self.assertEqual('b', block_device.get_device_letter('sdb2'))\n self.assertEqual('c', block_device.get_device_letter('vdc'))\n self.assertEqual('c', block_device.get_device_letter('hdc'))\n self.assertIsNone(block_device.get_device_letter(None))\n\n def test_generate_device_name(self):\n expected = (\n ('vda', (\"vd\", 0)),\n ('vdaa', (\"vd\", 26)),\n ('vdabc', (\"vd\", 730)),\n ('vdidpok', (\"vd\", 4194304)),\n ('sdc', (\"sd\", 2)),\n ('sdaa', (\"sd\", 26)),\n ('sdiw', (\"sd\", 256)),\n ('hdzz', (\"hd\", 701))\n )\n for res, args in expected:\n self.assertEqual(res, block_device.generate_device_name(*args))\n\n def test_volume_in_mapping(self):\n swap = {'device_name': '/dev/sdb',\n 'swap_size': 1}\n ephemerals = [{'num': 0,\n 'virtual_name': 'ephemeral0',\n 'device_name': '/dev/sdc1',\n 'size': 1},\n {'num': 2,\n 'virtual_name': 'ephemeral2',\n 'device_name': '/dev/sdd',\n 'size': 1}]\n block_device_mapping = [{'mount_device': '/dev/sde',\n 'device_path': 'fake_device'},\n {'mount_device': '/dev/sdf',\n 'device_path': 'fake_device'}]\n block_device_info = {\n 'root_device_name': '/dev/sda',\n 'swap': swap,\n 'ephemerals': ephemerals,\n 'block_device_mapping': block_device_mapping}\n\n def _assert_volume_in_mapping(device_name, true_or_false):\n in_mapping = block_device.volume_in_mapping(\n device_name, block_device_info)\n self.assertEqual(true_or_false, in_mapping)\n\n _assert_volume_in_mapping('sda', 
False)\n _assert_volume_in_mapping('sdb', True)\n _assert_volume_in_mapping('sdc1', True)\n _assert_volume_in_mapping('sdd', True)\n _assert_volume_in_mapping('sde', True)\n _assert_volume_in_mapping('sdf', True)\n _assert_volume_in_mapping('sdg', False)\n _assert_volume_in_mapping('sdh1', False)\n\n def test_get_root_bdm(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm,\n {'device_name': 'vdb', 'boot_index': 1},\n {'device_name': 'vdc', 'boot_index': -1},\n {'device_name': 'vdd'}]\n self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))\n self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))\n self.assertIsNone(block_device.get_root_bdm(bdms[1:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[2:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[3:]))\n self.assertIsNone(block_device.get_root_bdm([]))\n\n def test_get_bdm_ephemeral_disk_size(self):\n size = block_device.get_bdm_ephemeral_disk_size(self.new_mapping)\n self.assertEqual(10, size)\n\n def test_get_bdm_swap_list(self):\n swap_list = block_device.get_bdm_swap_list(self.new_mapping)\n self.assertEqual(1, len(swap_list))\n self.assertEqual(1, swap_list[0].get('id'))\n\n def test_get_bdm_local_disk_num(self):\n size = block_device.get_bdm_local_disk_num(self.new_mapping)\n self.assertEqual(2, size)\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n\n def test_new_format_is_ephemeral(self):\n expected_results = [False, True, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_ephemeral(bdm)\n self.assertEqual(expected, res)\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.validate_device_name,\n value)\n\n def test_validate_and_default_volume_size(self):\n bdm = {}\n for value in [-1, 'a', 2.5]:\n bdm['volume_size'] = value\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.validate_and_default_volume_size,\n bdm)\n\n def test_get_bdms_to_connect(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm,\n {'device_name': 'vdb', 'boot_index': 1},\n {'device_name': 'vdc', 'boot_index': -1},\n {'device_name': 'vde', 'boot_index': None},\n {'device_name': 'vdd'}]\n self.assertNotIn(root_bdm, block_device.get_bdms_to_connect(bdms,\n exclude_root_mapping=True))\n self.assertIn(root_bdm, block_device.get_bdms_to_connect(bdms))\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n\n BDM = block_device.BlockDeviceDict\n\n self.api_mapping = [\n {'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'guest_format': 'swap',\n 'boot_index': -1},\n {'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'boot_index': -1},\n {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'uuid': 'fake-volume-id-1',\n 'boot_index': 0},\n {'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'source_type': 'snapshot',\n 'destination_type': 
'volume',\n 'uuid': 'fake-snapshot-id-1',\n 'boot_index': -1},\n {'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'},\n ]\n\n self.new_mapping = [\n BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'guest_format': 'swap',\n 'boot_index': -1}),\n BDM({'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'boot_index': -1}),\n BDM({'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'boot_index': 0}),\n BDM({'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'source_type': 'snapshot',\n 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'snapshot_id': 'fake-snapshot-id-1',\n 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}),\n BDM({'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'}),\n ]\n\n self.legacy_mapping = [\n {'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'delete_on_termination': True,\n 'virtual_name': 'swap'},\n {'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'delete_on_termination': True,\n 'virtual_name': 'ephemeral0'},\n {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'volume_id': 'fake-volume-id-1',\n 'connection_info': \"{'fake': 'connection_info'}\"},\n {'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'snapshot_id': 'fake-snapshot-id-1',\n 'volume_id': 'fake-volume-id-2'},\n {'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'},\n ]\n\n self.new_mapping_source_image = [\n BDM({'id': 6, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda3',\n 'source_type': 'image',\n 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}),\n BDM({'id': 7, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda4',\n 'source_type': 'image',\n 'destination_type': 'local',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'image_id': 'fake-image-id-2',\n 'boot_index': -1}),\n ]\n\n self.legacy_mapping_source_image = [\n {'id': 6, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda3',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'volume_id': 'fake-volume-id-3'},\n ]\n\n def test_init(self):\n def fake_validate(obj, dct):\n pass\n\n self.stub_out('nova.block_device.BlockDeviceDict._fields',\n set(['field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n\n # Make sure db fields are not picked up if they are not\n # in the original dict\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo',\n 'field2': 'bar',\n 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n\n # Make sure all expected fields are defaulted\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n 
self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n\n # Unless they are not meant to be\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n\n # Passing kwargs to constructor works\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict(\n {'field1': 'foo'}, field2='bar')\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': 'vda',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n # Make sure that when delete_on_termination is not passed it's\n # still set to False and not None\n bdm = {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': 'vda',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n {'bogus_field': 'lame_val'})\n\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_bdm)\n\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = \"not a valid name\"\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_dev_bdm)\n\n lame_dev_bdm['device_name'] = \"\"\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_dev_bdm)\n\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_volume_size_bdm)\n\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n 
self.assertThat(\n block_device.BlockDeviceDict.from_legacy(legacy),\n matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms\n if (bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0)]\n\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n\n new_with_img = block_device.from_legacy_mapping(\n self.legacy_mapping, 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n\n new_with_img_and_root = block_device.from_legacy_mapping(\n self.legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n\n new_no_root = block_device.from_legacy_mapping(\n self.legacy_mapping, 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(\n block_device.BlockDeviceDict.from_api(api, False),\n matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1,\n 'source_type': 'blank',\n 'destination_type': 'volume',\n 'uuid': 'fake-volume-id-1',\n 'delete_on_termination': True,\n 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api, api_dict,\n False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1,\n 'source_type': 'image',\n 'destination_type': 'local',\n 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api, api_dict,\n False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1,\n 'source_type': 'image',\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'uuid': 1,\n 'boot_index': 0}\n\n retexp = block_device.BlockDeviceDict(\n {'id': 1,\n 'source_type': 'image',\n 'image_id': 1,\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0})\n self.assertEqual(retexp,\n block_device.BlockDeviceDict.from_api(api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1,\n 'source_type': 'image',\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'uuid': 1,\n 'boot_index': '0'}\n\n retexp = block_device.BlockDeviceDict(\n {'id': 1,\n 'source_type': 'image',\n 'image_id': 1,\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0})\n self.assertEqual(retexp,\n block_device.BlockDeviceDict.from_api(api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1,\n 
'source_type': 'image',\n 'destination_type': 'local',\n 'uuid': 'fake-volume-id-1',\n 'volume_type': 'fake-lvm-1',\n 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api,\n api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1,\n 'source_type': 'volume',\n 'destination_type': 'local',\n 'uuid': 'fake-volume-id-1',\n 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api,\n api_dict, False)\n self.assertIn('Specifying a volume_type with destination_type=local '\n 'is not supported', str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1,\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'uuid': 'fake-volume-id-1',\n 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api,\n api_dict, False)\n self.assertIn('Specifying volume type to existing volume is '\n 'not supported', str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(\n bdm).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination,\n snapshot['delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(\n None, obj, fake_block_device.FakeDbBlockDeviceDict(\n bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{\n 'id': 1,\n 'device_name': 'vda',\n 'no_device': None,\n 'virtual_name': None,\n 'snapshot_id': None,\n 'volume_id': '1',\n 'delete_on_termination': False,\n }]\n\n expected_meta = {\n 'min_disk': 0, 'min_ram': 0, 'properties': {}, 'size': 0,\n 'status': 'active',\n }\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n\n with mock.patch.object(\n self.compute_api.volume_api, 'get', side_effect=get_vol_data,\n ):\n if not is_bootable:\n self.assertRaises(\n exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata,\n self.context,\n self.compute_api.image_api,\n self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = 
block_device.get_bdm_image_metadata(\n self.context, self.compute_api.image_api,\n self.compute_api.volume_api, block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{\n 'id': 1,\n 'device_name': 'vda',\n 'no_device': None,\n 'virtual_name': None,\n 'snapshot_id': None,\n 'volume_id': '1',\n 'delete_on_termination': False,\n }]\n fake_volume = {\n 'volume_image_metadata': {\n 'min_ram': 256, 'min_disk': 128, 'foo': 'bar',\n },\n }\n with mock.patch.object(\n self.compute_api.volume_api, 'get', return_value=fake_volume,\n ):\n meta = block_device.get_bdm_image_metadata(\n self.context, self.compute_api.image_api,\n self.compute_api.volume_api, block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{\n 'id': 1,\n 'device_name': 'vda',\n 'no_device': None,\n 'virtual_name': None,\n 'snapshot_id': '2',\n 'volume_id': None,\n 'delete_on_termination': False,\n }]\n fake_volume = {\n 'volume_image_metadata': {\n 'min_ram': 256, 'min_disk': 128, 'foo': 'bar',\n },\n }\n fake_snapshot = {'volume_id': '1'}\n with test.nested(\n mock.patch.object(\n self.compute_api.volume_api, 'get',\n return_value=fake_volume),\n mock.patch.object(\n self.compute_api.volume_api, 'get_snapshot',\n return_value=fake_snapshot),\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(\n self.context, self.compute_api.image_api,\n self.compute_api.volume_api, block_device_mapping)\n\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(\n self.context, block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(\n self.context, fake_snapshot['volume_id'])\n\n @mock.patch.object(\n cinder.API, 'get',\n side_effect=exception.CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [\n objects.BlockDeviceMapping(\n **fake_block_device.FakeDbBlockDeviceDict({\n 'id': 1,\n 'volume_id': 1,\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'device_name': 'vda',\n })\n )\n ]\n self.assertRaises(\n exception.CinderConnectionFailed,\n block_device.get_bdm_image_metadata,\n self.context,\n self.compute_api.image_api,\n self.compute_api.volume_api,\n bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', 
image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {\n 'min_ram': '256', 'min_disk': '128', 'image_id': 'fake_id',\n 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum',\n }\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n # volume's properties should not be touched\n self.assertNotEqual({}, properties)\n",
"step-ids": [
37,
38,
43,
46,
55
]
}
|
[
37,
38,
43,
46,
55
] |
from django.shortcuts import render
from django.views.generic import ListView
from auth_person.models import Post_news, User
# Create your views here.
def blog(request, foo):
inf = {'login': foo}
return render(request, 'blog/blog.html', context=inf)
class feed(ListView):
template_name = 'blog/feed.html'
model = Post_news
paginate_by = 10
def get_queryset(self):
user_name = self.kwargs['foo']
print(user_name)
return Post_news.objects.all().order_by('-date_post').filter(user__login=user_name)
    def get_context_data(self, *, object_list=None, **kwargs):
        # The original body is truncated in the source; returning the parent
        # context (assumed intent) keeps the view importable and functional.
        return super().get_context_data(object_list=object_list, **kwargs)
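
# A minimal URLconf sketch for wiring these views; route names and patterns
# are assumptions, not taken from this project's actual urls.py:
#
#     from django.urls import path
#     from blog import views
#
#     urlpatterns = [
#         path('blog/<str:foo>/', views.blog, name='blog'),
#         path('blog/<str:foo>/feed/', views.feed.as_view(), name='feed'),
#     ]
#
# Both patterns supply the `foo` kwarg consumed by blog() and
# feed.get_queryset() above.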
|
normal
|
{
"blob_id": "b216c0f92bcf91fd538eabf0239cf149342ef2eb",
"index": 4493,
"step-1": "from django.shortcuts import render\nfrom django.views.generic import ListView\nfrom auth_person.models import Post_news, User\n\n# Create your views here.\n\n\ndef blog(request, foo):\n inf = {'login': foo}\n return render(request, 'blog/blog.html', context=inf)\n\nclass feed(ListView):\n template_name = 'blog/feed.html'\n model = Post_news\n paginate_by = 10\n def get_queryset(self):\n user_name = self.kwargs['foo']\n print(user_name)\n return Post_news.objects.all().order_by('-date_post').filter(user__login=user_name)\n\n def get_context_data(self, *, object_list=None, **kwargs):\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import re
import requests
import numpy as np
import json
import os
from collections import OrderedDict
import pandas as pd
import datetime
import time
# Write a list out as a JSON file so pandas can read it back conveniently
def write_list_to_json(list, json_file_name, json_file_save_path):
os.chdir(json_file_save_path)
with open(json_file_name, 'w') as f:
json.dump(list, f)
# Data-fetching step: download the page and regex-extract the per-country series
def getworld_data(url,header):
headers = header
res = requests.get(url,headers = headers)
res.encoding = "UTF-8"
pattern = re.compile('(\'\{"(\w+)":{"active":(.*?),"confirmed":(.*?),"deaths":(.*?),"recovered":(.*?),"relative_active":(.*?),"relative_active_start_date":(.*?),"relative_confirmed":(.*?),"relative_confirmed_start_date":(.*?),"relative_deaths":(.*?),"relative_deaths_start_date":(.*?),"relative_recovered":(.*?),"relative_recovered_start_date":(.*?)}\}\')',re.S)
end = re.findall(pattern,res.text)
a=str(end[0])
with open('test.txt','w') as f:
f.write(a)
data_relative_confirmed_json=[]
pattern_1 = re.compile('(\w+)":{"active":(.*?),"confirmed":(.*?),"deaths":(.*?),"recovered":(.*?),"relative_active":(.*?),"relative_active_start_date":(.*?),"relative_confirmed":(.*?),"relative_confirmed_start_date":(.*?),"relative_deaths":(.*?),"relative_deaths_start_date":(.*?),"relative_recovered":(.*?),"relative_recovered_start_date":(.*?)}',re.S)
end_1=re.findall(pattern_1,a)
return end_1
# Reconstruct the date axis for each series and assemble per-country records
def count_time(end_1):
    data_relative_confirmed_json = []
    country = []
    for i in range(len(end_1)):
        data = {'Country': end_1[i][0]}
        # case counts for the series (author's comment: "number of confirmed
        # cases"; index 5 is the relative_active group of the regex)
country.append(end_1[i][0])
care=end_1[i][5].replace('[','').replace(']','').split(',')
try:
time=end_1[i][6].replace('/',',').replace('/',',').replace('"','').split(',')
print(time)
time[2]='2020'
date=[]
in_date = time[2]+'-'+time[0]+'-'+time[1]
dt = datetime.datetime.strptime(in_date, "%Y-%m-%d")
for k in range(len(end_1[i][5].replace('[','').replace(']','').split(','))):
out_date = (dt + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
dt=datetime.datetime.strptime(out_date, "%Y-%m-%d")
date.append(out_date)
print(date)
time_care=OrderedDict(zip(date,care))
print(time_care)
date_json=OrderedDict(data,**time_care)
data_relative_confirmed_json.append(date_json)
except:
pass
return data_relative_confirmed_json
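
# Worked example of the date arithmetic used in count_time(): starting from a
# series' recorded start date, each successive data point is stamped one day
# after the previous one (the dates below are illustrative only).
#
#     >>> dt = datetime.datetime.strptime('2020-01-22', '%Y-%m-%d')
#     >>> (dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
#     '2020-01-23'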
def write_json_to_csv(data_relative_confirmed_json,end_1):
write_list_to_json(data_relative_confirmed_json,'20200517-world-active-data.json','E:/python_code/world_cov19')
data_csv=pd.DataFrame(json.loads(open('20200517-world-active-data.json','r+').read()))
print(end_1[36][0])
care=end_1[36][5].replace('[','').replace(']','').split(',')
try:
time=end_1[36][6].replace('/',',').replace('/',',').replace('"','').split(',')
print(time)
time[2]='2020'
date=[]
in_date = time[2]+'-'+time[0]+'-'+time[1]
dt = datetime.datetime.strptime(in_date, "%Y-%m-%d")
for k in range(len(end_1[36][5].replace('[','').replace(']','').split(','))):
out_date = (dt + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
dt=datetime.datetime.strptime(out_date, "%Y-%m-%d")
date.append(out_date)
print(date)
time_care=OrderedDict(zip(date,care))
print(time_care)
except:
pass
date.insert(0,'Country')
cols=date
data_csv=data_csv.loc[:,cols]
    # NOTE: a bare `data_csv.T` here would be a no-op; the transpose actually
    # happens below, by re-reading the CSV and writing it back transposed.
    data_csv.to_csv('20200517-world-active-data.json.csv')
df=pd.read_csv('20200517-world-active-data.json.csv')
new_csv=df.T
new_csv.to_csv('20200517-world-active-data.json.csv')
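
# Minimal usage sketch chaining the three steps above. The URL and header are
# placeholders (assumptions), not the author's actual target: substitute the
# page whose embedded JS blob matches the regex in getworld_data().
if __name__ == '__main__':
    url = 'https://example.com/coronavirus/'   # hypothetical source page
    header = {'User-Agent': 'Mozilla/5.0'}
    end_1 = getworld_data(url, header)     # download and regex-parse
    records = count_time(end_1)            # attach a date axis per country
    write_json_to_csv(records, end_1)      # dump JSON, then pivot to CSV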
|
normal
|
{
"blob_id": "0677e12bc9733c76bff7ed3fe83e3800e64e9a10",
"index": 7633,
"step-1": "<mask token>\n\n\ndef getworld_data(url, header):\n headers = header\n res = requests.get(url, headers=headers)\n res.encoding = 'UTF-8'\n pattern = re.compile(\n '(\\'\\\\{\"(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}\\\\}\\')'\n , re.S)\n end = re.findall(pattern, res.text)\n a = str(end[0])\n with open('test.txt', 'w') as f:\n f.write(a)\n data_relative_confirmed_json = []\n pattern_1 = re.compile(\n '(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}'\n , re.S)\n end_1 = re.findall(pattern_1, a)\n return end_1\n\n\n<mask token>\n\n\ndef write_json_to_csv(data_relative_confirmed_json, end_1):\n write_list_to_json(data_relative_confirmed_json,\n '20200517-world-active-data.json', 'E:/python_code/world_cov19')\n data_csv = pd.DataFrame(json.loads(open(\n '20200517-world-active-data.json', 'r+').read()))\n print(end_1[36][0])\n care = end_1[36][5].replace('[', '').replace(']', '').split(',')\n try:\n time = end_1[36][6].replace('/', ',').replace('/', ',').replace('\"', ''\n ).split(',')\n print(time)\n time[2] = '2020'\n date = []\n in_date = time[2] + '-' + time[0] + '-' + time[1]\n dt = datetime.datetime.strptime(in_date, '%Y-%m-%d')\n for k in range(len(end_1[36][5].replace('[', '').replace(']', '').\n split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')\n dt = datetime.datetime.strptime(out_date, '%Y-%m-%d')\n date.append(out_date)\n print(date)\n time_care = OrderedDict(zip(date, care))\n print(time_care)\n except:\n pass\n date.insert(0, 'Country')\n cols = date\n data_csv = data_csv.loc[:, cols]\n data_csv.T\n data_csv.to_csv('20200517-world-active-data.json.csv')\n df = pd.read_csv('20200517-world-active-data.json.csv')\n new_csv = df.T\n new_csv.to_csv('20200517-world-active-data.json.csv')\n",
"step-2": "<mask token>\n\n\ndef getworld_data(url, header):\n headers = header\n res = requests.get(url, headers=headers)\n res.encoding = 'UTF-8'\n pattern = re.compile(\n '(\\'\\\\{\"(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}\\\\}\\')'\n , re.S)\n end = re.findall(pattern, res.text)\n a = str(end[0])\n with open('test.txt', 'w') as f:\n f.write(a)\n data_relative_confirmed_json = []\n pattern_1 = re.compile(\n '(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}'\n , re.S)\n end_1 = re.findall(pattern_1, a)\n return end_1\n\n\ndef count_time(end_1):\n data_relative_confirmed_json = []\n country = []\n for i in range(len(end_1)):\n data = {'Country': ''}\n data['Country'] = end_1[i][0]\n country.append(end_1[i][0])\n care = end_1[i][5].replace('[', '').replace(']', '').split(',')\n try:\n time = end_1[i][6].replace('/', ',').replace('/', ',').replace('\"',\n '').split(',')\n print(time)\n time[2] = '2020'\n date = []\n in_date = time[2] + '-' + time[0] + '-' + time[1]\n dt = datetime.datetime.strptime(in_date, '%Y-%m-%d')\n for k in range(len(end_1[i][5].replace('[', '').replace(']', ''\n ).split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime(\n '%Y-%m-%d')\n dt = datetime.datetime.strptime(out_date, '%Y-%m-%d')\n date.append(out_date)\n print(date)\n time_care = OrderedDict(zip(date, care))\n print(time_care)\n date_json = OrderedDict(data, **time_care)\n data_relative_confirmed_json.append(date_json)\n except:\n pass\n return data_relative_confirmed_json\n\n\ndef write_json_to_csv(data_relative_confirmed_json, end_1):\n write_list_to_json(data_relative_confirmed_json,\n '20200517-world-active-data.json', 'E:/python_code/world_cov19')\n data_csv = pd.DataFrame(json.loads(open(\n '20200517-world-active-data.json', 'r+').read()))\n print(end_1[36][0])\n care = end_1[36][5].replace('[', '').replace(']', '').split(',')\n try:\n time = end_1[36][6].replace('/', ',').replace('/', ',').replace('\"', ''\n ).split(',')\n print(time)\n time[2] = '2020'\n date = []\n in_date = time[2] + '-' + time[0] + '-' + time[1]\n dt = datetime.datetime.strptime(in_date, '%Y-%m-%d')\n for k in range(len(end_1[36][5].replace('[', '').replace(']', '').\n split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')\n dt = datetime.datetime.strptime(out_date, '%Y-%m-%d')\n date.append(out_date)\n print(date)\n time_care = OrderedDict(zip(date, care))\n print(time_care)\n except:\n pass\n date.insert(0, 'Country')\n cols = date\n data_csv = data_csv.loc[:, cols]\n data_csv.T\n data_csv.to_csv('20200517-world-active-data.json.csv')\n df = pd.read_csv('20200517-world-active-data.json.csv')\n new_csv = df.T\n new_csv.to_csv('20200517-world-active-data.json.csv')\n",
"step-3": "<mask token>\n\n\ndef write_list_to_json(list, json_file_name, json_file_save_path):\n os.chdir(json_file_save_path)\n with open(json_file_name, 'w') as f:\n json.dump(list, f)\n\n\ndef getworld_data(url, header):\n headers = header\n res = requests.get(url, headers=headers)\n res.encoding = 'UTF-8'\n pattern = re.compile(\n '(\\'\\\\{\"(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}\\\\}\\')'\n , re.S)\n end = re.findall(pattern, res.text)\n a = str(end[0])\n with open('test.txt', 'w') as f:\n f.write(a)\n data_relative_confirmed_json = []\n pattern_1 = re.compile(\n '(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}'\n , re.S)\n end_1 = re.findall(pattern_1, a)\n return end_1\n\n\ndef count_time(end_1):\n data_relative_confirmed_json = []\n country = []\n for i in range(len(end_1)):\n data = {'Country': ''}\n data['Country'] = end_1[i][0]\n country.append(end_1[i][0])\n care = end_1[i][5].replace('[', '').replace(']', '').split(',')\n try:\n time = end_1[i][6].replace('/', ',').replace('/', ',').replace('\"',\n '').split(',')\n print(time)\n time[2] = '2020'\n date = []\n in_date = time[2] + '-' + time[0] + '-' + time[1]\n dt = datetime.datetime.strptime(in_date, '%Y-%m-%d')\n for k in range(len(end_1[i][5].replace('[', '').replace(']', ''\n ).split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime(\n '%Y-%m-%d')\n dt = datetime.datetime.strptime(out_date, '%Y-%m-%d')\n date.append(out_date)\n print(date)\n time_care = OrderedDict(zip(date, care))\n print(time_care)\n date_json = OrderedDict(data, **time_care)\n data_relative_confirmed_json.append(date_json)\n except:\n pass\n return data_relative_confirmed_json\n\n\ndef write_json_to_csv(data_relative_confirmed_json, end_1):\n write_list_to_json(data_relative_confirmed_json,\n '20200517-world-active-data.json', 'E:/python_code/world_cov19')\n data_csv = pd.DataFrame(json.loads(open(\n '20200517-world-active-data.json', 'r+').read()))\n print(end_1[36][0])\n care = end_1[36][5].replace('[', '').replace(']', '').split(',')\n try:\n time = end_1[36][6].replace('/', ',').replace('/', ',').replace('\"', ''\n ).split(',')\n print(time)\n time[2] = '2020'\n date = []\n in_date = time[2] + '-' + time[0] + '-' + time[1]\n dt = datetime.datetime.strptime(in_date, '%Y-%m-%d')\n for k in range(len(end_1[36][5].replace('[', '').replace(']', '').\n split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')\n dt = datetime.datetime.strptime(out_date, '%Y-%m-%d')\n date.append(out_date)\n print(date)\n time_care = OrderedDict(zip(date, care))\n print(time_care)\n except:\n pass\n date.insert(0, 'Country')\n cols = date\n data_csv = data_csv.loc[:, cols]\n data_csv.T\n data_csv.to_csv('20200517-world-active-data.json.csv')\n df = pd.read_csv('20200517-world-active-data.json.csv')\n new_csv = df.T\n new_csv.to_csv('20200517-world-active-data.json.csv')\n",
"step-4": "import re\nimport requests\nimport numpy as np\nimport json\nimport os\nfrom collections import OrderedDict\nimport pandas as pd\nimport json\nimport datetime\nimport time\n\n\ndef write_list_to_json(list, json_file_name, json_file_save_path):\n os.chdir(json_file_save_path)\n with open(json_file_name, 'w') as f:\n json.dump(list, f)\n\n\ndef getworld_data(url, header):\n headers = header\n res = requests.get(url, headers=headers)\n res.encoding = 'UTF-8'\n pattern = re.compile(\n '(\\'\\\\{\"(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}\\\\}\\')'\n , re.S)\n end = re.findall(pattern, res.text)\n a = str(end[0])\n with open('test.txt', 'w') as f:\n f.write(a)\n data_relative_confirmed_json = []\n pattern_1 = re.compile(\n '(\\\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}'\n , re.S)\n end_1 = re.findall(pattern_1, a)\n return end_1\n\n\ndef count_time(end_1):\n data_relative_confirmed_json = []\n country = []\n for i in range(len(end_1)):\n data = {'Country': ''}\n data['Country'] = end_1[i][0]\n country.append(end_1[i][0])\n care = end_1[i][5].replace('[', '').replace(']', '').split(',')\n try:\n time = end_1[i][6].replace('/', ',').replace('/', ',').replace('\"',\n '').split(',')\n print(time)\n time[2] = '2020'\n date = []\n in_date = time[2] + '-' + time[0] + '-' + time[1]\n dt = datetime.datetime.strptime(in_date, '%Y-%m-%d')\n for k in range(len(end_1[i][5].replace('[', '').replace(']', ''\n ).split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime(\n '%Y-%m-%d')\n dt = datetime.datetime.strptime(out_date, '%Y-%m-%d')\n date.append(out_date)\n print(date)\n time_care = OrderedDict(zip(date, care))\n print(time_care)\n date_json = OrderedDict(data, **time_care)\n data_relative_confirmed_json.append(date_json)\n except:\n pass\n return data_relative_confirmed_json\n\n\ndef write_json_to_csv(data_relative_confirmed_json, end_1):\n write_list_to_json(data_relative_confirmed_json,\n '20200517-world-active-data.json', 'E:/python_code/world_cov19')\n data_csv = pd.DataFrame(json.loads(open(\n '20200517-world-active-data.json', 'r+').read()))\n print(end_1[36][0])\n care = end_1[36][5].replace('[', '').replace(']', '').split(',')\n try:\n time = end_1[36][6].replace('/', ',').replace('/', ',').replace('\"', ''\n ).split(',')\n print(time)\n time[2] = '2020'\n date = []\n in_date = time[2] + '-' + time[0] + '-' + time[1]\n dt = datetime.datetime.strptime(in_date, '%Y-%m-%d')\n for k in range(len(end_1[36][5].replace('[', '').replace(']', '').\n split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')\n dt = datetime.datetime.strptime(out_date, '%Y-%m-%d')\n date.append(out_date)\n print(date)\n time_care = OrderedDict(zip(date, care))\n print(time_care)\n except:\n pass\n date.insert(0, 'Country')\n cols = date\n data_csv = data_csv.loc[:, cols]\n data_csv.T\n data_csv.to_csv('20200517-world-active-data.json.csv')\n df = 
pd.read_csv('20200517-world-active-data.json.csv')\n new_csv = df.T\n new_csv.to_csv('20200517-world-active-data.json.csv')\n",
"step-5": "import re\nimport requests\nimport numpy as np\nimport json\nimport os\nfrom collections import OrderedDict\nimport pandas as pd\nimport json\nimport datetime\nimport time\n#将数组写入json文件方便pandas的读取\ndef write_list_to_json(list, json_file_name, json_file_save_path):\n os.chdir(json_file_save_path)\n with open(json_file_name, 'w') as f:\n json.dump(list, f)\n\n#获取数据算法\ndef getworld_data(url,header):\n headers = header\n res = requests.get(url,headers = headers)\n res.encoding = \"UTF-8\"\n pattern = re.compile('(\\'\\{\"(\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}\\}\\')',re.S)\n end = re.findall(pattern,res.text)\n a=str(end[0])\n with open('test.txt','w') as f:\n f.write(a)\n data_relative_confirmed_json=[]\n pattern_1 = re.compile('(\\w+)\":{\"active\":(.*?),\"confirmed\":(.*?),\"deaths\":(.*?),\"recovered\":(.*?),\"relative_active\":(.*?),\"relative_active_start_date\":(.*?),\"relative_confirmed\":(.*?),\"relative_confirmed_start_date\":(.*?),\"relative_deaths\":(.*?),\"relative_deaths_start_date\":(.*?),\"relative_recovered\":(.*?),\"relative_recovered_start_date\":(.*?)}',re.S)\n end_1=re.findall(pattern_1,a)\n return end_1\n\n#时间推算算法及数据写入\ndef count_time(end_1):\n data_relative_confirmed_json=[]\n country=[]\n for i in range(len(end_1)):\n data={\n 'Country':'',\n }\n data['Country']=end_1[i][0]\n #确诊人数\n country.append(end_1[i][0])\n care=end_1[i][5].replace('[','').replace(']','').split(',')\n try:\n time=end_1[i][6].replace('/',',').replace('/',',').replace('\"','').split(',')\n print(time)\n time[2]='2020'\n date=[]\n in_date = time[2]+'-'+time[0]+'-'+time[1]\n dt = datetime.datetime.strptime(in_date, \"%Y-%m-%d\")\n for k in range(len(end_1[i][5].replace('[','').replace(']','').split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime(\"%Y-%m-%d\")\n dt=datetime.datetime.strptime(out_date, \"%Y-%m-%d\")\n date.append(out_date)\n print(date)\n time_care=OrderedDict(zip(date,care))\n print(time_care)\n date_json=OrderedDict(data,**time_care)\n data_relative_confirmed_json.append(date_json)\n \n except:\n pass\n return data_relative_confirmed_json\n\ndef write_json_to_csv(data_relative_confirmed_json,end_1):\n write_list_to_json(data_relative_confirmed_json,'20200517-world-active-data.json','E:/python_code/world_cov19')\n data_csv=pd.DataFrame(json.loads(open('20200517-world-active-data.json','r+').read()))\n print(end_1[36][0])\n care=end_1[36][5].replace('[','').replace(']','').split(',')\n try:\n time=end_1[36][6].replace('/',',').replace('/',',').replace('\"','').split(',')\n print(time)\n time[2]='2020'\n date=[]\n in_date = time[2]+'-'+time[0]+'-'+time[1]\n dt = datetime.datetime.strptime(in_date, \"%Y-%m-%d\")\n for k in range(len(end_1[36][5].replace('[','').replace(']','').split(','))):\n out_date = (dt + datetime.timedelta(days=1)).strftime(\"%Y-%m-%d\")\n dt=datetime.datetime.strptime(out_date, \"%Y-%m-%d\")\n date.append(out_date)\n print(date)\n time_care=OrderedDict(zip(date,care))\n print(time_care)\n except:\n pass\n date.insert(0,'Country')\n cols=date\n data_csv=data_csv.loc[:,cols]\n data_csv.T\n data_csv.to_csv('20200517-world-active-data.json.csv')\n df=pd.read_csv('20200517-world-active-data.json.csv')\n new_csv=df.T\n 
new_csv.to_csv('20200517-world-active-data.json.csv')\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from ocr_helpers import FilePathResolver, ProblemsWriter
from ocr_google_client import CfaProblemsBuilder
from ocr_google_client_2016 import ParserTwoThousandSixteenAnswers, ParserTwoThousandSixteenQuestions
def resolve_build_and_write(year, day_part, file_part, nb_blocks_footer=0,
                            nb_words_footer=0, headers=None, skip_nb_page=0,
                            parser=None, indentation_threshold=15):
resolver = FilePathResolver(year, day_part, file_part)
jpeg_filepaths = resolver.resolve_sorted_paths()
jpeg_filepaths = jpeg_filepaths[skip_nb_page:]
    builder = CfaProblemsBuilder(parser=parser, headers=headers,
                                 nb_blocks_footer=nb_blocks_footer,
                                 nb_words_footer=nb_words_footer,
                                 indentation_threshold=indentation_threshold)
problems = builder.build_problems(jpeg_filepaths)
writer = ProblemsWriter()
writer.write_problems(resolver.get_xml_result_file(), problems)
# 2014 afternoon
# headers = ["7476229133318632 March Mock Exam - PM March Mock Exam - PM 399388"]
# resolve_build_and_write('2014', 'afternoon', 'answer', nb_blocks_footer=1, headers=headers, indentation_threshold=25)
# 2014 morning
# base_header = '3172168919041893 March Mock Exam - AM 399388'
# headers = ["|" + base_header, base_header]
# resolve_build_and_write('2014', 'morning', 'answer', nb_blocks_footer=1, headers=headers)
# 2015 afternoon
# headers = ['2015 Level I Mock Exam PM Questions and Answers']
# resolve_build_and_write('2015', 'afternoon', 'answer', nb_blocks_footer=1, headers=headers)
# 2015 morning
# headers = ['2015 Level I Mock Exam AM Questions and Answers']
# resolve_build_and_write('2015', 'morning', 'answer', nb_blocks_footer=1, headers=headers)
# 2016 afternoon answer
# headers = ['CFA level1-Mock-114']
# parser = ParserTwoThousandSixteenAnswers(17)
# resolve_build_and_write('2016', 'afternoon_answer', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
# 2016 afternoon questions
# headers = ['CFA level1-Mock-114', 'CFA levell-Mock-114']
# parser = ParserTwoThousandSixteenQuestions(17)
# resolve_build_and_write('2016', 'afternoon_question', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
#
# 2016 morning answer
# headers = ['CFA level1-Mock-113']
# parser = ParserTwoThousandSixteenAnswers(17)
# resolve_build_and_write('2016', 'morning_answer', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
# 2016 morning questions
# headers = ['CFA level1-Mock-113', 'CFA levell-Mock-113']
# parser = ParserTwoThousandSixteenQuestions(17)
# resolve_build_and_write('2016', 'morning_question', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
# 2017 morning
# resolve_build_and_write('2017', 'morning', 'answer', skip_nb_page=1, nb_blocks_footer=2)
# 2017 afternoon
resolve_build_and_write('2017', 'afternoon', 'answer', skip_nb_page=1, nb_blocks_footer=2)
|
normal
|
{
"blob_id": "ab3d443c60ca8ee82f594ae04e9b485a53d53f36",
"index": 5665,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef resolve_build_and_write(year, day_part, file_part, nb_blocks_footer=0,\n nb_words_footer=0, headers=None, skip_nb_page=0, parser=None,\n indentation_threshold=15):\n resolver = FilePathResolver(year, day_part, file_part)\n jpeg_filepaths = resolver.resolve_sorted_paths()\n jpeg_filepaths = jpeg_filepaths[skip_nb_page:]\n builder = CfaProblemsBuilder(parser=parser, headers=headers,\n nb_blocks_footer=nb_blocks_footer, nb_words_footer=nb_words_footer,\n indentation_threshold=indentation_threshold)\n problems = builder.build_problems(jpeg_filepaths)\n writer = ProblemsWriter()\n writer.write_problems(resolver.get_xml_result_file(), problems)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef resolve_build_and_write(year, day_part, file_part, nb_blocks_footer=0,\n nb_words_footer=0, headers=None, skip_nb_page=0, parser=None,\n indentation_threshold=15):\n resolver = FilePathResolver(year, day_part, file_part)\n jpeg_filepaths = resolver.resolve_sorted_paths()\n jpeg_filepaths = jpeg_filepaths[skip_nb_page:]\n builder = CfaProblemsBuilder(parser=parser, headers=headers,\n nb_blocks_footer=nb_blocks_footer, nb_words_footer=nb_words_footer,\n indentation_threshold=indentation_threshold)\n problems = builder.build_problems(jpeg_filepaths)\n writer = ProblemsWriter()\n writer.write_problems(resolver.get_xml_result_file(), problems)\n\n\nresolve_build_and_write('2017', 'afternoon', 'answer', skip_nb_page=1,\n nb_blocks_footer=2)\n",
"step-4": "from ocr_helpers import FilePathResolver, ProblemsWriter\nfrom ocr_google_client import CfaProblemsBuilder\nfrom ocr_google_client_2016 import ParserTwoThousandSixteenAnswers, ParserTwoThousandSixteenQuestions\n\n\ndef resolve_build_and_write(year, day_part, file_part, nb_blocks_footer=0,\n nb_words_footer=0, headers=None, skip_nb_page=0, parser=None,\n indentation_threshold=15):\n resolver = FilePathResolver(year, day_part, file_part)\n jpeg_filepaths = resolver.resolve_sorted_paths()\n jpeg_filepaths = jpeg_filepaths[skip_nb_page:]\n builder = CfaProblemsBuilder(parser=parser, headers=headers,\n nb_blocks_footer=nb_blocks_footer, nb_words_footer=nb_words_footer,\n indentation_threshold=indentation_threshold)\n problems = builder.build_problems(jpeg_filepaths)\n writer = ProblemsWriter()\n writer.write_problems(resolver.get_xml_result_file(), problems)\n\n\nresolve_build_and_write('2017', 'afternoon', 'answer', skip_nb_page=1,\n nb_blocks_footer=2)\n",
"step-5": "from ocr_helpers import FilePathResolver, ProblemsWriter\nfrom ocr_google_client import CfaProblemsBuilder\nfrom ocr_google_client_2016 import ParserTwoThousandSixteenAnswers, ParserTwoThousandSixteenQuestions\n\n\ndef resolve_build_and_write(year, day_part, file_part, nb_blocks_footer=0, nb_words_footer=0, headers=None, skip_nb_page=0, parser=None, indentation_threshold=15):\n resolver = FilePathResolver(year, day_part, file_part)\n jpeg_filepaths = resolver.resolve_sorted_paths()\n jpeg_filepaths = jpeg_filepaths[skip_nb_page:]\n\n builder = CfaProblemsBuilder(parser=parser, headers=headers, nb_blocks_footer=nb_blocks_footer, nb_words_footer=nb_words_footer, indentation_threshold=indentation_threshold)\n problems = builder.build_problems(jpeg_filepaths)\n\n writer = ProblemsWriter()\n writer.write_problems(resolver.get_xml_result_file(), problems)\n\n\n# 2014 afternoon\n# headers = [\"7476229133318632 March Mock Exam - PM March Mock Exam - PM 399388\"]\n# resolve_build_and_write('2014', 'afternoon', 'answer', nb_blocks_footer=1, headers=headers, indentation_threshold=25)\n\n# 2014 morning\n# base_header = '3172168919041893 March Mock Exam - AM 399388'\n# headers = [\"|\" + base_header, base_header]\n# resolve_build_and_write('2014', 'morning', 'answer', nb_blocks_footer=1, headers=headers)\n\n# 2015 afternoon\n# headers = ['2015 Level I Mock Exam PM Questions and Answers']\n# resolve_build_and_write('2015', 'afternoon', 'answer', nb_blocks_footer=1, headers=headers)\n\n# 2015 morning\n# headers = ['2015 Level I Mock Exam AM Questions and Answers']\n# resolve_build_and_write('2015', 'morning', 'answer', nb_blocks_footer=1, headers=headers)\n\n# 2016 afternoon answer\n# headers = ['CFA level1-Mock-114']\n# parser = ParserTwoThousandSixteenAnswers(17)\n# resolve_build_and_write('2016', 'afternoon_answer', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)\n\n# 2016 afternoon questions\n# headers = ['CFA level1-Mock-114', 'CFA levell-Mock-114']\n# parser = ParserTwoThousandSixteenQuestions(17)\n# resolve_build_and_write('2016', 'afternoon_question', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)\n#\n# 2016 morning answer\n# headers = ['CFA level1-Mock-113']\n# parser = ParserTwoThousandSixteenAnswers(17)\n# resolve_build_and_write('2016', 'morning_answer', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)\n\n# 2016 afternoon questions\n# headers = ['CFA level1-Mock-113', 'CFA levell-Mock-113']\n# parser = ParserTwoThousandSixteenQuestions(17)\n# resolve_build_and_write('2016', 'morning_question', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)\n\n# 2017 afternoon\n#resolve_build_and_write('2017', 'morning', 'answer', skip_nb_page=1, nb_blocks_footer=2)\n\n# 2017 afternoon\nresolve_build_and_write('2017', 'afternoon', 'answer', skip_nb_page=1, nb_blocks_footer=2)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.db import models
from django.utils.text import slugify
import misaka
from django.urls import reverse
from django.contrib.auth import get_user_model
from django import template
register = template.Library()
User = get_user_model()  # the project's currently active user model (honors AUTH_USER_MODEL)
# Create your models here.
class Group(models.Model):
    name = models.CharField(max_length=128, unique=True)
    slug = models.SlugField(allow_unicode=True, unique=True)  # unique, to avoid overlapping group names
    description = models.TextField(blank=True, default='')
    description_html = models.TextField(editable=False, default='', blank=True)
    member = models.ManyToManyField(User, through='GroupMember')
def __str__(self):
return self.name
    def save(self, *args, **kwargs):
        self.slug = slugify(self.name)  # slugify makes the name URL-safe, spaces and all
        self.description_html = misaka.html(self.description)
        super().save(*args, **kwargs)
def get_absolute_url(self):
return reverse("groups:single", kwargs={"slug": self.slug})
class GroupMember(models.Model):
    group = models.ForeignKey(Group, related_name='memberships', on_delete=models.CASCADE)
    user = models.ForeignKey(User, related_name='user_groups', on_delete=models.CASCADE)
def __str__(self):
return self.user.username
class Meta:
unique_together=('group','user')
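# A minimal usage sketch (hedged: assumes migrations are applied; `some_user`
# is an illustrative stand-in for a real User instance):
# group = Group.objects.create(name='Django Fans', description='supports **markdown** via misaka')
# GroupMember.objects.create(group=group, user=some_user)
# group.get_absolute_url()  # -> the 'groups:single' URL for slug 'django-fans'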
|
normal
|
{
"blob_id": "51563f52e700a286451663a6e837d56e104c2c72",
"index": 2849,
"step-1": "<mask token>\n\n\nclass Group(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.name\n <mask token>\n <mask token>\n\n\nclass GroupMember(models.Model):\n group = models.ForeignKey(Group, related_name='memberships', on_delete=\n models.CASCADE)\n user = models.ForeignKey(User, related_name='user_groups', on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.user.username\n\n\n class Meta:\n unique_together = 'group', 'user'\n",
"step-2": "<mask token>\n\n\nclass Group(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.name\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.name)\n self.description_html = misaka.html(self.description)\n super().save(*args, **kwargs)\n\n def get_absolute_url(self):\n return reverse('groups:single', kwargs={'slug': self.slug})\n\n\nclass GroupMember(models.Model):\n group = models.ForeignKey(Group, related_name='memberships', on_delete=\n models.CASCADE)\n user = models.ForeignKey(User, related_name='user_groups', on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.user.username\n\n\n class Meta:\n unique_together = 'group', 'user'\n",
"step-3": "<mask token>\n\n\nclass Group(models.Model):\n name = models.CharField(max_length=128, unique=True)\n slug = models.SlugField(allow_unicode=True, unique=True)\n description = models.TextField(blank=True, default='')\n description_html = models.TextField(editable=False, default='', blank=True)\n member = models.ManyToManyField(User, through='GroupMember')\n\n def __str__(self):\n return self.name\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.name)\n self.description_html = misaka.html(self.description)\n super().save(*args, **kwargs)\n\n def get_absolute_url(self):\n return reverse('groups:single', kwargs={'slug': self.slug})\n\n\nclass GroupMember(models.Model):\n group = models.ForeignKey(Group, related_name='memberships', on_delete=\n models.CASCADE)\n user = models.ForeignKey(User, related_name='user_groups', on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.user.username\n\n\n class Meta:\n unique_together = 'group', 'user'\n",
"step-4": "<mask token>\nregister = template.Library()\nUser = get_user_model()\n\n\nclass Group(models.Model):\n name = models.CharField(max_length=128, unique=True)\n slug = models.SlugField(allow_unicode=True, unique=True)\n description = models.TextField(blank=True, default='')\n description_html = models.TextField(editable=False, default='', blank=True)\n member = models.ManyToManyField(User, through='GroupMember')\n\n def __str__(self):\n return self.name\n\n def save(self, *args, **kwargs):\n self.slug = slugify(self.name)\n self.description_html = misaka.html(self.description)\n super().save(*args, **kwargs)\n\n def get_absolute_url(self):\n return reverse('groups:single', kwargs={'slug': self.slug})\n\n\nclass GroupMember(models.Model):\n group = models.ForeignKey(Group, related_name='memberships', on_delete=\n models.CASCADE)\n user = models.ForeignKey(User, related_name='user_groups', on_delete=\n models.CASCADE)\n\n def __str__(self):\n return self.user.username\n\n\n class Meta:\n unique_together = 'group', 'user'\n",
"step-5": "from django.db import models\nfrom django.utils.text import slugify\nimport misaka\nfrom django.urls import reverse \nfrom django.contrib.auth import get_user_model\nfrom django import template\nregister=template.Library()\n\n\nUser=get_user_model() #call things out of users current session\n\n# Create your models here.\n\n\nclass Group(models.Model):\n name = models.CharField(max_length=128,unique=True)\n slug = models.SlugField(allow_unicode=True,unique=True) #to avoid overlappping of the group names\n description= models.TextField(blank=True,default='')\n description_html=models.TextField(editable=False,default='',blank=True)\n member= models.ManyToManyField(User,through='GroupMember')\n \n def __str__(self):\n return self.name\n \n def save(self,*args,**kwargs):\n self.slug=slugify(self.name) #whatever the name is we can put spaces in it\n self.description_html=misaka.html(self.description)\n super().save(*args,**kwargs)\n \n def get_absolute_url(self):\n return reverse(\"groups:single\", kwargs={\"slug\": self.slug})\n \n\nclass GroupMember(models.Model):\n group = models.ForeignKey(Group, related_name='memberships',on_delete=models.CASCADE)\n user = models.ForeignKey(User, related_name='user_groups',on_delete=models.CASCADE)\n \n def __str__(self):\n return self.user.username\n \n class Meta:\n unique_together=('group','user')",
"step-ids": [
5,
7,
8,
9,
11
]
}
|
[
5,
7,
8,
9,
11
] |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>)
# Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from osv import osv, fields
class SaleOrder(osv.osv):
'''
Sale Order
'''
_inherit = 'sale.order'
_columns = {
'coupon_code':fields.char('Promo Coupon Code', size=20),
}
def apply_promotions(self, cursor, user, ids, context=None):
"""
Applies the promotions to the given records
@param cursor: Database Cursor
@param user: ID of User
@param ids: ID of current record.
@param context: Context(no direct use).
"""
promotions_obj = self.pool.get('promos.rules')
for order_id in ids:
promotions_obj.apply_promotions(cursor, user,
order_id, context=None)
return True
SaleOrder()
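# Hedged note: 'promos.rules' is assumed to be defined elsewhere in this
# promotions module; apply_promotions is typically wired to a form button,
# so the rules are evaluated once per order on demand.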
class SaleOrderLine(osv.osv):
'''
Sale Order Line
'''
_inherit = "sale.order.line"
_columns = {
'promotion_line':fields.boolean(
"Promotion Line",
help="Indicates if the line was created by promotions"
)
}
SaleOrderLine()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
normal
|
{
"blob_id": "d9538c030c0225c4255100da70d6bf23f550a64f",
"index": 734,
"step-1": "<mask token>\n\n\nclass SaleOrderLine(osv.osv):\n <mask token>\n _inherit = 'sale.order.line'\n _columns = {'promotion_line': fields.boolean('Promotion Line', help=\n 'Indicates if the line was created by promotions')}\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SaleOrder(osv.osv):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass SaleOrderLine(osv.osv):\n \"\"\"\n Sale Order Line\n \"\"\"\n _inherit = 'sale.order.line'\n _columns = {'promotion_line': fields.boolean('Promotion Line', help=\n 'Indicates if the line was created by promotions')}\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SaleOrder(osv.osv):\n <mask token>\n _inherit = 'sale.order'\n _columns = {'coupon_code': fields.char('Promo Coupon Code', size=20)}\n\n def apply_promotions(self, cursor, user, ids, context=None):\n \"\"\"\n Applies the promotions to the given records\n @param cursor: Database Cursor\n @param user: ID of User\n @param ids: ID of current record.\n @param context: Context(no direct use).\n \"\"\"\n promotions_obj = self.pool.get('promos.rules')\n for order_id in ids:\n promotions_obj.apply_promotions(cursor, user, order_id, context\n =None)\n return True\n\n\n<mask token>\n\n\nclass SaleOrderLine(osv.osv):\n \"\"\"\n Sale Order Line\n \"\"\"\n _inherit = 'sale.order.line'\n _columns = {'promotion_line': fields.boolean('Promotion Line', help=\n 'Indicates if the line was created by promotions')}\n\n\n<mask token>\n",
"step-4": "from osv import osv, fields\n\n\nclass SaleOrder(osv.osv):\n \"\"\"\n Sale Order\n \"\"\"\n _inherit = 'sale.order'\n _columns = {'coupon_code': fields.char('Promo Coupon Code', size=20)}\n\n def apply_promotions(self, cursor, user, ids, context=None):\n \"\"\"\n Applies the promotions to the given records\n @param cursor: Database Cursor\n @param user: ID of User\n @param ids: ID of current record.\n @param context: Context(no direct use).\n \"\"\"\n promotions_obj = self.pool.get('promos.rules')\n for order_id in ids:\n promotions_obj.apply_promotions(cursor, user, order_id, context\n =None)\n return True\n\n\nSaleOrder()\n\n\nclass SaleOrderLine(osv.osv):\n \"\"\"\n Sale Order Line\n \"\"\"\n _inherit = 'sale.order.line'\n _columns = {'promotion_line': fields.boolean('Promotion Line', help=\n 'Indicates if the line was created by promotions')}\n\n\nSaleOrderLine()\n",
"step-5": "# -*- coding: utf-8 -*-\n##############################################################################\n#\n# OpenERP, Open Source Management Solution\n# Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>)\n# Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>)\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>\n#\n##############################################################################\nfrom osv import osv, fields\n\nclass SaleOrder(osv.osv):\n '''\n Sale Order\n '''\n _inherit = 'sale.order'\n \n _columns = {\n 'coupon_code':fields.char('Promo Coupon Code', size=20),\n }\n \n def apply_promotions(self, cursor, user, ids, context=None):\n \"\"\"\n Applies the promotions to the given records\n @param cursor: Database Cursor\n @param user: ID of User\n @param ids: ID of current record.\n @param context: Context(no direct use).\n \"\"\"\n promotions_obj = self.pool.get('promos.rules')\n for order_id in ids:\n promotions_obj.apply_promotions(cursor, user, \n order_id, context=None)\n \n return True\n \nSaleOrder()\n\n\nclass SaleOrderLine(osv.osv):\n '''\n Sale Order Line\n '''\n _inherit = \"sale.order.line\"\n \n _columns = {\n 'promotion_line':fields.boolean(\n \"Promotion Line\",\n help=\"Indicates if the line was created by promotions\"\n )\n }\nSaleOrderLine()\n# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:",
"step-ids": [
2,
4,
6,
9,
10
]
}
|
[
2,
4,
6,
9,
10
] |
from . import mongo
col = mongo.cli['Cupidbot']['timer']
async def add_time(chat, time):
return col.insert_one({'chat': chat, 'time': time})
async def get_time(chat):
return col.find_one({'chat': chat})
async def update_time(chat, time):
return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':
time}})
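# Usage sketch (hedged: mongo.cli is assumed to be a synchronous pymongo
# MongoClient, so these coroutines block the event loop; an async driver
# such as motor would avoid that):
# await add_time(chat_id, 3600)
# doc = await get_time(chat_id)  # -> {'_id': ..., 'chat': chat_id, 'time': 3600}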
|
normal
|
{
"blob_id": "e4ce10f5db56e4e2e1988da3cee542a4a09785a8",
"index": 5381,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nasync def add_time(chat, time):\n return col.insert_one({'chat': chat, 'time': time})\n\n\nasync def get_time(chat):\n return col.find_one({'chat': chat})\n\n\nasync def update_time(chat, time):\n return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':\n time}})\n",
"step-3": "<mask token>\ncol = mongo.cli['Cupidbot']['timer']\n\n\nasync def add_time(chat, time):\n return col.insert_one({'chat': chat, 'time': time})\n\n\nasync def get_time(chat):\n return col.find_one({'chat': chat})\n\n\nasync def update_time(chat, time):\n return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':\n time}})\n",
"step-4": "from . import mongo\ncol = mongo.cli['Cupidbot']['timer']\n\n\nasync def add_time(chat, time):\n return col.insert_one({'chat': chat, 'time': time})\n\n\nasync def get_time(chat):\n return col.find_one({'chat': chat})\n\n\nasync def update_time(chat, time):\n return col.update_one({'chat': chat}, {'$set': {'chat': chat, 'time':\n time}})\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sqlite3
conn = sqlite3.connect("blog.db")
c = conn.cursor()
q = "CREATE TABLE users(Username text, Password text, UserID integer)"
c.execute(q)
q = "CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)"
c.execute(q)
q = "CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)"
c.execute(q)
conn.commit()
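# The tables are linked only informally through the UserID/BlogID integer
# columns; no PRIMARY KEY or FOREIGN KEY constraints are declared.
conn.close()  # hedged addition: release the connection once the schema is committed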
|
normal
|
{
"blob_id": "8afaa69d3a20c5e39e6321869f25dbd9020a5b3a",
"index": 2460,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nc.execute(q)\n<mask token>\nc.execute(q)\n<mask token>\nc.execute(q)\nconn.commit()\n",
"step-3": "<mask token>\nconn = sqlite3.connect('blog.db')\nc = conn.cursor()\nq = 'CREATE TABLE users(Username text, Password text, UserID integer)'\nc.execute(q)\nq = (\n 'CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)'\n )\nc.execute(q)\nq = (\n 'CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)'\n )\nc.execute(q)\nconn.commit()\n",
"step-4": "import sqlite3\nconn = sqlite3.connect('blog.db')\nc = conn.cursor()\nq = 'CREATE TABLE users(Username text, Password text, UserID integer)'\nc.execute(q)\nq = (\n 'CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)'\n )\nc.execute(q)\nq = (\n 'CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)'\n )\nc.execute(q)\nconn.commit()\n",
"step-5": "import sqlite3\n\nconn = sqlite3.connect(\"blog.db\")\n\nc = conn.cursor()\n\nq = \"CREATE TABLE users(Username text, Password text, UserID integer)\"\nc.execute(q)\n\nq = \"CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)\"\nc.execute(q)\n\nq = \"CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)\"\nc.execute(q)\n\nconn.commit() \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.db import models
class Author(models.Model):
author = models.CharField(
"Author",
max_length=30,
blank=False,
null=False
)
biography = models.TextField(
"About author",
max_length=500,
blank=True,
null=True
)
def __str__(self):
return self.author
class Series(models.Model):
title = models.CharField(
"Title of series",
max_length=100,
blank=False,
null=False
)
description = models.TextField(
"About this series",
max_length=500,
blank=True,
null=True
)
def __str__(self):
return self.title
class Genre(models.Model):
genre = models.CharField(
"Genre",
max_length=50,
blank=False,
null=False
)
description = models.TextField(
"About this genre",
max_length=500,
blank=True,
null=True
)
def __str__(self):
return self.genre
class PublishingHouse(models.Model):
house = models.CharField(
"Publishing House",
max_length=40,
blank=False,
null=False
)
history = models.TextField(
"Other books of this house",
max_length=500,
blank=True,
null=True
)
def __str__(self):
return self.house
|
normal
|
{
"blob_id": "b34ad8d7fc8df0ab86c5930ab2b5aa1f86d13ae3",
"index": 7580,
"step-1": "<mask token>\n\n\nclass Series(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Genre(models.Model):\n genre = models.CharField('Genre', max_length=50, blank=False, null=False)\n description = models.TextField('About this genre', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.genre\n\n\nclass PublishingHouse(models.Model):\n house = models.CharField('Publishing House', max_length=40, blank=False,\n null=False)\n history = models.TextField('Other books of this house', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.house\n",
"step-2": "<mask token>\n\n\nclass Series(models.Model):\n title = models.CharField('Title of series', max_length=100, blank=False,\n null=False)\n description = models.TextField('About this series', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.title\n\n\nclass Genre(models.Model):\n genre = models.CharField('Genre', max_length=50, blank=False, null=False)\n description = models.TextField('About this genre', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.genre\n\n\nclass PublishingHouse(models.Model):\n house = models.CharField('Publishing House', max_length=40, blank=False,\n null=False)\n history = models.TextField('Other books of this house', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.house\n",
"step-3": "<mask token>\n\n\nclass Author(models.Model):\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.author\n\n\nclass Series(models.Model):\n title = models.CharField('Title of series', max_length=100, blank=False,\n null=False)\n description = models.TextField('About this series', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.title\n\n\nclass Genre(models.Model):\n genre = models.CharField('Genre', max_length=50, blank=False, null=False)\n description = models.TextField('About this genre', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.genre\n\n\nclass PublishingHouse(models.Model):\n house = models.CharField('Publishing House', max_length=40, blank=False,\n null=False)\n history = models.TextField('Other books of this house', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.house\n",
"step-4": "<mask token>\n\n\nclass Author(models.Model):\n author = models.CharField('Author', max_length=30, blank=False, null=False)\n biography = models.TextField('About author', max_length=500, blank=True,\n null=True)\n\n def __str__(self):\n return self.author\n\n\nclass Series(models.Model):\n title = models.CharField('Title of series', max_length=100, blank=False,\n null=False)\n description = models.TextField('About this series', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.title\n\n\nclass Genre(models.Model):\n genre = models.CharField('Genre', max_length=50, blank=False, null=False)\n description = models.TextField('About this genre', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.genre\n\n\nclass PublishingHouse(models.Model):\n house = models.CharField('Publishing House', max_length=40, blank=False,\n null=False)\n history = models.TextField('Other books of this house', max_length=500,\n blank=True, null=True)\n\n def __str__(self):\n return self.house\n",
"step-5": "from django.db import models\n\n\nclass Author(models.Model):\n author = models.CharField(\n \"Author\",\n max_length=30,\n blank=False,\n null=False\n )\n\n biography = models.TextField(\n \"About author\",\n max_length=500,\n blank=True,\n null=True\n )\n\n def __str__(self):\n return self.author\n\n\nclass Series(models.Model):\n title = models.CharField(\n \"Title of series\",\n max_length=100,\n blank=False,\n null=False\n )\n\n description = models.TextField(\n \"About this series\",\n max_length=500,\n blank=True,\n null=True\n )\n\n def __str__(self):\n return self.title\n\n\nclass Genre(models.Model):\n genre = models.CharField(\n \"Genre\",\n max_length=50,\n blank=False,\n null=False\n )\n\n description = models.TextField(\n \"About this genre\",\n max_length=500,\n blank=True,\n null=True\n )\n\n def __str__(self):\n return self.genre\n\n\nclass PublishingHouse(models.Model):\n house = models.CharField(\n \"Publishing House\",\n max_length=40,\n blank=False,\n null=False\n\n )\n\n history = models.TextField(\n \"Other books of this house\",\n max_length=500,\n blank=True,\n null=True\n )\n\n def __str__(self):\n return self.house\n",
"step-ids": [
7,
9,
11,
12,
14
]
}
|
[
7,
9,
11,
12,
14
] |
from django.conf import settings
from django.db import models
def get_image_filename(instance, filename):
a = f'post_images/{instance.post.title}.svg'
return a
def get_main_image_filename(instance, filename):
a = f'post_images/{instance.title}_main.svg'
return a
# Create your models here.
class Posts(models.Model):
PYEONG_CHOICE_FIELD = (
('1-7', '1-7평'),
('8-15', '8-15평'),
('16-25', '16-25평'),
('26-', '그 이상'),
)
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
)
title = models.TextField(
'제목', max_length=50
)
content = models.TextField(
'작성 글', max_length=500
)
main_image = models.ImageField(
upload_to=get_main_image_filename,
blank=True,
null=True,
verbose_name='메인 이미지',
)
pyeong = models.ManyToManyField(
'Pyeong',
blank=True,
)
created_at = models.DateTimeField(
'생성 날짜', auto_now_add=True,
)
updated_at = models.DateTimeField(
verbose_name='수정 날짜', auto_now=True, null=True, blank=True
)
like_users = models.ManyToManyField(
'members.Users',
through='PostLike',
related_name='like_posts',
related_query_name='like_post',
blank=True,
)
colors = models.ManyToManyField(
'posts.Colors',
blank=True,
)
housingtype = models.ManyToManyField(
'HousingTypes',
blank=True,
)
style = models.ManyToManyField(
'Styles',
blank=True,
)
postPyeong = models.CharField(max_length=10, choices=PYEONG_CHOICE_FIELD)
@staticmethod
def initial_setting():
Pyeong.make_pyeng()
Colors.make_color()
HousingTypes.make_housing_type()
Styles.make_style()
class Meta:
verbose_name = '게시글'
verbose_name_plural = '게시글 목록'
def __str__(self):
return '%s : %s' % (self.pk, self.title)
class Comments(models.Model):
post = models.ForeignKey(
Posts,
on_delete=models.CASCADE,
verbose_name='포스트',
related_name='comment_set',
related_query_name='comments',
)
author = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
)
content = models.TextField(
'댓글 내용', max_length=500
)
    # author of the comment
created_at = models.DateTimeField(
'작성 날', auto_now_add=True,
)
updated_at = models.DateTimeField(
'수정 날짜', auto_now=True,
)
class Meta:
verbose_name = '댓글'
verbose_name_plural = '댓글 목록'
def save(self, *args, **kwargs):
        # image handling will probably go here
super().save(*args, **kwargs)
class PostLike(models.Model):
post = models.ForeignKey(
Posts,
on_delete=models.CASCADE,
)
user = models.ForeignKey(
'members.Users',
on_delete=models.CASCADE,
)
created_at = models.DateTimeField(
auto_now_add=True,
)
def __str__(self):
return 'Post[{post_pk}] Like (User: {username})'.format(
post_pk=self.post.pk,
username=self.user.username,
)
class Meta:
verbose_name = '게시글 좋아요'
verbose_name_plural = f'{verbose_name} 목록'
        # a given user may like a given post only once
unique_together = (
('post', 'user'),
)
class Pyeong(models.Model):
type = models.CharField(
'평 수',
max_length=20,
)
@staticmethod
def make_pyeng():
index_list = ['1-7', '8-15', '16-25', '그 이상']
        for i in range(len(index_list)):
Pyeong.objects.create(type=index_list[i])
def __str__(self):
return '%s : %s' % (self.pk, self.type)
class HousingTypes(models.Model):
type = models.CharField(
'주거 환경',
max_length=20,
)
@staticmethod
def make_housing_type():
index_list = ['빌라', '아파트', '오피스텔', '원룸', '투쓰리룸', '복층']
for i in range(len(index_list)):
HousingTypes.objects.create(type=index_list[i])
def __str__(self):
return '%s : %s' % (self.pk, self.type)
class Styles(models.Model):
type = models.CharField(
'디자인 스타일',
max_length=10,
)
@staticmethod
def make_style():
index_list = ['모던', '미니멀리즘', '한국', '스칸다나비아', '인더스트리얼', '프로방스', '로맨틱', '클래식', '엔틱']
for i in range(len(index_list)):
Styles.objects.create(type=index_list[i])
def __str__(self):
return '%s : %s' % (self.pk, self.type)
class Colors(models.Model):
type = models.CharField(
'색상',
max_length=10
)
@staticmethod
def make_color():
index_list = ['빨강', '주황', '노랑', '초록', '파랑', '남색', '보라색', '검정', '흰색', '회색']
for i in range(len(index_list)):
Colors.objects.create(type=index_list[i])
def __str__(self):
return '%s : %s' % (self.pk, self.type)
class PostImages(models.Model):
post = models.ForeignKey(
Posts,
on_delete=models.CASCADE,
)
image = models.ImageField(
upload_to=get_image_filename,
verbose_name='다중 이미지',
)
image_comment = models.TextField(
'사진 설명', max_length=200, blank=True, null=True,
)
    # multi-image upload reference (Stack Overflow):
# https://stackoverflow.com/questions/34006994/how-to-upload-multiple-images-to-a-blog-post-in-django
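# Seeding sketch (hedged: initial_setting is not idempotent, so run it once,
# e.g. from a data migration or the Django shell after `migrate`):
# Posts.initial_setting()  # populates Pyeong, Colors, HousingTypes and Styles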
|
normal
|
{
"blob_id": "1bbadf02c4b9ca22a0099bcc09fa4c62c9901c39",
"index": 1069,
"step-1": "<mask token>\n\n\nclass Styles(models.Model):\n <mask token>\n\n @staticmethod\n def make_style():\n index_list = ['모던', '미니멀리즘', '한국', '스칸다나비아', '인더스트리얼', '프로방스',\n '로맨틱', '클래식', '엔틱']\n for i in range(len(index_list)):\n Styles.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Colors(models.Model):\n type = models.CharField('색상', max_length=10)\n\n @staticmethod\n def make_color():\n index_list = ['빨강', '주황', '노랑', '초록', '파랑', '남색', '보라색', '검정', '흰색',\n '회색']\n for i in range(len(index_list)):\n Colors.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass PostImages(models.Model):\n post = models.ForeignKey(Posts, on_delete=models.CASCADE)\n image = models.ImageField(upload_to=get_image_filename, verbose_name=\n '다중 이미지')\n image_comment = models.TextField('사진 설명', max_length=200, blank=True,\n null=True)\n",
"step-2": "<mask token>\n\n\nclass Pyeong(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass HousingTypes(models.Model):\n type = models.CharField('주거 환경', max_length=20)\n\n @staticmethod\n def make_housing_type():\n index_list = ['빌라', '아파트', '오피스텔', '원룸', '투쓰리룸', '복층']\n for i in range(len(index_list)):\n HousingTypes.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Styles(models.Model):\n type = models.CharField('디자인 스타일', max_length=10)\n\n @staticmethod\n def make_style():\n index_list = ['모던', '미니멀리즘', '한국', '스칸다나비아', '인더스트리얼', '프로방스',\n '로맨틱', '클래식', '엔틱']\n for i in range(len(index_list)):\n Styles.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Colors(models.Model):\n type = models.CharField('색상', max_length=10)\n\n @staticmethod\n def make_color():\n index_list = ['빨강', '주황', '노랑', '초록', '파랑', '남색', '보라색', '검정', '흰색',\n '회색']\n for i in range(len(index_list)):\n Colors.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass PostImages(models.Model):\n post = models.ForeignKey(Posts, on_delete=models.CASCADE)\n image = models.ImageField(upload_to=get_image_filename, verbose_name=\n '다중 이미지')\n image_comment = models.TextField('사진 설명', max_length=200, blank=True,\n null=True)\n",
"step-3": "<mask token>\n\n\nclass Comments(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = '댓글'\n verbose_name_plural = '댓글 목록'\n\n def save(self, *args, **kwargs):\n super().save(*args, **kwargs)\n\n\nclass PostLike(models.Model):\n post = models.ForeignKey(Posts, on_delete=models.CASCADE)\n user = models.ForeignKey('members.Users', on_delete=models.CASCADE)\n created_at = models.DateTimeField(auto_now_add=True)\n\n def __str__(self):\n return 'Post[{post_pk}] Like (User: {username})'.format(post_pk=\n self.post.pk, username=self.user.username)\n\n\n class Meta:\n verbose_name = '게시글 좋아요'\n verbose_name_plural = f'{verbose_name} 목록'\n unique_together = ('post', 'user'),\n\n\nclass Pyeong(models.Model):\n type = models.CharField('평 수', max_length=20)\n\n @staticmethod\n def make_pyeng():\n index_list = ['1-7', '8-15', '16-25', '그 이상']\n for i in range(len(index_list)):\n Pyeong.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass HousingTypes(models.Model):\n type = models.CharField('주거 환경', max_length=20)\n\n @staticmethod\n def make_housing_type():\n index_list = ['빌라', '아파트', '오피스텔', '원룸', '투쓰리룸', '복층']\n for i in range(len(index_list)):\n HousingTypes.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Styles(models.Model):\n type = models.CharField('디자인 스타일', max_length=10)\n\n @staticmethod\n def make_style():\n index_list = ['모던', '미니멀리즘', '한국', '스칸다나비아', '인더스트리얼', '프로방스',\n '로맨틱', '클래식', '엔틱']\n for i in range(len(index_list)):\n Styles.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Colors(models.Model):\n type = models.CharField('색상', max_length=10)\n\n @staticmethod\n def make_color():\n index_list = ['빨강', '주황', '노랑', '초록', '파랑', '남색', '보라색', '검정', '흰색',\n '회색']\n for i in range(len(index_list)):\n Colors.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass PostImages(models.Model):\n post = models.ForeignKey(Posts, on_delete=models.CASCADE)\n image = models.ImageField(upload_to=get_image_filename, verbose_name=\n '다중 이미지')\n image_comment = models.TextField('사진 설명', max_length=200, blank=True,\n null=True)\n",
"step-4": "<mask token>\n\n\nclass Posts(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def initial_setting():\n Pyeong.make_pyeng()\n Colors.make_color()\n HousingTypes.make_housing_type()\n Styles.make_style()\n\n\n class Meta:\n verbose_name = '게시글'\n verbose_name_plural = '게시글 목록'\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.title)\n\n\nclass Comments(models.Model):\n post = models.ForeignKey(Posts, on_delete=models.CASCADE, verbose_name=\n '포스트', related_name='comment_set', related_query_name='comments')\n author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n content = models.TextField('댓글 내용', max_length=500)\n created_at = models.DateTimeField('작성 날', auto_now_add=True)\n updated_at = models.DateTimeField('수정 날짜', auto_now=True)\n\n\n class Meta:\n verbose_name = '댓글'\n verbose_name_plural = '댓글 목록'\n\n def save(self, *args, **kwargs):\n super().save(*args, **kwargs)\n\n\nclass PostLike(models.Model):\n post = models.ForeignKey(Posts, on_delete=models.CASCADE)\n user = models.ForeignKey('members.Users', on_delete=models.CASCADE)\n created_at = models.DateTimeField(auto_now_add=True)\n\n def __str__(self):\n return 'Post[{post_pk}] Like (User: {username})'.format(post_pk=\n self.post.pk, username=self.user.username)\n\n\n class Meta:\n verbose_name = '게시글 좋아요'\n verbose_name_plural = f'{verbose_name} 목록'\n unique_together = ('post', 'user'),\n\n\nclass Pyeong(models.Model):\n type = models.CharField('평 수', max_length=20)\n\n @staticmethod\n def make_pyeng():\n index_list = ['1-7', '8-15', '16-25', '그 이상']\n for i in range(len(index_list)):\n Pyeong.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass HousingTypes(models.Model):\n type = models.CharField('주거 환경', max_length=20)\n\n @staticmethod\n def make_housing_type():\n index_list = ['빌라', '아파트', '오피스텔', '원룸', '투쓰리룸', '복층']\n for i in range(len(index_list)):\n HousingTypes.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Styles(models.Model):\n type = models.CharField('디자인 스타일', max_length=10)\n\n @staticmethod\n def make_style():\n index_list = ['모던', '미니멀리즘', '한국', '스칸다나비아', '인더스트리얼', '프로방스',\n '로맨틱', '클래식', '엔틱']\n for i in range(len(index_list)):\n Styles.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Colors(models.Model):\n type = models.CharField('색상', max_length=10)\n\n @staticmethod\n def make_color():\n index_list = ['빨강', '주황', '노랑', '초록', '파랑', '남색', '보라색', '검정', '흰색',\n '회색']\n for i in range(len(index_list)):\n Colors.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass PostImages(models.Model):\n post = models.ForeignKey(Posts, on_delete=models.CASCADE)\n image = models.ImageField(upload_to=get_image_filename, verbose_name=\n '다중 이미지')\n image_comment = models.TextField('사진 설명', max_length=200, blank=True,\n null=True)\n",
"step-5": "from django.conf import settings\nfrom django.db import models\n\n\ndef get_image_filename(instance, filename):\n a = f'post_images/{instance.post.title}.svg'\n return a\n\n\ndef get_main_image_filename(instance, filename):\n a = f'post_images/{instance.title}_main.svg'\n return a\n\n\n# Create your models here.\nclass Posts(models.Model):\n PYEONG_CHOICE_FIELD = (\n ('1-7', '1-7평'),\n ('8-15', '8-15평'),\n ('16-25', '16-25평'),\n ('26-', '그 이상'),\n )\n\n user = models.ForeignKey(\n settings.AUTH_USER_MODEL,\n on_delete=models.CASCADE,\n )\n title = models.TextField(\n '제목', max_length=50\n )\n content = models.TextField(\n '작성 글', max_length=500\n )\n main_image = models.ImageField(\n upload_to=get_main_image_filename,\n blank=True,\n null=True,\n verbose_name='메인 이미지',\n )\n pyeong = models.ManyToManyField(\n 'Pyeong',\n blank=True,\n )\n created_at = models.DateTimeField(\n '생성 날짜', auto_now_add=True,\n )\n updated_at = models.DateTimeField(\n verbose_name='수정 날짜', auto_now=True, null=True, blank=True\n )\n\n like_users = models.ManyToManyField(\n 'members.Users',\n through='PostLike',\n related_name='like_posts',\n related_query_name='like_post',\n blank=True,\n )\n\n colors = models.ManyToManyField(\n 'posts.Colors',\n blank=True,\n )\n housingtype = models.ManyToManyField(\n 'HousingTypes',\n blank=True,\n )\n style = models.ManyToManyField(\n 'Styles',\n blank=True,\n )\n postPyeong = models.CharField(max_length=10, choices=PYEONG_CHOICE_FIELD)\n\n @staticmethod\n def initial_setting():\n Pyeong.make_pyeng()\n Colors.make_color()\n HousingTypes.make_housing_type()\n Styles.make_style()\n\n class Meta:\n verbose_name = '게시글'\n verbose_name_plural = '게시글 목록'\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.title)\n\n\nclass Comments(models.Model):\n post = models.ForeignKey(\n Posts,\n on_delete=models.CASCADE,\n verbose_name='포스트',\n related_name='comment_set',\n related_query_name='comments',\n )\n author = models.ForeignKey(\n settings.AUTH_USER_MODEL,\n on_delete=models.CASCADE,\n )\n content = models.TextField(\n '댓글 내용', max_length=500\n )\n # 글쓴이\n created_at = models.DateTimeField(\n '작성 날', auto_now_add=True,\n )\n updated_at = models.DateTimeField(\n '수정 날짜', auto_now=True,\n )\n\n class Meta:\n verbose_name = '댓글'\n verbose_name_plural = '댓글 목록'\n\n def save(self, *args, **kwargs):\n # 여기서 이미지 처리를 하게 될 듯\n super().save(*args, **kwargs)\n\n\nclass PostLike(models.Model):\n post = models.ForeignKey(\n Posts,\n on_delete=models.CASCADE,\n )\n user = models.ForeignKey(\n 'members.Users',\n on_delete=models.CASCADE,\n )\n created_at = models.DateTimeField(\n auto_now_add=True,\n )\n\n def __str__(self):\n return 'Post[{post_pk}] Like (User: {username})'.format(\n post_pk=self.post.pk,\n username=self.user.username,\n )\n\n class Meta:\n verbose_name = '게시글 좋아요'\n verbose_name_plural = f'{verbose_name} 목록'\n # 특정 유저가 특정 포스트 좋아요를 누른 정보는 유니크 해야 함.\n unique_together = (\n ('post', 'user'),\n )\n\n\nclass Pyeong(models.Model):\n type = models.CharField(\n '평 수',\n max_length=20,\n )\n\n @staticmethod\n def make_pyeng():\n index_list = ['1-7', '8-15', '16-25', '그 이상']\n for i in range((len(index_list))):\n Pyeong.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass HousingTypes(models.Model):\n type = models.CharField(\n '주거 환경',\n max_length=20,\n )\n\n @staticmethod\n def make_housing_type():\n index_list = ['빌라', '아파트', '오피스텔', '원룸', '투쓰리룸', '복층']\n for i in range(len(index_list)):\n 
HousingTypes.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Styles(models.Model):\n type = models.CharField(\n '디자인 스타일',\n max_length=10,\n )\n\n @staticmethod\n def make_style():\n index_list = ['모던', '미니멀리즘', '한국', '스칸다나비아', '인더스트리얼', '프로방스', '로맨틱', '클래식', '엔틱']\n for i in range(len(index_list)):\n Styles.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass Colors(models.Model):\n type = models.CharField(\n '색상',\n max_length=10\n )\n\n @staticmethod\n def make_color():\n index_list = ['빨강', '주황', '노랑', '초록', '파랑', '남색', '보라색', '검정', '흰색', '회색']\n for i in range(len(index_list)):\n Colors.objects.create(type=index_list[i])\n\n def __str__(self):\n return '%s : %s' % (self.pk, self.type)\n\n\nclass PostImages(models.Model):\n post = models.ForeignKey(\n Posts,\n on_delete=models.CASCADE,\n )\n image = models.ImageField(\n upload_to=get_image_filename,\n verbose_name='다중 이미지',\n )\n image_comment = models.TextField(\n '사진 설명', max_length=200, blank=True, null=True,\n )\n # 이미지 추가 스택오버플로우 정보\n # https://stackoverflow.com/questions/34006994/how-to-upload-multiple-images-to-a-blog-post-in-django\n",
"step-ids": [
9,
15,
23,
27,
32
]
}
|
[
9,
15,
23,
27,
32
] |
# 4. The user enters a positive integer.
# Find the largest digit in the number. Use a while loop and arithmetic operations for the solution.
income_number = int(input('Введите, пожалуйста, целое положительное число '))
max_number = 0
# solved from the other direction, not the way suggested in the webinar, but it works too and is not much longer...
while income_number != 0: # keep looping until the number is used up
    num_exp = 10 ** (len(str(income_number)) - 1) # place value of the leading digit
    deleted_number = int(income_number / num_exp) # extract the leftmost digit
    if max_number < deleted_number: # update the maximum when necessary
        max_number = deleted_number
    income_number = income_number - deleted_number * num_exp # "bite off" the leftmost digit
print(f'Самая большая цифра в числе {max_number}')
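# For comparison, the conventional right-to-left variant with % and // (a sketch;
# it needs a fresh copy of the input, since income_number is 0 after the loop above):
# n = int(input('Enter a positive integer '))
# biggest = 0
# while n:
#     biggest = max(biggest, n % 10)
#     n //= 10
# print(f'The largest digit is {biggest}')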
|
normal
|
{
"blob_id": "18e0ece7c38169d2de91a07dddd4f40b7427848f",
"index": 3759,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile income_number != 0:\n num_exp = 10 ** (len(str(income_number)) - 1)\n deleted_number = int(income_number / num_exp)\n if max_number < deleted_number:\n max_number = deleted_number\n income_number = income_number - deleted_number * num_exp\nprint(f'Самая большая цифра в числе {max_number}')\n",
"step-3": "income_number = int(input('Введите, пожалуйста, целое положительное число '))\nmax_number = 0\nwhile income_number != 0:\n num_exp = 10 ** (len(str(income_number)) - 1)\n deleted_number = int(income_number / num_exp)\n if max_number < deleted_number:\n max_number = deleted_number\n income_number = income_number - deleted_number * num_exp\nprint(f'Самая большая цифра в числе {max_number}')\n",
"step-4": "# 4. Пользователь вводит целое положительное число.\n# Найдите самую большую цифру в числе. Для решения используйте цикл while и арифметические операции.\n\nincome_number = int(input('Введите, пожалуйста, целое положительное число '))\n\nmax_number = 0\n# в другую сторону решение, не так как Вы на вебинаре советовали, но тоже работает, и не сказать чтобы сильно длинее...\nwhile income_number != 0: # продолжаю цикл вплоть до уничтожения числа\n num_exp = 10 ** (len(str(income_number)) - 1) # устанавливаю размерность числа\n deleted_number = int(income_number / num_exp) # узнаю крайнюю левую цифру\n if max_number < deleted_number: # перезапись максимальной, если есть такая необходимость\n max_number = deleted_number\n income_number = income_number - deleted_number * num_exp # \"откусываю\" крайнюю левую цифру\n\nprint(f'Самая большая цифра в числе {max_number}')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from timeit import default_timer as timer
import numpy as np
bets1 = [ # lowest config possible
0.00000001,
0.00000004,
0.0000001,
0.0000005,
0.00000150,
0.00000500,
0.00001000
]
bets2 = [ # bets2 is bets1 scaled by 10x
0.0000001,
0.0000004,
0.000001,
0.000005,
0.0000150,
0.0000500,
0.0001000
]
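# NB: bets1/bets2 are reference presets only; the play loop below rebuilds
# `bets` from cur_bal on every roll, so neither list is used directly.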
# options
max_seeds = 100
max_rolls = 100000 # 100k is around 8-24 hours of fastplay
seed_wins = 0
num_rolls = []
start_position = np.random.randint(1, 100000000)
for seed in range(start_position, start_position+max_seeds):
# current game round stats
cur_wins = 0
max_wins = 0
cur_losses = 0
max_losses = 0
win_streak = []
loss_streak = []
# seed data and timer
np.random.seed(seed)
start_time = timer()
start_bal = cur_bal = 0.001 # 10$ reasonable start
# actual Play
for index in range(max_rolls):
# make bets
bets = [ # this appears to be working, a function of cur_bal
0.00000001,
float('{:.8f}'.format(cur_bal * 0.001)),
float('{:.8f}'.format(cur_bal * 0.002)),
float('{:.8f}'.format(cur_bal * 0.005)),
float('{:.8f}'.format(cur_bal * 0.01)),
float('{:.8f}'.format(cur_bal * 0.05)),
float('{:.8f}'.format(cur_bal * 0.12)),
float('{:.8f}'.format(cur_bal * 0.3)),
]
# if Winning... Stop
if (cur_bal / start_bal - 1)*100 > 10000 or index==max_rolls-1:
print('Seed: {}, Num_Rolls {}, Balance: {:.8f} | Profit: {:.2f}%'.format(
seed, index, cur_bal, (cur_bal/start_bal-1)*100))
print('Max_L: {}'.format(max_losses))
print('Max_W: {}'.format(max_wins))
#print('Won The Day!')
seed_wins += 1
num_rolls.append(index)
break
# get bet
if cur_losses < len(bets):
bet = bets[cur_losses]
else:
bet = bets[0]
        if bet < bets[0]: # never bet below the minimum unit (1e-8, i.e. 8 decimal places)
bet = bets[0]
# if Losing ... Stop
if cur_bal <= 0:
break
if bet >= cur_bal:
#print('Seed: {}, Num_Rolls {}, Balance: {:.8f} | Profit: {:.2f}%'.format(
# seed, index, cur_bal, (cur_bal/start_bal-1)*100))
#print('Game Over man!')
break
## >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> MAKE PLAY
roll = np.random.randint(1, 10000)
win = True if roll < 3900 else False ## 3900/10000 appears to be a good handicap
## <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# fix balance
if win:
loss_streak.append(cur_losses)
cur_bal += bet * 2
cur_losses = 0
cur_wins += 1
else:
win_streak.append(cur_wins)
cur_bal -= bet
cur_losses += 1
cur_wins = 0
# fix maxes
if cur_losses > max_losses:
max_losses = cur_losses
if cur_wins > max_wins:
max_wins = cur_wins
# /actual play
# seed stuff
seed_time = timer() - start_time
print('Seed_time: {:.2f}'.format(seed_time), end='\r') # you will see this a lot if losing
# Finished All Seeds
print('Won {}/{} Seeds'.format(seed_wins,max_seeds))
if seed_wins: # if won anything.
    print('Avg # of rolls to 10000%: {}'.format(int(np.array(num_rolls).mean())))
|
normal
|
{
"blob_id": "4c66ab6110e81bb88fc6916a1695e0f23e6e0e9d",
"index": 6754,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor seed in range(start_position, start_position + max_seeds):\n cur_wins = 0\n max_wins = 0\n cur_losses = 0\n max_losses = 0\n win_streak = []\n loss_streak = []\n np.random.seed(seed)\n start_time = timer()\n start_bal = cur_bal = 0.001\n for index in range(max_rolls):\n bets = [1e-08, float('{:.8f}'.format(cur_bal * 0.001)), float(\n '{:.8f}'.format(cur_bal * 0.002)), float('{:.8f}'.format(\n cur_bal * 0.005)), float('{:.8f}'.format(cur_bal * 0.01)),\n float('{:.8f}'.format(cur_bal * 0.05)), float('{:.8f}'.format(\n cur_bal * 0.12)), float('{:.8f}'.format(cur_bal * 0.3))]\n if (cur_bal / start_bal - 1) * 100 > 10000 or index == max_rolls - 1:\n print('Seed: {}, Num_Rolls {}, Balance: {:.8f} | Profit: {:.2f}%'\n .format(seed, index, cur_bal, (cur_bal / start_bal - 1) * 100))\n print('Max_L: {}'.format(max_losses))\n print('Max_W: {}'.format(max_wins))\n seed_wins += 1\n num_rolls.append(index)\n break\n if cur_losses < len(bets):\n bet = bets[cur_losses]\n else:\n bet = bets[0]\n if bet < bets[0]:\n bet = bets[0]\n if cur_bal <= 0:\n break\n if bet >= cur_bal:\n break\n roll = np.random.randint(1, 10000)\n win = True if roll < 3900 else False\n if win:\n loss_streak.append(cur_losses)\n cur_bal += bet * 2\n cur_losses = 0\n cur_wins += 1\n else:\n win_streak.append(cur_wins)\n cur_bal -= bet\n cur_losses += 1\n cur_wins = 0\n if cur_losses > max_losses:\n max_losses = cur_losses\n if cur_wins > max_wins:\n max_wins = cur_wins\n seed_time = timer() - start_time\n print('Seed_time: {:.2f}'.format(seed_time), end='\\r')\nprint('Won {}/{} Seeds'.format(seed_wins, max_seeds))\nif seed_wins:\n print('Avg # of rolls to 1000%: {}'.format(int(np.array(num_rolls).mean()))\n )\n",
"step-3": "<mask token>\nbets1 = [1e-08, 4e-08, 1e-07, 5e-07, 1.5e-06, 5e-06, 1e-05]\nbets2 = [1e-07, 4e-07, 1e-06, 5e-06, 1.5e-05, 5e-05, 0.0001]\nmax_seeds = 100\nmax_rolls = 100000\nseed_wins = 0\nnum_rolls = []\nstart_position = np.random.randint(1, 100000000)\nfor seed in range(start_position, start_position + max_seeds):\n cur_wins = 0\n max_wins = 0\n cur_losses = 0\n max_losses = 0\n win_streak = []\n loss_streak = []\n np.random.seed(seed)\n start_time = timer()\n start_bal = cur_bal = 0.001\n for index in range(max_rolls):\n bets = [1e-08, float('{:.8f}'.format(cur_bal * 0.001)), float(\n '{:.8f}'.format(cur_bal * 0.002)), float('{:.8f}'.format(\n cur_bal * 0.005)), float('{:.8f}'.format(cur_bal * 0.01)),\n float('{:.8f}'.format(cur_bal * 0.05)), float('{:.8f}'.format(\n cur_bal * 0.12)), float('{:.8f}'.format(cur_bal * 0.3))]\n if (cur_bal / start_bal - 1) * 100 > 10000 or index == max_rolls - 1:\n print('Seed: {}, Num_Rolls {}, Balance: {:.8f} | Profit: {:.2f}%'\n .format(seed, index, cur_bal, (cur_bal / start_bal - 1) * 100))\n print('Max_L: {}'.format(max_losses))\n print('Max_W: {}'.format(max_wins))\n seed_wins += 1\n num_rolls.append(index)\n break\n if cur_losses < len(bets):\n bet = bets[cur_losses]\n else:\n bet = bets[0]\n if bet < bets[0]:\n bet = bets[0]\n if cur_bal <= 0:\n break\n if bet >= cur_bal:\n break\n roll = np.random.randint(1, 10000)\n win = True if roll < 3900 else False\n if win:\n loss_streak.append(cur_losses)\n cur_bal += bet * 2\n cur_losses = 0\n cur_wins += 1\n else:\n win_streak.append(cur_wins)\n cur_bal -= bet\n cur_losses += 1\n cur_wins = 0\n if cur_losses > max_losses:\n max_losses = cur_losses\n if cur_wins > max_wins:\n max_wins = cur_wins\n seed_time = timer() - start_time\n print('Seed_time: {:.2f}'.format(seed_time), end='\\r')\nprint('Won {}/{} Seeds'.format(seed_wins, max_seeds))\nif seed_wins:\n print('Avg # of rolls to 1000%: {}'.format(int(np.array(num_rolls).mean()))\n )\n",
"step-4": "from timeit import default_timer as timer\nimport numpy as np\nbets1 = [1e-08, 4e-08, 1e-07, 5e-07, 1.5e-06, 5e-06, 1e-05]\nbets2 = [1e-07, 4e-07, 1e-06, 5e-06, 1.5e-05, 5e-05, 0.0001]\nmax_seeds = 100\nmax_rolls = 100000\nseed_wins = 0\nnum_rolls = []\nstart_position = np.random.randint(1, 100000000)\nfor seed in range(start_position, start_position + max_seeds):\n cur_wins = 0\n max_wins = 0\n cur_losses = 0\n max_losses = 0\n win_streak = []\n loss_streak = []\n np.random.seed(seed)\n start_time = timer()\n start_bal = cur_bal = 0.001\n for index in range(max_rolls):\n bets = [1e-08, float('{:.8f}'.format(cur_bal * 0.001)), float(\n '{:.8f}'.format(cur_bal * 0.002)), float('{:.8f}'.format(\n cur_bal * 0.005)), float('{:.8f}'.format(cur_bal * 0.01)),\n float('{:.8f}'.format(cur_bal * 0.05)), float('{:.8f}'.format(\n cur_bal * 0.12)), float('{:.8f}'.format(cur_bal * 0.3))]\n if (cur_bal / start_bal - 1) * 100 > 10000 or index == max_rolls - 1:\n print('Seed: {}, Num_Rolls {}, Balance: {:.8f} | Profit: {:.2f}%'\n .format(seed, index, cur_bal, (cur_bal / start_bal - 1) * 100))\n print('Max_L: {}'.format(max_losses))\n print('Max_W: {}'.format(max_wins))\n seed_wins += 1\n num_rolls.append(index)\n break\n if cur_losses < len(bets):\n bet = bets[cur_losses]\n else:\n bet = bets[0]\n if bet < bets[0]:\n bet = bets[0]\n if cur_bal <= 0:\n break\n if bet >= cur_bal:\n break\n roll = np.random.randint(1, 10000)\n win = True if roll < 3900 else False\n if win:\n loss_streak.append(cur_losses)\n cur_bal += bet * 2\n cur_losses = 0\n cur_wins += 1\n else:\n win_streak.append(cur_wins)\n cur_bal -= bet\n cur_losses += 1\n cur_wins = 0\n if cur_losses > max_losses:\n max_losses = cur_losses\n if cur_wins > max_wins:\n max_wins = cur_wins\n seed_time = timer() - start_time\n print('Seed_time: {:.2f}'.format(seed_time), end='\\r')\nprint('Won {}/{} Seeds'.format(seed_wins, max_seeds))\nif seed_wins:\n print('Avg # of rolls to 1000%: {}'.format(int(np.array(num_rolls).mean()))\n )\n",
"step-5": "from timeit import default_timer as timer\nimport numpy as np\n\nbets1 = [ # lowest config possible\n 0.00000001,\n 0.00000004,\n 0.0000001,\n 0.0000005,\n 0.00000150,\n 0.00000500,\n 0.00001000\n]\nbets2 = [ # 2 is 10x 1\n 0.0000001,\n 0.0000004,\n 0.000001,\n 0.000005,\n 0.0000150,\n 0.0000500,\n 0.0001000\n]\n\n# options\nmax_seeds = 100\nmax_rolls = 100000 # 100k is around 8-24 hours of fastplay\n\nseed_wins = 0\nnum_rolls = []\nstart_position = np.random.randint(1, 100000000)\n\n\n\nfor seed in range(start_position, start_position+max_seeds):\n # current game round stats\n cur_wins = 0\n max_wins = 0\n cur_losses = 0\n max_losses = 0\n\n win_streak = []\n loss_streak = []\n # seed data and timer\n np.random.seed(seed)\n start_time = timer()\n\n start_bal = cur_bal = 0.001 # 10$ reasonable start\n # actual Play\n for index in range(max_rolls):\n # make bets\n bets = [ # this appears to be working, a function of cur_bal\n 0.00000001,\n float('{:.8f}'.format(cur_bal * 0.001)),\n float('{:.8f}'.format(cur_bal * 0.002)),\n float('{:.8f}'.format(cur_bal * 0.005)),\n float('{:.8f}'.format(cur_bal * 0.01)),\n float('{:.8f}'.format(cur_bal * 0.05)),\n float('{:.8f}'.format(cur_bal * 0.12)),\n float('{:.8f}'.format(cur_bal * 0.3)),\n ]\n\n # if Winning... Stop\n if (cur_bal / start_bal - 1)*100 > 10000 or index==max_rolls-1:\n print('Seed: {}, Num_Rolls {}, Balance: {:.8f} | Profit: {:.2f}%'.format(\n seed, index, cur_bal, (cur_bal/start_bal-1)*100))\n print('Max_L: {}'.format(max_losses))\n print('Max_W: {}'.format(max_wins))\n #print('Won The Day!')\n seed_wins += 1\n num_rolls.append(index)\n break\n\n # get bet\n if cur_losses < len(bets):\n bet = bets[cur_losses]\n else:\n bet = bets[0]\n if bet < bets[0]: # dont bet less than 8 decimal places\n bet = bets[0]\n\n # if Losing ... Stop\n if cur_bal <= 0:\n break\n if bet >= cur_bal:\n #print('Seed: {}, Num_Rolls {}, Balance: {:.8f} | Profit: {:.2f}%'.format(\n # seed, index, cur_bal, (cur_bal/start_bal-1)*100))\n #print('Game Over man!')\n break\n\n ## >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> MAKE PLAY\n roll = np.random.randint(1, 10000)\n win = True if roll < 3900 else False ## 3900/10000 appears to be a good handicap\n ## <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n\n # fix balance\n if win:\n loss_streak.append(cur_losses)\n cur_bal += bet * 2\n cur_losses = 0\n cur_wins += 1\n else:\n win_streak.append(cur_wins)\n cur_bal -= bet\n cur_losses += 1\n cur_wins = 0\n\n # fix maxes\n if cur_losses > max_losses:\n max_losses = cur_losses\n if cur_wins > max_wins:\n max_wins = cur_wins\n # /actual play\n # seed stuff\n seed_time = timer() - start_time\n print('Seed_time: {:.2f}'.format(seed_time), end='\\r') # you will see this a lot if losing\n# Finished All Seeds\nprint('Won {}/{} Seeds'.format(seed_wins,max_seeds))\nif seed_wins: # if won anything.\n print('Avg # of rolls to 1000%: {}'.format(int(np.array(num_rolls).mean())))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from __future__ import annotations
from typing import TYPE_CHECKING
import abc
import tcod.event
if TYPE_CHECKING:
from tcodplus.canvas import Canvas
from tcodplus.event import CanvasDispatcher
class IDrawable(abc.ABC):
@property
@abc.abstractmethod
def force_redraw(self) -> bool:
pass
    @force_redraw.setter
    @abc.abstractmethod
    def force_redraw(self, value: bool) -> None:
pass
@abc.abstractmethod
def draw(self, dest: Canvas) -> None:
pass
@abc.abstractmethod
def base_drawing(self, console: tcod.console.Console) -> None:
pass
class IFocusable(abc.ABC):
@property
@abc.abstractmethod
def focus_dispatcher(self) -> CanvasDispatcher:
pass
class IMouseFocusable(IFocusable):
@abc.abstractmethod
def mousefocus(self, event: tcod.event.MouseMotion) -> bool:
pass
class IKeyboardFocusable(IFocusable):
@property
@abc.abstractmethod
def kbdfocus(self) -> bool:
pass
@kbdfocus.setter
@abc.abstractmethod
def kbdfocus(self, val: bool) -> None:
pass
@property
@abc.abstractmethod
def kbdfocus_requested(self) -> bool:
pass
@kbdfocus_requested.setter
@abc.abstractmethod
def kbdfocus_requested(self, val: bool) -> None:
pass
class IUpdatable(abc.ABC):
@property
@abc.abstractmethod
def should_update(self) -> bool:
pass
@should_update.setter
@abc.abstractmethod
def should_update(self, value: bool) -> None:
pass
@abc.abstractmethod
def update(self) -> None:
pass
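

# Usage sketch: how these ABCs are meant to be fulfilled by a concrete class.
# "Ticker" is a hypothetical name used purely for illustration; it implements
# the simplest interface above (IUpdatable) with a plain backing attribute.
class Ticker(IUpdatable):
    def __init__(self) -> None:
        self._dirty = True
        self.ticks = 0

    @property
    def should_update(self) -> bool:
        return self._dirty

    @should_update.setter
    def should_update(self, value: bool) -> None:
        self._dirty = value

    def update(self) -> None:
        # do one frame of work, then clear the flag until someone re-dirties it
        self.ticks += 1
        self.should_update = False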
|
normal
|
{
"blob_id": "e37f958191c9481c6664e90c17f43419a0b5b606",
"index": 8131,
"step-1": "<mask token>\n\n\nclass IDrawable(abc.ABC):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass IFocusable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def focus_dispatcher(self) ->CanvasDispatcher:\n pass\n\n\nclass IMouseFocusable(IFocusable):\n\n @abc.abstractmethod\n def mousefocus(self, event: tcod.event.MouseMotion) ->bool:\n pass\n\n\nclass IKeyboardFocusable(IFocusable):\n\n @property\n @abc.abstractmethod\n def kbdfocus(self) ->bool:\n pass\n\n @kbdfocus.setter\n @abc.abstractmethod\n def kbdfocus(self, val: bool) ->None:\n pass\n\n @property\n @abc.abstractmethod\n def kbdfocus_requested(self) ->bool:\n pass\n\n @kbdfocus_requested.setter\n @abc.abstractmethod\n def kbdfocus_requested(self, val: bool) ->None:\n pass\n\n\nclass IUpdatable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def should_update(self) ->bool:\n pass\n\n @should_update.setter\n @abc.abstractmethod\n def should_update(self, value: bool) ->None:\n pass\n\n @abc.abstractmethod\n def update(self) ->None:\n pass\n",
"step-2": "<mask token>\n\n\nclass IDrawable(abc.ABC):\n <mask token>\n <mask token>\n <mask token>\n\n @abc.abstractmethod\n def base_drawing(self, console: tcod.console.Console) ->None:\n pass\n\n\nclass IFocusable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def focus_dispatcher(self) ->CanvasDispatcher:\n pass\n\n\nclass IMouseFocusable(IFocusable):\n\n @abc.abstractmethod\n def mousefocus(self, event: tcod.event.MouseMotion) ->bool:\n pass\n\n\nclass IKeyboardFocusable(IFocusable):\n\n @property\n @abc.abstractmethod\n def kbdfocus(self) ->bool:\n pass\n\n @kbdfocus.setter\n @abc.abstractmethod\n def kbdfocus(self, val: bool) ->None:\n pass\n\n @property\n @abc.abstractmethod\n def kbdfocus_requested(self) ->bool:\n pass\n\n @kbdfocus_requested.setter\n @abc.abstractmethod\n def kbdfocus_requested(self, val: bool) ->None:\n pass\n\n\nclass IUpdatable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def should_update(self) ->bool:\n pass\n\n @should_update.setter\n @abc.abstractmethod\n def should_update(self, value: bool) ->None:\n pass\n\n @abc.abstractmethod\n def update(self) ->None:\n pass\n",
"step-3": "<mask token>\n\n\nclass IDrawable(abc.ABC):\n <mask token>\n\n @property\n @force_redraw.setter\n def force_redraw(self, value: bool) ->None:\n pass\n\n @abc.abstractmethod\n def draw(self, dest: Canvas) ->None:\n pass\n\n @abc.abstractmethod\n def base_drawing(self, console: tcod.console.Console) ->None:\n pass\n\n\nclass IFocusable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def focus_dispatcher(self) ->CanvasDispatcher:\n pass\n\n\nclass IMouseFocusable(IFocusable):\n\n @abc.abstractmethod\n def mousefocus(self, event: tcod.event.MouseMotion) ->bool:\n pass\n\n\nclass IKeyboardFocusable(IFocusable):\n\n @property\n @abc.abstractmethod\n def kbdfocus(self) ->bool:\n pass\n\n @kbdfocus.setter\n @abc.abstractmethod\n def kbdfocus(self, val: bool) ->None:\n pass\n\n @property\n @abc.abstractmethod\n def kbdfocus_requested(self) ->bool:\n pass\n\n @kbdfocus_requested.setter\n @abc.abstractmethod\n def kbdfocus_requested(self, val: bool) ->None:\n pass\n\n\nclass IUpdatable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def should_update(self) ->bool:\n pass\n\n @should_update.setter\n @abc.abstractmethod\n def should_update(self, value: bool) ->None:\n pass\n\n @abc.abstractmethod\n def update(self) ->None:\n pass\n",
"step-4": "<mask token>\n\n\nclass IDrawable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def force_redraw(self) ->bool:\n pass\n\n @property\n @force_redraw.setter\n def force_redraw(self, value: bool) ->None:\n pass\n\n @abc.abstractmethod\n def draw(self, dest: Canvas) ->None:\n pass\n\n @abc.abstractmethod\n def base_drawing(self, console: tcod.console.Console) ->None:\n pass\n\n\nclass IFocusable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def focus_dispatcher(self) ->CanvasDispatcher:\n pass\n\n\nclass IMouseFocusable(IFocusable):\n\n @abc.abstractmethod\n def mousefocus(self, event: tcod.event.MouseMotion) ->bool:\n pass\n\n\nclass IKeyboardFocusable(IFocusable):\n\n @property\n @abc.abstractmethod\n def kbdfocus(self) ->bool:\n pass\n\n @kbdfocus.setter\n @abc.abstractmethod\n def kbdfocus(self, val: bool) ->None:\n pass\n\n @property\n @abc.abstractmethod\n def kbdfocus_requested(self) ->bool:\n pass\n\n @kbdfocus_requested.setter\n @abc.abstractmethod\n def kbdfocus_requested(self, val: bool) ->None:\n pass\n\n\nclass IUpdatable(abc.ABC):\n\n @property\n @abc.abstractmethod\n def should_update(self) ->bool:\n pass\n\n @should_update.setter\n @abc.abstractmethod\n def should_update(self, value: bool) ->None:\n pass\n\n @abc.abstractmethod\n def update(self) ->None:\n pass\n",
"step-5": "from __future__ import annotations\nfrom typing import TYPE_CHECKING\nimport abc\nimport tcod.event\n\nif TYPE_CHECKING:\n from tcodplus.canvas import Canvas\n from tcodplus.event import CanvasDispatcher\n\n\nclass IDrawable(abc.ABC):\n @property\n @abc.abstractmethod\n def force_redraw(self) -> bool:\n pass\n\n @property\n @force_redraw.setter\n def force_redraw(self, value: bool) -> None:\n pass\n\n @abc.abstractmethod\n def draw(self, dest: Canvas) -> None:\n pass\n\n @abc.abstractmethod\n def base_drawing(self, console: tcod.console.Console) -> None:\n pass\n\n\nclass IFocusable(abc.ABC):\n @property\n @abc.abstractmethod\n def focus_dispatcher(self) -> CanvasDispatcher:\n pass\n\n\nclass IMouseFocusable(IFocusable):\n @abc.abstractmethod\n def mousefocus(self, event: tcod.event.MouseMotion) -> bool:\n pass\n\n\nclass IKeyboardFocusable(IFocusable):\n @property\n @abc.abstractmethod\n def kbdfocus(self) -> bool:\n pass\n\n @kbdfocus.setter\n @abc.abstractmethod\n def kbdfocus(self, val: bool) -> None:\n pass\n\n @property\n @abc.abstractmethod\n def kbdfocus_requested(self) -> bool:\n pass\n\n @kbdfocus_requested.setter\n @abc.abstractmethod\n def kbdfocus_requested(self, val: bool) -> None:\n pass\n\n\nclass IUpdatable(abc.ABC):\n @property\n @abc.abstractmethod\n def should_update(self) -> bool:\n pass\n\n @should_update.setter\n @abc.abstractmethod\n def should_update(self, value: bool) -> None:\n pass\n\n @abc.abstractmethod\n def update(self) -> None:\n pass\n",
"step-ids": [
14,
15,
17,
18,
21
]
}
|
[
14,
15,
17,
18,
21
] |
from flask import request, Flask
import lock
import shelve

app = Flask(__name__)


@app.route("/unlock")
def web_unlock():
    if not (request.args.get("token") and request.args.get("state")):
        return "Error"
    else:
        with shelve.open("Settings.conf") as settings:
            if "token" in settings:
                token = settings["token"]
            else:
                return "System not setup !"
        if request.args.get("token") != token:
            return "Invalid Token"
        if request.args.get("state") == "open":
            lock.unlock()
        elif request.args.get("state") == "close":
            lock.lock()
        elif request.args.get("state") == "switch":
            lock.switch()
        else:
            return "Invalid State"
        return "Done"


@app.route("/state")
def web_state():
    return str(lock.state())


if __name__ == "__main__":
    app.run(debug=True, port=5000, host="0.0.0.0")
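

# Usage sketch: a minimal client for the two routes above. It assumes the app
# is already running on localhost:5000, that the third-party `requests` package
# is installed, and that "my-secret" matches the token stored in Settings.conf
# (all assumptions, not values taken from this service).
def demo_client():
    import requests
    base = "http://127.0.0.1:5000"
    print(requests.get(base + "/state").text)  # current lock state
    print(requests.get(base + "/unlock",
                       params={"token": "my-secret", "state": "open"}).text)  # "Done"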
|
normal
|
{
"blob_id": "ee0f90b84df73ae5783ca0b8a52fe6fe9c979f15",
"index": 2576,
"step-1": "<mask token>\n\n\[email protected]('/unlock')\ndef web_unlock():\n if not (request.args.get('token') and request.args.get('state')):\n return 'Error'\n else:\n with shelve.open('Settings.conf') as settings:\n if 'token' in settings:\n token = settings['token']\n else:\n return 'System not setup !'\n if request.args.get('token') != token:\n return 'Invalid Token'\n if request.args.get('state') == 'open':\n lock.unlock()\n elif request.args.get('state') == 'close':\n lock.lock()\n elif request.args.get('state') == 'switch':\n lock.switch()\n else:\n return 'Invalid State'\n return 'Done'\n\n\[email protected]('/state')\ndef web_state():\n return str(lock.state())\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/unlock')\ndef web_unlock():\n if not (request.args.get('token') and request.args.get('state')):\n return 'Error'\n else:\n with shelve.open('Settings.conf') as settings:\n if 'token' in settings:\n token = settings['token']\n else:\n return 'System not setup !'\n if request.args.get('token') != token:\n return 'Invalid Token'\n if request.args.get('state') == 'open':\n lock.unlock()\n elif request.args.get('state') == 'close':\n lock.lock()\n elif request.args.get('state') == 'switch':\n lock.switch()\n else:\n return 'Invalid State'\n return 'Done'\n\n\[email protected]('/state')\ndef web_state():\n return str(lock.state())\n\n\nif __name__ == '__main__':\n app.run(debug=True, port=5000, host='0.0.0.0')\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\[email protected]('/unlock')\ndef web_unlock():\n if not (request.args.get('token') and request.args.get('state')):\n return 'Error'\n else:\n with shelve.open('Settings.conf') as settings:\n if 'token' in settings:\n token = settings['token']\n else:\n return 'System not setup !'\n if request.args.get('token') != token:\n return 'Invalid Token'\n if request.args.get('state') == 'open':\n lock.unlock()\n elif request.args.get('state') == 'close':\n lock.lock()\n elif request.args.get('state') == 'switch':\n lock.switch()\n else:\n return 'Invalid State'\n return 'Done'\n\n\[email protected]('/state')\ndef web_state():\n return str(lock.state())\n\n\nif __name__ == '__main__':\n app.run(debug=True, port=5000, host='0.0.0.0')\n",
"step-4": "from flask import request, Flask\nimport lock, shelve\napp = Flask(__name__)\n\n\[email protected]('/unlock')\ndef web_unlock():\n if not (request.args.get('token') and request.args.get('state')):\n return 'Error'\n else:\n with shelve.open('Settings.conf') as settings:\n if 'token' in settings:\n token = settings['token']\n else:\n return 'System not setup !'\n if request.args.get('token') != token:\n return 'Invalid Token'\n if request.args.get('state') == 'open':\n lock.unlock()\n elif request.args.get('state') == 'close':\n lock.lock()\n elif request.args.get('state') == 'switch':\n lock.switch()\n else:\n return 'Invalid State'\n return 'Done'\n\n\[email protected]('/state')\ndef web_state():\n return str(lock.state())\n\n\nif __name__ == '__main__':\n app.run(debug=True, port=5000, host='0.0.0.0')\n",
"step-5": "from flask import request, Flask\nimport lock, shelve\n\napp = Flask(__name__)\[email protected](\"/unlock\")\ndef web_unlock():\n if not (request.args.get(\"token\") and request.args.get(\"state\")):\n return \"Error\"\n else:\n with shelve.open(\"Settings.conf\") as settings:\n if \"token\" in settings:\n token = settings[\"token\"]\n else:\n return \"System not setup !\"\n if request.args.get(\"token\") != token:\n return \"Invalid Token\"\n if request.args.get(\"state\") == \"open\":\n lock.unlock()\n elif request.args.get(\"state\") == \"close\":\n lock.lock()\n elif request.args.get(\"state\") == \"switch\":\n lock.switch()\n else:\n return \"Invalid State\"\n return \"Done\"\n\[email protected](\"/state\")\ndef web_state():\n return str(lock.state())\n\nif __name__ == \"__main__\":\n app.run(debug=True, port=5000, host=\"0.0.0.0\")\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# Multi-role authentication decorator
def auth(role):
    from core import admin_view, student_view, teacher_view

    def deco(func):
        def wrapper(*args, **kwargs):
            if role == 'admin':
                if admin_view.admin_user == None:
                    admin_view.login()
                else:
                    res = func(*args, **kwargs)
                    return res

            if role == 'student':
                if student_view.student_user == None:
                    student_view.login()
                else:
                    res = func(*args, **kwargs)
                    return res

            if role == 'teacher':
                if teacher_view.teacher_user == None:
                    teacher_view.login()
                else:
                    res = func(*args, **kwargs)
                    return res
        return wrapper
    return deco
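

# Usage sketch: applying the decorator to a view function. "create_course" is a
# hypothetical admin action used only for illustration, and calling it assumes
# the core.admin_view module imported above is available.
@auth('admin')
def create_course(name):
    print('course %s created' % name)

# create_course('python')  # prompts admin_view.login() first if no admin is logged in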
|
normal
|
{
"blob_id": "e247ffb5b6e4319ff17d0b8ae9f67e10c282c4ff",
"index": 7348,
"step-1": "<mask token>\n",
"step-2": "def auth(role):\n from core import admin_view, student_view, teacher_view\n\n def deco(func):\n\n def wrapper(*args, **kwargs):\n if role == 'admin':\n if admin_view.admin_user == None:\n admin_view.login()\n else:\n res = func(*args, **kwargs)\n return res\n if role == 'student':\n if student_view.student_user == None:\n student_view.login()\n else:\n res = func(*args, **kwargs)\n return res\n if role == 'teacher':\n if teacher_view.teacher_user == None:\n teacher_view.login()\n else:\n res = func(*args, **kwargs)\n return res\n return wrapper\n return deco\n",
"step-3": "\n# 多角色认证装饰器\n\ndef auth(role):\n\n from core import admin_view,student_view,teacher_view\n def deco(func):\n def wrapper(*args,**kwargs):\n\n if role == 'admin':\n if admin_view.admin_user == None:\n admin_view.login()\n else:\n res = func(*args,**kwargs)\n return res\n\n if role == 'student':\n if student_view.student_user == None:\n student_view.login()\n else:\n res = func(*args,**kwargs)\n return res\n\n\n if role == 'teacher':\n if teacher_view.teacher_user == None:\n teacher_view.login()\n else:\n res = func(*args,**kwargs)\n return res\n\n\n return wrapper\n return deco",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
log = logging.getLogger('ucs')


def firmware_pack_create(handle, org_name, name, rack_bundle_version,
                         blade_bundle_version, descr="", mode="staged",
                         org_parent="org-root"):
    """
    This method creates Host Firmware pack.

    Args:
        handle (UcsHandle)
        org_name (string): Name of the organization
        name (string): Name of the firmware pack.
        rack_bundle_version (string): Rack bundle version
        blade_bundle_version (string): Blade bundle version
        mode (string): "one-shot" or "staged"
        descr (string): Basic description.
        org_parent (string): Parent of Org

    Returns:
        None

    Example:
        firmware_pack_create(handle, org_name="sample_org",
                             name="sample_fp",
                             rack_bundle_version="",
                             blade_bundle_version="")
    """

    org_dn = org_parent + "/org-" + org_name
    p_mo = handle.query_dn(org_dn)
    if not p_mo:
        log.info("Sub-Org <%s> not found!" % org_name)
    else:
        from ucsmsdk.mometa.firmware.FirmwareComputeHostPack import \
            FirmwareComputeHostPack

        mo = FirmwareComputeHostPack(parent_mo_or_dn=org_dn,
                                     name=name,
                                     descr=descr,
                                     rack_bundle_version=rack_bundle_version,
                                     mode=mode,
                                     blade_bundle_version=blade_bundle_version)
        handle.add_mo(mo)
        handle.commit()


def firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,
                         blade_bundle_version=None, descr=None, mode=None,
                         org_parent="org-root"):
    """
    This method modifies a Host Firmware pack.

    Args:
        handle (UcsHandle)
        org_name (string): Name of the organization
        name (string): Name of the firmware pack.
        rack_bundle_version (string): Rack bundle version
        blade_bundle_version (string): Blade bundle version
        mode (string): "one-shot" or "staged"
        descr (string): Basic description.
        org_parent (string): Parent of Org

    Returns:
        None

    Example:
        firmware_pack_modify(handle, org_name="sample_org",
                             name="sample_fp",
                             rack_bundle_version="",
                             blade_bundle_version="")
    """

    org_dn = org_parent + "/org-" + org_name
    fw_dn = org_dn + "/fw-host-pack-" + name
    mo = handle.query_dn(fw_dn)
    if mo is not None:
        if rack_bundle_version is not None:
            mo.rack_bundle_version = rack_bundle_version
        if blade_bundle_version is not None:
            mo.blade_bundle_version = blade_bundle_version
        if mode is not None:
            mo.mode = mode
        if descr is not None:
            mo.descr = descr

        handle.set_mo(mo)
        handle.commit()
    else:
        log.info("Firmware host pack <%s> not found." % name)


def firmware_pack_remove(handle, org_name, name, org_parent="org-root"):
    """
    This method removes Host Firmware pack.

    Args:
        handle (UcsHandle)
        org_name (string): Name of the organization
        name (string): Name of the firmware pack.
        org_parent (string): Parent of Org.

    Returns:
        None

    Example:
        firmware_pack_remove(handle, org_name="sample_org",
                             name="sample_fp")
    """
    org_dn = org_parent + "/org-" + org_name
    p_mo = handle.query_dn(org_dn)
    if not p_mo:
        log.info("Sub-Org <%s> not found!" % org_name)
    else:
        fw_dn = org_dn + "/fw-host-pack-" + name
        mo = handle.query_dn(fw_dn)
        if not mo:
            log.info("Firmware host pack <%s> not found. Nothing to remove" % name)
        else:
            handle.remove_mo(mo)
            handle.commit()
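

# Usage sketch: a typical create/modify/remove sequence. The UCSM address and
# credentials are placeholders, and the bundle version strings are only
# examples of the expected format -- all are assumptions, not values taken
# from this module.
def demo():
    from ucsmsdk.ucshandle import UcsHandle
    handle = UcsHandle("10.0.0.1", "admin", "password")
    handle.login()
    try:
        firmware_pack_create(handle, org_name="sample_org", name="sample_fp",
                             rack_bundle_version="3.1(2b)C",
                             blade_bundle_version="3.1(2b)B")
        firmware_pack_modify(handle, org_name="sample_org", name="sample_fp",
                             descr="updated description")
        firmware_pack_remove(handle, org_name="sample_org", name="sample_fp")
    finally:
        handle.logout()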
|
normal
|
{
"blob_id": "21cfe1ca606d18763fbfb8ff6862c382b3321adc",
"index": 8511,
"step-1": "<mask token>\n\n\ndef firmware_pack_create(handle, org_name, name, rack_bundle_version,\n blade_bundle_version, descr='', mode='staged', org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_create(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info('Sub-Org <%s> not found!' % org_name)\n else:\n from ucsmsdk.mometa.firmware.FirmwareComputeHostPack import FirmwareComputeHostPack\n mo = FirmwareComputeHostPack(parent_mo_or_dn=org_dn, name=name,\n descr=descr, rack_bundle_version=rack_bundle_version, mode=mode,\n blade_bundle_version=blade_bundle_version)\n handle.add_mo(mo)\n handle.commit()\n\n\ndef firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,\n blade_bundle_version=None, descr=None, mode=None, org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_modify(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n fw_dn = org_dn + '/fw-host-pack-' + name\n mo = handle.query_dn(fw_dn)\n if mo is not None:\n if rack_bundle_version is not None:\n mo.rack_bundle_version = rack_bundle_version\n if blade_bundle_version is not None:\n mo.blade_bundle_version = blade_bundle_version\n if mode is not None:\n mo.mode = mode\n if descr is not None:\n mo.descr = descr\n handle.set_mo(mo)\n handle.commit()\n else:\n log.info('Firmware host pack <%s> not found.' % name)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef firmware_pack_create(handle, org_name, name, rack_bundle_version,\n blade_bundle_version, descr='', mode='staged', org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_create(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info('Sub-Org <%s> not found!' % org_name)\n else:\n from ucsmsdk.mometa.firmware.FirmwareComputeHostPack import FirmwareComputeHostPack\n mo = FirmwareComputeHostPack(parent_mo_or_dn=org_dn, name=name,\n descr=descr, rack_bundle_version=rack_bundle_version, mode=mode,\n blade_bundle_version=blade_bundle_version)\n handle.add_mo(mo)\n handle.commit()\n\n\ndef firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,\n blade_bundle_version=None, descr=None, mode=None, org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_modify(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n fw_dn = org_dn + '/fw-host-pack-' + name\n mo = handle.query_dn(fw_dn)\n if mo is not None:\n if rack_bundle_version is not None:\n mo.rack_bundle_version = rack_bundle_version\n if blade_bundle_version is not None:\n mo.blade_bundle_version = blade_bundle_version\n if mode is not None:\n mo.mode = mode\n if descr is not None:\n mo.descr = descr\n handle.set_mo(mo)\n handle.commit()\n else:\n log.info('Firmware host pack <%s> not found.' % name)\n\n\ndef firmware_pack_remove(handle, org_name, name, org_parent='org-root'):\n \"\"\"\n This method removes Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n org_parent (string): Parent of Org.\n\n Returns:\n None\n\n Example:\n firmware_pack_remove(handle, org_name=\"sample_org\",\n name=\"sample_fp\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info('Sub-Org <%s> not found!' % org_name)\n else:\n fw_dn = org_dn + '/fw-host-pack-' + name\n mo = handle.query_dn(fw_dn)\n if not mo:\n log.info('Firmware host pack <%s> not found.Nothing to remove' %\n name)\n else:\n handle.remove_mo(mo)\n handle.commit()\n",
"step-3": "<mask token>\nlog = logging.getLogger('ucs')\n\n\ndef firmware_pack_create(handle, org_name, name, rack_bundle_version,\n blade_bundle_version, descr='', mode='staged', org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_create(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info('Sub-Org <%s> not found!' % org_name)\n else:\n from ucsmsdk.mometa.firmware.FirmwareComputeHostPack import FirmwareComputeHostPack\n mo = FirmwareComputeHostPack(parent_mo_or_dn=org_dn, name=name,\n descr=descr, rack_bundle_version=rack_bundle_version, mode=mode,\n blade_bundle_version=blade_bundle_version)\n handle.add_mo(mo)\n handle.commit()\n\n\ndef firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,\n blade_bundle_version=None, descr=None, mode=None, org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_modify(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n fw_dn = org_dn + '/fw-host-pack-' + name\n mo = handle.query_dn(fw_dn)\n if mo is not None:\n if rack_bundle_version is not None:\n mo.rack_bundle_version = rack_bundle_version\n if blade_bundle_version is not None:\n mo.blade_bundle_version = blade_bundle_version\n if mode is not None:\n mo.mode = mode\n if descr is not None:\n mo.descr = descr\n handle.set_mo(mo)\n handle.commit()\n else:\n log.info('Firmware host pack <%s> not found.' % name)\n\n\ndef firmware_pack_remove(handle, org_name, name, org_parent='org-root'):\n \"\"\"\n This method removes Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n org_parent (string): Parent of Org.\n\n Returns:\n None\n\n Example:\n firmware_pack_remove(handle, org_name=\"sample_org\",\n name=\"sample_fp\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info('Sub-Org <%s> not found!' % org_name)\n else:\n fw_dn = org_dn + '/fw-host-pack-' + name\n mo = handle.query_dn(fw_dn)\n if not mo:\n log.info('Firmware host pack <%s> not found.Nothing to remove' %\n name)\n else:\n handle.remove_mo(mo)\n handle.commit()\n",
"step-4": "import logging\nlog = logging.getLogger('ucs')\n\n\ndef firmware_pack_create(handle, org_name, name, rack_bundle_version,\n blade_bundle_version, descr='', mode='staged', org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_create(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info('Sub-Org <%s> not found!' % org_name)\n else:\n from ucsmsdk.mometa.firmware.FirmwareComputeHostPack import FirmwareComputeHostPack\n mo = FirmwareComputeHostPack(parent_mo_or_dn=org_dn, name=name,\n descr=descr, rack_bundle_version=rack_bundle_version, mode=mode,\n blade_bundle_version=blade_bundle_version)\n handle.add_mo(mo)\n handle.commit()\n\n\ndef firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,\n blade_bundle_version=None, descr=None, mode=None, org_parent='org-root'):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_modify(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n fw_dn = org_dn + '/fw-host-pack-' + name\n mo = handle.query_dn(fw_dn)\n if mo is not None:\n if rack_bundle_version is not None:\n mo.rack_bundle_version = rack_bundle_version\n if blade_bundle_version is not None:\n mo.blade_bundle_version = blade_bundle_version\n if mode is not None:\n mo.mode = mode\n if descr is not None:\n mo.descr = descr\n handle.set_mo(mo)\n handle.commit()\n else:\n log.info('Firmware host pack <%s> not found.' % name)\n\n\ndef firmware_pack_remove(handle, org_name, name, org_parent='org-root'):\n \"\"\"\n This method removes Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n org_parent (string): Parent of Org.\n\n Returns:\n None\n\n Example:\n firmware_pack_remove(handle, org_name=\"sample_org\",\n name=\"sample_fp\")\n \"\"\"\n org_dn = org_parent + '/org-' + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info('Sub-Org <%s> not found!' % org_name)\n else:\n fw_dn = org_dn + '/fw-host-pack-' + name\n mo = handle.query_dn(fw_dn)\n if not mo:\n log.info('Firmware host pack <%s> not found.Nothing to remove' %\n name)\n else:\n handle.remove_mo(mo)\n handle.commit()\n",
"step-5": "# Copyright 2015 Cisco Systems, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nlog = logging.getLogger('ucs')\n\n\ndef firmware_pack_create(handle, org_name, name, rack_bundle_version,\n blade_bundle_version, descr=\"\", mode=\"staged\",\n org_parent=\"org-root\"):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_create(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n\n org_dn = org_parent + \"/org-\" + org_name\n p_mo = handle.query_dn(org_dn)\n if not p_mo:\n log.info(\"Sub-Org <%s> not found!\" % org_name)\n else:\n from ucsmsdk.mometa.firmware.FirmwareComputeHostPack import\\\n FirmwareComputeHostPack\n\n mo = FirmwareComputeHostPack(parent_mo_or_dn=org_dn,\n name=name,\n descr=descr,\n rack_bundle_version=rack_bundle_version,\n mode=mode,\n blade_bundle_version=blade_bundle_version)\n handle.add_mo(mo)\n handle.commit()\n\n\ndef firmware_pack_modify(handle, org_name, name, rack_bundle_version=None,\n blade_bundle_version=None, descr=None, mode=None,\n org_parent=\"org-root\"):\n \"\"\"\n This method creates Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n rack_bundle_version (string): Rack bundle version\n blade_bundle_version (string): Blade bundle version\n mode (string): \"one-sot\" or \"staged\"\n descr (string): Basic description.\n org_parent (string): Parent of Org\n\n Returns:\n None\n\n Example:\n firmware_pack_modify(handle, org_name=\"sample_org\",\n name=\"sample_fp\",\n rack_bundle_version=\"\",\n blade_bundle_version=\"\")\n \"\"\"\n\n org_dn = org_parent + \"/org-\" + org_name\n fw_dn= org_dn + \"/fw-host-pack-\" + name\n mo = handle.query_dn(fw_dn)\n if mo is not None:\n if rack_bundle_version is not None:\n mo.rack_bundle_version = rack_bundle_version\n if blade_bundle_version is not None:\n mo.blade_bundle_version = blade_bundle_version\n if mode is not None:\n mo.mode=mode\n if descr is not None:\n mo.descr = descr\n\n handle.set_mo(mo)\n handle.commit()\n else:\n log.info(\"Firmware host pack <%s> not found.\" % name)\n\n\ndef firmware_pack_remove(handle, org_name, name, org_parent=\"org-root\"):\n\n \"\"\"\n This method removes Host Firmware pack.\n\n Args:\n handle (UcsHandle)\n org_name (string): Name of the organization\n name (string): Name of the firmware pack.\n org_parent (string): Parent of Org.\n\n Returns:\n None\n\n Example:\n firmware_pack_remove(handle, org_name=\"sample_org\",\n name=\"sample_fp\")\n \"\"\"\n org_dn = org_parent + \"/org-\" + org_name\n p_mo = 
handle.query_dn(org_dn)\n if not p_mo:\n log.info(\"Sub-Org <%s> not found!\" %org_name)\n else:\n fw_dn= org_dn + \"/fw-host-pack-\" + name\n mo = handle.query_dn(fw_dn)\n if not mo:\n log.info(\"Firmware host pack <%s> not found.Nothing to remove\" % name)\n else:\n handle.remove_mo(mo)\n handle.commit()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
"""
Alice and Bob have candy bars of different sizes: A[i] is the size of the i-th bar Alice has, and B[j] is the size of the j-th bar Bob has.
Since they are friends, they would like to exchange one candy bar each so that, after the exchange, they both have the same total amount of candy. (The total amount a person has is the sum of the sizes of the bars they own.)
Return an integer array ans, where ans[0] is the size of the bar Alice must exchange and ans[1] is the size of the bar Bob must exchange.
If there are multiple answers, any one may be returned. An answer is guaranteed to exist.
"""
def fairCandySwap(A, B):
    sumA, sumB = sum(A), sum(B)
    setA, setB = set(A), set(B)
    delta = (sumA - sumB) // 2
    for j in setB:
        if j + delta in setA:
            return (j + delta, j)
print(fairCandySwap(A=[1, 1], B=[2, 2]))
print(fairCandySwap(A=[1, 2], B=[2, 3]))
print(fairCandySwap(A=[2], B=[1, 3]))
print(fairCandySwap(A=[1, 2, 5], B=[2, 4]))
|
normal
|
{
"blob_id": "9abc5f18e2eb07afe6bc31d6bd27298350707d1d",
"index": 962,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef fairCandySwap(A, B):\n sumA, sumB = sum(A), sum(B)\n setA, setB = set(A), set(B)\n delta = (sumA - sumB) // 2\n for j in setB:\n if j + delta in setA:\n return j + delta, j\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fairCandySwap(A, B):\n sumA, sumB = sum(A), sum(B)\n setA, setB = set(A), set(B)\n delta = (sumA - sumB) // 2\n for j in setB:\n if j + delta in setA:\n return j + delta, j\n\n\nprint(fairCandySwap(A=[1, 1], B=[2, 2]))\nprint(fairCandySwap(A=[1, 2], B=[2, 3]))\nprint(fairCandySwap(A=[2], B=[1, 3]))\nprint(fairCandySwap(A=[1, 2, 5], B=[2, 4]))\n",
"step-4": "\"\"\"\n爱丽丝和鲍勃有不同大小的糖果棒:A[i] 是爱丽丝拥有的第 i 根糖果棒的大小,B[j] 是鲍勃拥有的第 j 根糖果棒的大小。\n\n因为他们是朋友,所以他们想交换一根糖果棒,这样交换后,他们都有相同的糖果总量。(一个人拥有的糖果总量是他们拥有的糖果棒大小的总和。)\n\n返回一个整数数组 ans,其中 ans[0] 是爱丽丝必须交换的糖果棒的大小,ans[1] 是 Bob 必须交换的糖果棒的大小。\n\n如果有多个答案,你可以返回其中任何一个。保证答案存在。\n\"\"\"\n\ndef fairCandySwap(A, B):\n sumA, sumB = sum(A), sum(B)\n setA, setB = set(A), set(B)\n delta = (sumA -sumB) // 2\n for j in setB:\n if j + delta in setA:\n return (j+delta, j)\n\nprint(fairCandySwap(A = [1,1], B = [2,2]))\nprint(fairCandySwap(A = [1,2], B = [2,3]))\nprint(fairCandySwap(A = [2], B = [1,3]))\nprint(fairCandySwap(A = [1,2,5], B = [2,4]))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.apps import AppConfig
from django.conf import settings
import importlib
import importlib.machinery


class RestAdminAppConfig(AppConfig):
    name = 'libraries.django_rest_admin'
    verbose_name = 'Rest Admin'
    loaded = False

    def ready(self):
        autodiscover()


def autodiscover():
    """
    Automatic discovery of rest_admin.py files inside apps,
    similar to what Django admin does.
    """
    from .register import rest_admin

    if not RestAdminAppConfig.loaded:
        for app in settings.INSTALLED_APPS:
            # For each app, we need to look for a rest_admin.py inside that app's
            # package. We can't use os.path here -- recall that modules may be
            # imported different ways (think zip files) -- so we need to get
            # the app's __path__ and look for rest_admin.py on that path.

            # Step 1: find out the app's __path__. Import errors here will (and
            # should) bubble up, but a missing __path__ (which is legal, but weird)
            # fails silently -- apps that do weird things with __path__ might
            # need to roll their own rest_admin registration.
            try:
                app_path = importlib.import_module(app).__path__
            except AttributeError:
                continue

            # Step 2: check that a rest_admin module exists on the app's path
            # before importing it. importlib.find_loader() is deprecated (and
            # removed in Python 3.12); PathFinder.find_spec performs the same
            # lookup without importing the module.
            if importlib.machinery.PathFinder.find_spec('rest_admin',
                                                        app_path) is None:
                continue

            # Step 3: import the app's rest_admin file. If this has errors we
            # want them to bubble up.
            importlib.import_module("%s.rest_admin" % app)

        # autodiscover was successful, reset loading flag.
        RestAdminAppConfig.loaded = True
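

# Usage sketch of what a per-app rest_admin.py picked up by autodiscover()
# might contain, mirroring the django.contrib.admin registration pattern.
# The register() call is an assumption about this project's rest_admin object,
# not an API confirmed by the code above:
#
#     # myapp/rest_admin.py
#     from libraries.django_rest_admin.register import rest_admin
#     from .models import Book
#
#     rest_admin.register(Book)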
|
normal
|
{
"blob_id": "a41d00c86d0bdab1bced77c275e56c3569af4f4e",
"index": 921,
"step-1": "<mask token>\n\n\nclass RestAdminAppConfig(AppConfig):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\ndef autodiscover():\n \"\"\"\n Automatic discovering of rest_admin.py file inside apps.\n similar to what Django admin does. \n \"\"\"\n from .register import rest_admin\n if not RestAdminAppConfig.loaded:\n for app in settings.INSTALLED_APPS:\n try:\n app_path = importlib.import_module(app).__path__\n except AttributeError:\n continue\n if not importlib.find_loader('rest_admin', app_path):\n continue\n importlib.import_module('%s.rest_admin' % app)\n RestAdminAppConfig.loaded = True\n",
"step-4": "from django.apps import AppConfig\nfrom django.conf import settings\nimport importlib\nimport importlib.util\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\ndef autodiscover():\n \"\"\"\n Automatic discovering of rest_admin.py file inside apps.\n similar to what Django admin does. \n \"\"\"\n from .register import rest_admin\n if not RestAdminAppConfig.loaded:\n for app in settings.INSTALLED_APPS:\n try:\n app_path = importlib.import_module(app).__path__\n except AttributeError:\n continue\n if not importlib.find_loader('rest_admin', app_path):\n continue\n importlib.import_module('%s.rest_admin' % app)\n RestAdminAppConfig.loaded = True\n",
"step-5": "from django.apps import AppConfig\nfrom django.conf import settings\nimport importlib\nimport importlib.util\n\n\nclass RestAdminAppConfig(AppConfig):\n name = 'libraries.django_rest_admin'\n verbose_name = 'Rest Admin'\n loaded = False\n\n def ready(self):\n autodiscover()\n\n\ndef autodiscover():\n \"\"\"\n Automatic discovering of rest_admin.py file inside apps.\n similar to what Django admin does. \n \"\"\"\n from .register import rest_admin\n\n if not RestAdminAppConfig.loaded:\n for app in settings.INSTALLED_APPS:\n # For each app, we need to look for an rest_admin.py inside that app's\n # package. We can't use os.path here -- recall that modules may be\n # imported different ways (think zip files) -- so we need to get\n # the app's __path__ and look for rest_admin.py on that path.\n\n # Step 1: find out the app's __path__ Import errors here will (and\n # should) bubble up, but a missing __path__ (which is legal, but weird)\n # fails silently -- apps that do weird things with __path__ might\n # need to roll their own rest_admin registration.\n try:\n app_path = importlib.import_module(app).__path__\n except AttributeError:\n continue\n\n # Step 2: use imp.find_module to find the app's rest_admin.py. For some\n # reason imp.find_module raises ImportError if the app can't be found\n # but doesn't actually try to import the module. So skip this app if\n # its rest_admin.py doesn't exist\n # try:\n # importlib.util.find_spec('rest_admin', app_path)\n # # imp.find_module('rest_admin', app_path)\n # except ImportError:\n # continue\n #\n if not importlib.find_loader('rest_admin', app_path):\n continue\n\n # Step 3: import the app's admin file. If this has errors we want them\n # to bubble up.\n importlib.import_module(\"%s.rest_admin\" % app)\n\n # autodiscover was successful, reset loading flag.\n RestAdminAppConfig.loaded = True\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
import numpy as np
import heapq


class KdNode:
    """
    node of kdtree.
    """
    def __init__(self, depth, splitting_feature, splitting_value, idx, parent):
        """
        :param depth: depth of the node.
        :param splitting_feature: split samples by which feature.
        :param splitting_value: split samples by which feature value.
        :param idx: indices of samples in the dataset.
        :param parent: the parent node if it exists.
        """
        self.depth = depth
        self.splitting_feature = splitting_feature
        self.splitting_value = splitting_value
        self.idx = idx
        self.parent = parent
        # left and right children
        self.left, self.right = None, None


class KdTree:
    """an efficient data structure for finding k nearest neighbours
    https://en.wikipedia.org/wiki/K-d_tree

    pseudo-code: (construct)
    input: X, shape is (n_samples, n_features). dimension k
    output: k-d tree

    (1) start: divide all samples in X into two equal-sized collections by the median of the
        first feature. Construct a root whose depth is 0. Store samples equal to the median
        at the root, samples < median at the left child of the root, and those > median at
        the right child.
    (2) repeat: for nodes of depth j, select the l-th feature as splitting axis, l = j (mod k).
        divide samples in the node by the median of the l-th feature: store samples equal to
        the median at the node, and split the other samples into left and right children on
        whether they are < median.
    (3) terminate: stop once a node has no samples left for its left and right subtrees.

    pseudo-code: (search)
    input: k-d tree, target sample x.
    output: k nearest neighbours of x. (a list 'k-nn')

    (1) top-down: starting from the root, if the feature value of the splitting axis of x is
        smaller than the splitting threshold of the current node, move to the left child,
        else to the right child. go down recursively until a leaf is reached, then append
        the samples of the leaf to a list 'k-nn'.
    (2) bottom-up: move to the parent of the current node. If the max distance from x to
        samples in 'k-nn' is larger than the distance from x to the splitting threshold of
        the parent, the sibling subtree may hold samples closer to x than some already in
        'k-nn'; search it, and keep only the k closest samples found so far.
    (3) terminate: terminate after reaching the root and finishing the check of its other
        subtree.
    """
    def __init__(self):
        self.root = None

    def create(self, X, dimensions=None):
        """
        create a kd-tree on data X.
        :param X: shape is (n_samples, n_features).
        :param dimensions: the max number of features chosen for splitting samples.
            if None, set to be n_features.
        :return: None
        """
        n_samples, n_features = X.shape
        self.X = X
        if not dimensions:
            dimensions = n_features
        self.root = KdNode(depth=0,
                           splitting_feature=0,
                           splitting_value=np.median(X[:, 0]),
                           idx=np.arange(n_samples),
                           parent=None)
        # grow the tree by DFS
        stack = [self.root]
        while stack:
            node = stack.pop()
            # splitting samples in the node into two children
            sample_values = X[node.idx, node.splitting_feature]
            left_idx = node.idx[sample_values < node.splitting_value]
            right_idx = node.idx[sample_values > node.splitting_value]
            node.idx = node.idx[sample_values == node.splitting_value]
            # since left and right subtrees are divided by the median of their parent,
            # the sizes of the two subtrees are expected to be equal (this holds when
            # feature values are distinct, as with the continuous random data below)
            assert len(left_idx) == len(right_idx), \
                'left and right subtrees should have the same number of samples'
            # append left and right children
            if len(left_idx):
                child_depth = node.depth + 1
                child_feature = (node.depth + 1) % dimensions
                left_value = np.median(X[left_idx, child_feature])
                node.left = KdNode(depth=child_depth, splitting_feature=child_feature,
                                   splitting_value=left_value, idx=left_idx, parent=node)
                right_value = np.median(X[right_idx, child_feature])
                node.right = KdNode(depth=child_depth, splitting_feature=child_feature,
                                    splitting_value=right_value, idx=right_idx, parent=node)
                stack.append(node.left)
                stack.append(node.right)

    def _search(self, x, k=3):
        """
        :param x: the target sample point. shape is (n_features,)
        :param k: the number of nearest neighbours to find.
        :return: a list of k nearest neighbours.
        """
        # top-down
        cur_node = self.root
        # kd-tree is actually a full binary tree
        while cur_node.left:
            if x[cur_node.splitting_feature] <= cur_node.splitting_value:
                cur_node = cur_node.left
            else:
                cur_node = cur_node.right
        # append samples in cur_node into k_nn. k_nn is a max heap
        k_nn = []
        # bottom-up
        while cur_node:
            for idx in cur_node.idx:
                # Euclidean distance
                dist = np.linalg.norm(self.X[idx] - x)
                # negate the dist to construct a max heap
                heapq.heappush(k_nn, (-dist, idx))
            if abs(x[cur_node.splitting_feature] - cur_node.splitting_value) < -k_nn[0][0] \
                    or len(k_nn) < k:
                # the max distance from x to samples in 'k-nn' > the distance from x to
                # the splitting threshold, so check samples of the other child
                if x[cur_node.splitting_feature] <= cur_node.splitting_value:
                    checking_samples = self._samples_of_subtree(cur_node.right, x, k)
                else:
                    checking_samples = self._samples_of_subtree(cur_node.left, x, k)
                k_nn.extend(checking_samples)
                heapq.heapify(k_nn)
            # keep the size of k_nn <= k
            while len(k_nn) > k:
                heapq.heappop(k_nn)
            cur_node = cur_node.parent
        # sort k_nn by ascending distance
        k_nn.sort(reverse=True)
        dists, idxs = zip(*k_nn)
        return [-d for d in dists], list(idxs)

    def search(self, X, k=3):
        """
        :param X: the target sample points. shape is (n_samples, n_features)
        :param k: the number of nearest neighbours to find.
        :return: lists of k nearest neighbours for each sample point.
        """
        assert self.root, 'must create a tree before search'
        result = [self._search(x, k) for x in X]
        dists, idxs = zip(*result)
        return np.array(dists), np.array(idxs)

    def _samples_of_subtree(self, root, x, k):
        # get k nearest neighbours from the subtree rooted at root
        k_nn = []

        def dfs(node):
            if not node:
                return
            for idx in node.idx:
                dist = np.linalg.norm(x - self.X[idx])
                heapq.heappush(k_nn, (-dist, idx))
            while len(k_nn) > k:
                heapq.heappop(k_nn)
            if len(k_nn) < k or \
                    (0 < len(k_nn) and abs(x[node.splitting_feature] - node.splitting_value) < -k_nn[0][0]):
                # have to search both children
                dfs(node.left)
                dfs(node.right)
            else:
                if x[node.splitting_feature] <= node.splitting_value:
                    dfs(node.left)
                else:
                    dfs(node.right)

        dfs(root)
        return k_nn


if __name__ == '__main__':
    from sklearn.neighbors import NearestNeighbors

    n_samples, n_features = 2000, 10
    n_test = 100
    K = 5
    X = np.random.random((n_samples, n_features))
    test_X = np.random.random((n_test, n_features))

    nbrs = NearestNeighbors(n_neighbors=K, algorithm='ball_tree').fit(X)
    distances, indices = nbrs.kneighbors(test_X)

    tree = KdTree()
    tree.create(X)
    dists, idxs = tree.search(test_X, k=K)
    print(np.all(distances == dists))
    print(np.all(indices == idxs))
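
    # Usage sketch: turning the neighbour indices into a KNN classifier by
    # majority vote. "labels" is a hypothetical array of class ids aligned
    # with X, used only for illustration.
    from collections import Counter
    labels = np.random.randint(0, 3, size=n_samples)
    predictions = np.array([Counter(labels[row]).most_common(1)[0][0]
                            for row in idxs])
    print(predictions[:10])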
|
normal
|
{
"blob_id": "2f16c74e51789dd06bfc1fe1c6173fa5b0ac38cd",
"index": 4747,
"step-1": "<mask token>\n\n\nclass KdTree:\n <mask token>\n\n def __init__(self):\n self.root = None\n\n def create(self, X, dimensions=None):\n \"\"\"\n create a kd-tree on data X.\n :param X: shape is (n_samples, n_features).\n :param dimensions: the max number of features chosen for splitting samples. if None, set to\n be n_features.\n :return: None\n \"\"\"\n n_samples, n_features = X.shape\n self.X = X\n if not dimensions:\n dimensions = n_features\n self.root = KdNode(depth=0, splitting_feature=0, splitting_value=np\n .median(X[:, 0]), idx=np.arange(n_samples), parent=None)\n stack = [self.root]\n while stack:\n node = stack.pop()\n sample_values = X[node.idx, node.splitting_feature]\n left_idx = node.idx[sample_values < node.splitting_value]\n right_idx = node.idx[sample_values > node.splitting_value]\n node.idx = node.idx[sample_values == node.splitting_value]\n assert len(left_idx) == len(right_idx\n ), 'left and right subtrees should have the same number of samples'\n if len(left_idx):\n child_depth = node.depth + 1\n child_feature = (node.depth + 1) % dimensions\n left_value = np.median(X[left_idx, child_feature])\n node.left = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=left_value, idx=left_idx,\n parent=node)\n right_value = np.median(X[right_idx, child_feature])\n node.right = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=right_value, idx=\n right_idx, parent=node)\n stack.append(node.left)\n stack.append(node.right)\n\n def _search(self, x, k=3):\n \"\"\"\n :param x: the target sample point. shape is (n_features,)\n :param k: the number of nearest neighbours to find.\n :return: a list of k nearest neighbours.\n \"\"\"\n cur_node = self.root\n while cur_node.left:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n cur_node = cur_node.left\n else:\n cur_node = cur_node.right\n k_nn = []\n while cur_node:\n for idx in cur_node.idx:\n dist = np.linalg.norm(self.X[idx] - x)\n heapq.heappush(k_nn, (-dist, idx))\n if abs(x[cur_node.splitting_feature] - cur_node.splitting_value\n ) < -k_nn[0][0] or len(k_nn) < k:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n checking_samples = self._samples_of_subtree(cur_node.\n right, x, k)\n else:\n checking_samples = self._samples_of_subtree(cur_node.\n left, x, k)\n k_nn.extend(checking_samples)\n heapq.heapify(k_nn)\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n cur_node = cur_node.parent\n k_nn.sort(reverse=True)\n dists, idxs = zip(*k_nn)\n return [(-d) for d in dists], list(idxs)\n\n def search(self, X, k=3):\n \"\"\"\n :param X: the target sample points. shape is (n_samples, n_features)\n :param k: the number of nearest neighbours to find.\n :return: lists of k nearest neighbours for each sample point.\n \"\"\"\n assert self.root, 'must create a tree before search'\n result = [self._search(x, k) for x in X]\n dists, idxs = zip(*result)\n return np.array(dists), np.array(idxs)\n\n def _samples_of_subtree(self, root, x, k):\n k_nn = []\n\n def dfs(node):\n if not node:\n return\n for idx in node.idx:\n dist = np.linalg.norm(x - self.X[idx])\n heapq.heappush(k_nn, (-dist, idx))\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n if len(k_nn) < k or 0 < len(k_nn) and abs(x[node.\n splitting_feature] - node.splitting_value) < -k_nn[0][0]:\n dfs(node.left)\n dfs(node.right)\n elif x[node.splitting_feature] <= node.splitting_value:\n dfs(node.left)\n else:\n dfs(node.right)\n dfs(root)\n return k_nn\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass KdTree:\n \"\"\"an efficient algorithm of find k-nearest-neighbours\n https://en.wikipedia.org/wiki/K-d_tree\n\n pseudo-code: (construct)\n input: X, shape is (n_samples, n_features). dimension k\n output: k-d tree\n\n (1) start: divide all samples in X into two equal-sized collections by the median of the\n first feature. Construct a root whose depth is 1. For samples equal to the median,\n store them at the root. Store samples < median at the left child of the root,\n and those > median at the right child.\n (2) repeat: for nodes of depth j, select the l-th feature as splitting axis. l = j(mod k).\n divide samples in the node by the median of the l-th feature. store samples equal to\n the median at the node, and split other samples into left and right children on whether\n they < median.\n (3) terminate: terminate until no samples in left and right subtrees of the node.\n\n pseudo-code: (search)\n input: k-d tree, target sample x.\n output: k nearest neighbours of x. (a list 'k-nn')\n\n (1) top-down: starting from the root. if the feature value of the splitting axis of x is smaller\n than the splitting threshold (the median of 1st feature) of the root, move it to the left\n child. else to the right child. go down recursively until reach a leaf. append samples of\n the leaf to a list 'k-nn'.\n (2) bottom-up: move to the parent of current node. If the max distance from x to samples in\n 'k-nn' is larger than the distance from x to the splitting threshold of the parent, search\n for samples in the right subtree which is closer to x than some samples in 'k-nn'. If\n successfully find some, replace those 'furthest' samples in 'k-nn' with closer samples\n if the size of 'k-nn' > k.\n (3) terminate: terminate if reach the root and finish checking its right subtree.\n \"\"\"\n\n def __init__(self):\n self.root = None\n\n def create(self, X, dimensions=None):\n \"\"\"\n create a kd-tree on data X.\n :param X: shape is (n_samples, n_features).\n :param dimensions: the max number of features chosen for splitting samples. if None, set to\n be n_features.\n :return: None\n \"\"\"\n n_samples, n_features = X.shape\n self.X = X\n if not dimensions:\n dimensions = n_features\n self.root = KdNode(depth=0, splitting_feature=0, splitting_value=np\n .median(X[:, 0]), idx=np.arange(n_samples), parent=None)\n stack = [self.root]\n while stack:\n node = stack.pop()\n sample_values = X[node.idx, node.splitting_feature]\n left_idx = node.idx[sample_values < node.splitting_value]\n right_idx = node.idx[sample_values > node.splitting_value]\n node.idx = node.idx[sample_values == node.splitting_value]\n assert len(left_idx) == len(right_idx\n ), 'left and right subtrees should have the same number of samples'\n if len(left_idx):\n child_depth = node.depth + 1\n child_feature = (node.depth + 1) % dimensions\n left_value = np.median(X[left_idx, child_feature])\n node.left = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=left_value, idx=left_idx,\n parent=node)\n right_value = np.median(X[right_idx, child_feature])\n node.right = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=right_value, idx=\n right_idx, parent=node)\n stack.append(node.left)\n stack.append(node.right)\n\n def _search(self, x, k=3):\n \"\"\"\n :param x: the target sample point. 
shape is (n_features,)\n :param k: the number of nearest neighbours to find.\n :return: a list of k nearest neighbours.\n \"\"\"\n cur_node = self.root\n while cur_node.left:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n cur_node = cur_node.left\n else:\n cur_node = cur_node.right\n k_nn = []\n while cur_node:\n for idx in cur_node.idx:\n dist = np.linalg.norm(self.X[idx] - x)\n heapq.heappush(k_nn, (-dist, idx))\n if abs(x[cur_node.splitting_feature] - cur_node.splitting_value\n ) < -k_nn[0][0] or len(k_nn) < k:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n checking_samples = self._samples_of_subtree(cur_node.\n right, x, k)\n else:\n checking_samples = self._samples_of_subtree(cur_node.\n left, x, k)\n k_nn.extend(checking_samples)\n heapq.heapify(k_nn)\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n cur_node = cur_node.parent\n k_nn.sort(reverse=True)\n dists, idxs = zip(*k_nn)\n return [(-d) for d in dists], list(idxs)\n\n def search(self, X, k=3):\n \"\"\"\n :param X: the target sample points. shape is (n_samples, n_features)\n :param k: the number of nearest neighbours to find.\n :return: lists of k nearest neighbours for each sample point.\n \"\"\"\n assert self.root, 'must create a tree before search'\n result = [self._search(x, k) for x in X]\n dists, idxs = zip(*result)\n return np.array(dists), np.array(idxs)\n\n def _samples_of_subtree(self, root, x, k):\n k_nn = []\n\n def dfs(node):\n if not node:\n return\n for idx in node.idx:\n dist = np.linalg.norm(x - self.X[idx])\n heapq.heappush(k_nn, (-dist, idx))\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n if len(k_nn) < k or 0 < len(k_nn) and abs(x[node.\n splitting_feature] - node.splitting_value) < -k_nn[0][0]:\n dfs(node.left)\n dfs(node.right)\n elif x[node.splitting_feature] <= node.splitting_value:\n dfs(node.left)\n else:\n dfs(node.right)\n dfs(root)\n return k_nn\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass KdNode:\n \"\"\"\n node of kdtree.\n \"\"\"\n\n def __init__(self, depth, splitting_feature, splitting_value, idx, parent):\n \"\"\"\n :param depth: depth of the node.\n :param splitting_feature: split samples by which feature.\n :param splitting_value: split samples by which feature value.\n :param idx: indices of samples in the dataset.\n :param parent: the parent node if it exists.\n \"\"\"\n self.depth = depth\n self.splitting_feature = splitting_feature\n self.splitting_value = splitting_value\n self.idx = idx\n self.parent = parent\n self.left, self.right = None, None\n\n\nclass KdTree:\n \"\"\"an efficient algorithm of find k-nearest-neighbours\n https://en.wikipedia.org/wiki/K-d_tree\n\n pseudo-code: (construct)\n input: X, shape is (n_samples, n_features). dimension k\n output: k-d tree\n\n (1) start: divide all samples in X into two equal-sized collections by the median of the\n first feature. Construct a root whose depth is 1. For samples equal to the median,\n store them at the root. Store samples < median at the left child of the root,\n and those > median at the right child.\n (2) repeat: for nodes of depth j, select the l-th feature as splitting axis. l = j(mod k).\n divide samples in the node by the median of the l-th feature. store samples equal to\n the median at the node, and split other samples into left and right children on whether\n they < median.\n (3) terminate: terminate until no samples in left and right subtrees of the node.\n\n pseudo-code: (search)\n input: k-d tree, target sample x.\n output: k nearest neighbours of x. (a list 'k-nn')\n\n (1) top-down: starting from the root. if the feature value of the splitting axis of x is smaller\n than the splitting threshold (the median of 1st feature) of the root, move it to the left\n child. else to the right child. go down recursively until reach a leaf. append samples of\n the leaf to a list 'k-nn'.\n (2) bottom-up: move to the parent of current node. If the max distance from x to samples in\n 'k-nn' is larger than the distance from x to the splitting threshold of the parent, search\n for samples in the right subtree which is closer to x than some samples in 'k-nn'. If\n successfully find some, replace those 'furthest' samples in 'k-nn' with closer samples\n if the size of 'k-nn' > k.\n (3) terminate: terminate if reach the root and finish checking its right subtree.\n \"\"\"\n\n def __init__(self):\n self.root = None\n\n def create(self, X, dimensions=None):\n \"\"\"\n create a kd-tree on data X.\n :param X: shape is (n_samples, n_features).\n :param dimensions: the max number of features chosen for splitting samples. 
if None, set to\n be n_features.\n :return: None\n \"\"\"\n n_samples, n_features = X.shape\n self.X = X\n if not dimensions:\n dimensions = n_features\n self.root = KdNode(depth=0, splitting_feature=0, splitting_value=np\n .median(X[:, 0]), idx=np.arange(n_samples), parent=None)\n stack = [self.root]\n while stack:\n node = stack.pop()\n sample_values = X[node.idx, node.splitting_feature]\n left_idx = node.idx[sample_values < node.splitting_value]\n right_idx = node.idx[sample_values > node.splitting_value]\n node.idx = node.idx[sample_values == node.splitting_value]\n assert len(left_idx) == len(right_idx\n ), 'left and right subtrees should have the same number of samples'\n if len(left_idx):\n child_depth = node.depth + 1\n child_feature = (node.depth + 1) % dimensions\n left_value = np.median(X[left_idx, child_feature])\n node.left = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=left_value, idx=left_idx,\n parent=node)\n right_value = np.median(X[right_idx, child_feature])\n node.right = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=right_value, idx=\n right_idx, parent=node)\n stack.append(node.left)\n stack.append(node.right)\n\n def _search(self, x, k=3):\n \"\"\"\n :param x: the target sample point. shape is (n_features,)\n :param k: the number of nearest neighbours to find.\n :return: a list of k nearest neighbours.\n \"\"\"\n cur_node = self.root\n while cur_node.left:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n cur_node = cur_node.left\n else:\n cur_node = cur_node.right\n k_nn = []\n while cur_node:\n for idx in cur_node.idx:\n dist = np.linalg.norm(self.X[idx] - x)\n heapq.heappush(k_nn, (-dist, idx))\n if abs(x[cur_node.splitting_feature] - cur_node.splitting_value\n ) < -k_nn[0][0] or len(k_nn) < k:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n checking_samples = self._samples_of_subtree(cur_node.\n right, x, k)\n else:\n checking_samples = self._samples_of_subtree(cur_node.\n left, x, k)\n k_nn.extend(checking_samples)\n heapq.heapify(k_nn)\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n cur_node = cur_node.parent\n k_nn.sort(reverse=True)\n dists, idxs = zip(*k_nn)\n return [(-d) for d in dists], list(idxs)\n\n def search(self, X, k=3):\n \"\"\"\n :param X: the target sample points. shape is (n_samples, n_features)\n :param k: the number of nearest neighbours to find.\n :return: lists of k nearest neighbours for each sample point.\n \"\"\"\n assert self.root, 'must create a tree before search'\n result = [self._search(x, k) for x in X]\n dists, idxs = zip(*result)\n return np.array(dists), np.array(idxs)\n\n def _samples_of_subtree(self, root, x, k):\n k_nn = []\n\n def dfs(node):\n if not node:\n return\n for idx in node.idx:\n dist = np.linalg.norm(x - self.X[idx])\n heapq.heappush(k_nn, (-dist, idx))\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n if len(k_nn) < k or 0 < len(k_nn) and abs(x[node.\n splitting_feature] - node.splitting_value) < -k_nn[0][0]:\n dfs(node.left)\n dfs(node.right)\n elif x[node.splitting_feature] <= node.splitting_value:\n dfs(node.left)\n else:\n dfs(node.right)\n dfs(root)\n return k_nn\n\n\n<mask token>\n",
"step-4": "import numpy as np\nimport heapq\n\n\nclass KdNode:\n \"\"\"\n node of kdtree.\n \"\"\"\n\n def __init__(self, depth, splitting_feature, splitting_value, idx, parent):\n \"\"\"\n :param depth: depth of the node.\n :param splitting_feature: split samples by which feature.\n :param splitting_value: split samples by which feature value.\n :param idx: indices of samples in the dataset.\n :param parent: the parent node if it exists.\n \"\"\"\n self.depth = depth\n self.splitting_feature = splitting_feature\n self.splitting_value = splitting_value\n self.idx = idx\n self.parent = parent\n self.left, self.right = None, None\n\n\nclass KdTree:\n \"\"\"an efficient algorithm of find k-nearest-neighbours\n https://en.wikipedia.org/wiki/K-d_tree\n\n pseudo-code: (construct)\n input: X, shape is (n_samples, n_features). dimension k\n output: k-d tree\n\n (1) start: divide all samples in X into two equal-sized collections by the median of the\n first feature. Construct a root whose depth is 1. For samples equal to the median,\n store them at the root. Store samples < median at the left child of the root,\n and those > median at the right child.\n (2) repeat: for nodes of depth j, select the l-th feature as splitting axis. l = j(mod k).\n divide samples in the node by the median of the l-th feature. store samples equal to\n the median at the node, and split other samples into left and right children on whether\n they < median.\n (3) terminate: terminate until no samples in left and right subtrees of the node.\n\n pseudo-code: (search)\n input: k-d tree, target sample x.\n output: k nearest neighbours of x. (a list 'k-nn')\n\n (1) top-down: starting from the root. if the feature value of the splitting axis of x is smaller\n than the splitting threshold (the median of 1st feature) of the root, move it to the left\n child. else to the right child. go down recursively until reach a leaf. append samples of\n the leaf to a list 'k-nn'.\n (2) bottom-up: move to the parent of current node. If the max distance from x to samples in\n 'k-nn' is larger than the distance from x to the splitting threshold of the parent, search\n for samples in the right subtree which is closer to x than some samples in 'k-nn'. If\n successfully find some, replace those 'furthest' samples in 'k-nn' with closer samples\n if the size of 'k-nn' > k.\n (3) terminate: terminate if reach the root and finish checking its right subtree.\n \"\"\"\n\n def __init__(self):\n self.root = None\n\n def create(self, X, dimensions=None):\n \"\"\"\n create a kd-tree on data X.\n :param X: shape is (n_samples, n_features).\n :param dimensions: the max number of features chosen for splitting samples. 
if None, set to\n be n_features.\n :return: None\n \"\"\"\n n_samples, n_features = X.shape\n self.X = X\n if not dimensions:\n dimensions = n_features\n self.root = KdNode(depth=0, splitting_feature=0, splitting_value=np\n .median(X[:, 0]), idx=np.arange(n_samples), parent=None)\n stack = [self.root]\n while stack:\n node = stack.pop()\n sample_values = X[node.idx, node.splitting_feature]\n left_idx = node.idx[sample_values < node.splitting_value]\n right_idx = node.idx[sample_values > node.splitting_value]\n node.idx = node.idx[sample_values == node.splitting_value]\n assert len(left_idx) == len(right_idx\n ), 'left and right subtrees should have the same number of samples'\n if len(left_idx):\n child_depth = node.depth + 1\n child_feature = (node.depth + 1) % dimensions\n left_value = np.median(X[left_idx, child_feature])\n node.left = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=left_value, idx=left_idx,\n parent=node)\n right_value = np.median(X[right_idx, child_feature])\n node.right = KdNode(depth=child_depth, splitting_feature=\n child_feature, splitting_value=right_value, idx=\n right_idx, parent=node)\n stack.append(node.left)\n stack.append(node.right)\n\n def _search(self, x, k=3):\n \"\"\"\n :param x: the target sample point. shape is (n_features,)\n :param k: the number of nearest neighbours to find.\n :return: a list of k nearest neighbours.\n \"\"\"\n cur_node = self.root\n while cur_node.left:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n cur_node = cur_node.left\n else:\n cur_node = cur_node.right\n k_nn = []\n while cur_node:\n for idx in cur_node.idx:\n dist = np.linalg.norm(self.X[idx] - x)\n heapq.heappush(k_nn, (-dist, idx))\n if abs(x[cur_node.splitting_feature] - cur_node.splitting_value\n ) < -k_nn[0][0] or len(k_nn) < k:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n checking_samples = self._samples_of_subtree(cur_node.\n right, x, k)\n else:\n checking_samples = self._samples_of_subtree(cur_node.\n left, x, k)\n k_nn.extend(checking_samples)\n heapq.heapify(k_nn)\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n cur_node = cur_node.parent\n k_nn.sort(reverse=True)\n dists, idxs = zip(*k_nn)\n return [(-d) for d in dists], list(idxs)\n\n def search(self, X, k=3):\n \"\"\"\n :param X: the target sample points. 
shape is (n_samples, n_features)\n :param k: the number of nearest neighbours to find.\n :return: lists of k nearest neighbours for each sample point.\n \"\"\"\n assert self.root, 'must create a tree before search'\n result = [self._search(x, k) for x in X]\n dists, idxs = zip(*result)\n return np.array(dists), np.array(idxs)\n\n def _samples_of_subtree(self, root, x, k):\n k_nn = []\n\n def dfs(node):\n if not node:\n return\n for idx in node.idx:\n dist = np.linalg.norm(x - self.X[idx])\n heapq.heappush(k_nn, (-dist, idx))\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n if len(k_nn) < k or 0 < len(k_nn) and abs(x[node.\n splitting_feature] - node.splitting_value) < -k_nn[0][0]:\n dfs(node.left)\n dfs(node.right)\n elif x[node.splitting_feature] <= node.splitting_value:\n dfs(node.left)\n else:\n dfs(node.right)\n dfs(root)\n return k_nn\n\n\nif __name__ == '__main__':\n from sklearn.neighbors import NearestNeighbors\n n_samples, n_features = 2000, 10\n n_test = 100\n K = 5\n X = np.random.random((n_samples, n_features))\n test_X = np.random.random((n_test, n_features))\n nbrs = NearestNeighbors(n_neighbors=K, algorithm='ball_tree').fit(X)\n distances, indices = nbrs.kneighbors(test_X)\n tree = KdTree()\n tree.create(X)\n dists, idxs = tree.search(test_X, k=K)\n print(np.all(distances == dists))\n print(np.all(indices == idxs))\n",
"step-5": "import numpy as np\nimport heapq\n\n\nclass KdNode:\n \"\"\"\n node of kdtree.\n \"\"\"\n def __init__(self, depth, splitting_feature, splitting_value, idx, parent):\n \"\"\"\n :param depth: depth of the node.\n :param splitting_feature: split samples by which feature.\n :param splitting_value: split samples by which feature value.\n :param idx: indices of samples in the dataset.\n :param parent: the parent node if it exists.\n \"\"\"\n self.depth = depth\n self.splitting_feature = splitting_feature\n self.splitting_value = splitting_value\n self.idx = idx\n self.parent = parent\n # left and right children\n self.left, self.right = None, None\n\n\nclass KdTree:\n \"\"\"an efficient algorithm of find k-nearest-neighbours\n https://en.wikipedia.org/wiki/K-d_tree\n\n pseudo-code: (construct)\n input: X, shape is (n_samples, n_features). dimension k\n output: k-d tree\n\n (1) start: divide all samples in X into two equal-sized collections by the median of the\n first feature. Construct a root whose depth is 1. For samples equal to the median,\n store them at the root. Store samples < median at the left child of the root,\n and those > median at the right child.\n (2) repeat: for nodes of depth j, select the l-th feature as splitting axis. l = j(mod k).\n divide samples in the node by the median of the l-th feature. store samples equal to\n the median at the node, and split other samples into left and right children on whether\n they < median.\n (3) terminate: terminate until no samples in left and right subtrees of the node.\n\n pseudo-code: (search)\n input: k-d tree, target sample x.\n output: k nearest neighbours of x. (a list 'k-nn')\n\n (1) top-down: starting from the root. if the feature value of the splitting axis of x is smaller\n than the splitting threshold (the median of 1st feature) of the root, move it to the left\n child. else to the right child. go down recursively until reach a leaf. append samples of\n the leaf to a list 'k-nn'.\n (2) bottom-up: move to the parent of current node. If the max distance from x to samples in\n 'k-nn' is larger than the distance from x to the splitting threshold of the parent, search\n for samples in the right subtree which is closer to x than some samples in 'k-nn'. If\n successfully find some, replace those 'furthest' samples in 'k-nn' with closer samples\n if the size of 'k-nn' > k.\n (3) terminate: terminate if reach the root and finish checking its right subtree.\n \"\"\"\n def __init__(self):\n self.root = None\n\n def create(self, X, dimensions=None):\n \"\"\"\n create a kd-tree on data X.\n :param X: shape is (n_samples, n_features).\n :param dimensions: the max number of features chosen for splitting samples. 
if None, set to\n be n_features.\n :return: None\n \"\"\"\n n_samples, n_features = X.shape\n self.X = X\n if not dimensions:\n dimensions = n_features\n\n self.root = KdNode(depth=0,\n splitting_feature=0,\n splitting_value=np.median(X[:, 0]),\n idx=np.arange(n_samples),\n parent=None)\n # grow the tree by DFS\n stack = [self.root]\n while stack:\n node = stack.pop()\n # splitting samples in the node into two children\n sample_values = X[node.idx, node.splitting_feature]\n left_idx = node.idx[sample_values < node.splitting_value]\n right_idx = node.idx[sample_values > node.splitting_value]\n node.idx = node.idx[sample_values == node.splitting_value]\n # since left and right subtrees are divided by the median of their parent,\n # the sizes of the two subtrees are expected to be equal\n assert len(left_idx) == len(right_idx),\\\n 'left and right subtrees should have the same number of samples'\n # append left and right children\n if len(left_idx):\n child_depth = node.depth + 1\n child_feature = (node.depth + 1) % dimensions\n left_value = np.median(X[left_idx, child_feature])\n node.left = KdNode(depth=child_depth, splitting_feature=child_feature,\n splitting_value=left_value, idx=left_idx, parent=node)\n right_value = np.median(X[right_idx, child_feature])\n node.right = KdNode(depth=child_depth, splitting_feature=child_feature,\n splitting_value=right_value, idx=right_idx, parent=node)\n stack.append(node.left)\n stack.append(node.right)\n\n def _search(self, x, k=3):\n \"\"\"\n :param x: the target sample point. shape is (n_features,)\n :param k: the number of nearest neighbours to find.\n :return: a list of k nearest neighbours.\n \"\"\"\n # top-down\n cur_node = self.root\n # kd-tree is actually a full binary tree\n while cur_node.left:\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n cur_node = cur_node.left\n else:\n cur_node = cur_node.right\n # append samples in cur_node into k_nn. k_nn is a max heap\n k_nn = []\n # bottom-top\n while cur_node:\n for idx in cur_node.idx:\n # Euclidean distance\n dist = np.linalg.norm(self.X[idx] - x)\n # negate the dist to construct a max heap\n heapq.heappush(k_nn, (-dist, idx))\n if abs(x[cur_node.splitting_feature] - cur_node.splitting_value) < -k_nn[0][0] or len(k_nn) < k:\n # the max distance from x to samples in 'k-nn' > the distance from x to the splitting threshold\n # check samples of another child\n if x[cur_node.splitting_feature] <= cur_node.splitting_value:\n checking_samples = self._samples_of_subtree(cur_node.right, x, k)\n else:\n checking_samples = self._samples_of_subtree(cur_node.left, x, k)\n k_nn.extend(checking_samples)\n heapq.heapify(k_nn)\n # keep the size of k_nn <= k\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n cur_node = cur_node.parent\n # sort k_nn\n k_nn.sort(reverse=True)\n dists, idxs = zip(*k_nn)\n return [-d for d in dists], list(idxs)\n\n def search(self, X, k=3):\n \"\"\"\n :param X: the target sample points. 
shape is (n_samples, n_features)\n :param k: the number of nearest neighbours to find.\n :return: lists of k nearest neighbours for each sample point.\n \"\"\"\n assert self.root, 'must create a tree before search'\n\n result = [self._search(x, k) for x in X]\n dists, idxs = zip(*result)\n return np.array(dists), np.array(idxs)\n\n def _samples_of_subtree(self, root, x, k):\n # get k nearest neighbours from the subtree rooted at root\n k_nn = []\n\n def dfs(node):\n if not node:\n return\n for idx in node.idx:\n dist = np.linalg.norm(x - self.X[idx])\n heapq.heappush(k_nn, (-dist, idx))\n while len(k_nn) > k:\n heapq.heappop(k_nn)\n if len(k_nn) < k or \\\n (0 < len(k_nn) and abs(x[node.splitting_feature] - node.splitting_value) < -k_nn[0][0]):\n # have to search both two children\n dfs(node.left)\n dfs(node.right)\n else:\n if x[node.splitting_feature] <= node.splitting_value:\n dfs(node.left)\n else:\n dfs(node.right)\n\n dfs(root)\n return k_nn\n\n\nif __name__ == '__main__':\n from sklearn.neighbors import NearestNeighbors\n n_samples, n_features = 2000, 10\n n_test = 100\n K = 5\n X = np.random.random((n_samples, n_features))\n test_X = np.random.random((n_test, n_features))\n nbrs = NearestNeighbors(n_neighbors=K, algorithm='ball_tree').fit(X)\n distances, indices = nbrs.kneighbors(test_X)\n tree = KdTree()\n tree.create(X)\n dists, idxs = tree.search(test_X, k=K)\n print(np.all(distances == dists))\n print(np.all(indices == idxs))\n",
"step-ids": [
6,
7,
10,
12,
13
]
}
|
[
6,
7,
10,
12,
13
] |
#encoding:utf-8
class Employee():
    def __init__(self, name, sex, salary):
        self.name = name
        self.sex = sex
        self.salary = salary
    def give_raise(self, amount=222):
        # increase the employee's salary by the given amount
        self.salary += amount
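# quick usage sketch (hypothetical values):
#   e = Employee('Kim', 'F', 3000)
#   e.give_raise()        # salary becomes 3222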
|
normal
|
{
"blob_id": "014509170b98a38838859d3ca48c74ca6be0bd46",
"index": 7190,
"step-1": "#encoding:utf-8\nclass Employee():\n def __int__(self,name,sex,salary):\n self.name = name\n self.sex = sex\n self.salary = salary\n def give_raise(self):\n 222",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from aiogram import Dispatcher
from create_bot import bot
from data_base import sqlite_db
# new user in group
async def new_member(message):
new_user = message.new_chat_members[0]
user_id = new_user['id']
if new_user['username']:
user_name = new_user['username']
elif new_user['first_name']:
user_name = new_user['first_name']
elif new_user['last_name']:
user_name = new_user['last_name']
else:
user_name = 'Пользователь без имени'
await sqlite_db.sql_add_user_to_db(user_id, user_name)
await bot.send_message(message.chat.id, f'Добро пожаловать, {user_name}!\nКоманда - /start переход'
f' в пользовательское меню.\nКоманда - /help помощь по командам бота.')
# left user from group
async def left_member(message):
left_user = message.left_chat_member
user_name = await sqlite_db.sql_get_user_name(left_user['id'])
user_name = user_name[0][0]
await sqlite_db.sql_del_user_from_db(left_user['id'])
await bot.send_message(message.chat.id, f'Будем рады Вас видеть, {user_name}! Возвращайтесь!')
def register_handlers_for_other(dp: Dispatcher):
dp.register_message_handler(new_member, content_types=["new_chat_members"])
dp.register_message_handler(left_member, content_types=["left_chat_member"])
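# Minimal wiring sketch (hypothetical entry point); assumes the Dispatcher
# instance `dp` is created alongside `bot` in create_bot:
#   from create_bot import dp
#   register_handlers_for_other(dp)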
|
normal
|
{
"blob_id": "dfcfa4fa036fe8c058d66fc0b9ea73ddb9d4446e",
"index": 7524,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef register_handlers_for_other(dp: Dispatcher):\n dp.register_message_handler(new_member, content_types=['new_chat_members'])\n dp.register_message_handler(left_member, content_types=['left_chat_member']\n )\n",
"step-3": "<mask token>\n\n\nasync def new_member(message):\n new_user = message.new_chat_members[0]\n user_id = new_user['id']\n if new_user['username']:\n user_name = new_user['username']\n elif new_user['first_name']:\n user_name = new_user['first_name']\n elif new_user['last_name']:\n user_name = new_user['last_name']\n else:\n user_name = 'Пользователь без имени'\n await sqlite_db.sql_add_user_to_db(user_id, user_name)\n await bot.send_message(message.chat.id,\n f\"\"\"Добро пожаловать, {user_name}!\nКоманда - /start переход в пользовательское меню.\nКоманда - /help помощь по командам бота.\"\"\"\n )\n\n\nasync def left_member(message):\n left_user = message.left_chat_member\n user_name = await sqlite_db.sql_get_user_name(left_user['id'])\n user_name = user_name[0][0]\n await sqlite_db.sql_del_user_from_db(left_user['id'])\n await bot.send_message(message.chat.id,\n f'Будем рады Вас видеть, {user_name}! Возвращайтесь!')\n\n\ndef register_handlers_for_other(dp: Dispatcher):\n dp.register_message_handler(new_member, content_types=['new_chat_members'])\n dp.register_message_handler(left_member, content_types=['left_chat_member']\n )\n",
"step-4": "from aiogram import Dispatcher\nfrom create_bot import bot\nfrom data_base import sqlite_db\n\n\nasync def new_member(message):\n new_user = message.new_chat_members[0]\n user_id = new_user['id']\n if new_user['username']:\n user_name = new_user['username']\n elif new_user['first_name']:\n user_name = new_user['first_name']\n elif new_user['last_name']:\n user_name = new_user['last_name']\n else:\n user_name = 'Пользователь без имени'\n await sqlite_db.sql_add_user_to_db(user_id, user_name)\n await bot.send_message(message.chat.id,\n f\"\"\"Добро пожаловать, {user_name}!\nКоманда - /start переход в пользовательское меню.\nКоманда - /help помощь по командам бота.\"\"\"\n )\n\n\nasync def left_member(message):\n left_user = message.left_chat_member\n user_name = await sqlite_db.sql_get_user_name(left_user['id'])\n user_name = user_name[0][0]\n await sqlite_db.sql_del_user_from_db(left_user['id'])\n await bot.send_message(message.chat.id,\n f'Будем рады Вас видеть, {user_name}! Возвращайтесь!')\n\n\ndef register_handlers_for_other(dp: Dispatcher):\n dp.register_message_handler(new_member, content_types=['new_chat_members'])\n dp.register_message_handler(left_member, content_types=['left_chat_member']\n )\n",
"step-5": "from aiogram import Dispatcher\n\nfrom create_bot import bot\nfrom data_base import sqlite_db\n\n\n# new user in group\nasync def new_member(message):\n new_user = message.new_chat_members[0]\n user_id = new_user['id']\n if new_user['username']:\n user_name = new_user['username']\n elif new_user['first_name']:\n user_name = new_user['first_name']\n elif new_user['last_name']:\n user_name = new_user['last_name']\n else:\n user_name = 'Пользователь без имени'\n await sqlite_db.sql_add_user_to_db(user_id, user_name)\n await bot.send_message(message.chat.id, f'Добро пожаловать, {user_name}!\\nКоманда - /start переход'\n f' в пользовательское меню.\\nКоманда - /help помощь по командам бота.')\n\n\n# left user from group\nasync def left_member(message):\n left_user = message.left_chat_member\n user_name = await sqlite_db.sql_get_user_name(left_user['id'])\n user_name = user_name[0][0]\n await sqlite_db.sql_del_user_from_db(left_user['id'])\n await bot.send_message(message.chat.id, f'Будем рады Вас видеть, {user_name}! Возвращайтесь!')\n\n\ndef register_handlers_for_other(dp: Dispatcher):\n dp.register_message_handler(new_member, content_types=[\"new_chat_members\"])\n dp.register_message_handler(left_member, content_types=[\"left_chat_member\"])\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from typing import List
def sift_up(heap: List, pos: int = None):
if pos is None:
pos = len(heap) - 1
current, parent = pos, (pos - 1) // 2
while current > 0:
if heap[current] > heap[parent]:
heap[current], heap[parent] = heap[parent], heap[current]
else:
break
current, parent = parent, (parent - 1) // 2
def sift_down(heap: List, pos: int = 0):
while pos < len(heap):
left = pos * 2 + 1
right = pos * 2 + 2
if right < len(heap):
max_child = left if heap[left] > heap[right] else right
elif left < len(heap):
max_child = left
else:
return
if heap[pos] < heap[max_child]:
heap[pos], heap[max_child] = heap[max_child], heap[pos]
pos = max_child
def insert(heap: List, number: int):
heap.append(number)
sift_up(heap, len(heap) - 1)
def heapify(array: List):
for idx in range(len(array), -1, -1):
sift_down(array, idx)
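# Example: heapify([3, 1, 4, 1, 5]) rearranges the list in place into a
# max-heap, yielding [5, 3, 4, 1, 1] (the root is always the maximum).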
def pop(heap: List):
    # remove and return the maximum element (the root of the max-heap)
    if not heap:
        raise IndexError('pop from an empty heap')
    root = heap[0]
    heap[0] = heap[-1]  # move the last element to the root
    heap.pop()
    sift_down(heap)     # restore the heap property from the root
    return root
def make_answer(ops):
heap = list()
for op in ops:
op = op.split()
if len(op) > 1:
insert(heap, int(op[1]))
else:
            yield pop(heap)
if __name__ == "__main__":
pass
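    # A minimal demo (hypothetical input): two-token lines insert the number,
    # one-token lines extract the current maximum.
    demo_ops = ['insert 5', 'insert 9', 'insert 2', 'extract', 'extract']
    print(list(make_answer(demo_ops)))  # -> [9, 5]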
|
normal
|
{
"blob_id": "9cc6700ab14bed9d69d90c1540f6d42186033a19",
"index": 5052,
"step-1": "<mask token>\n\n\ndef sift_up(heap: List, pos: int=None):\n if pos is None:\n pos = len(heap) - 1\n current, parent = pos, (pos - 1) // 2\n while current > 0:\n if heap[current] > heap[parent]:\n heap[current], heap[parent] = heap[parent], heap[current]\n else:\n break\n current, parent = parent, (parent - 1) // 2\n\n\n<mask token>\n\n\ndef insert(heap: List, number: int):\n heap.append(number)\n sift_up(heap, len(heap) - 1)\n\n\ndef heapify(array: List):\n for idx in range(len(array), -1, -1):\n sift_down(array, idx)\n\n\ndef pop(heap: List):\n root = heap[0]\n if heap:\n heap[0] = heap[-1]\n heap.pop()\n sift_down(heap)\n return root\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef sift_up(heap: List, pos: int=None):\n if pos is None:\n pos = len(heap) - 1\n current, parent = pos, (pos - 1) // 2\n while current > 0:\n if heap[current] > heap[parent]:\n heap[current], heap[parent] = heap[parent], heap[current]\n else:\n break\n current, parent = parent, (parent - 1) // 2\n\n\ndef sift_down(heap: List, pos: int=0):\n while pos < len(heap):\n left = pos * 2 + 1\n right = pos * 2 + 2\n if right < len(heap):\n max_child = left if heap[left] > heap[right] else right\n elif left < len(heap):\n max_child = left\n else:\n return\n if heap[pos] < heap[max_child]:\n heap[pos], heap[max_child] = heap[max_child], heap[pos]\n pos = max_child\n\n\ndef insert(heap: List, number: int):\n heap.append(number)\n sift_up(heap, len(heap) - 1)\n\n\ndef heapify(array: List):\n for idx in range(len(array), -1, -1):\n sift_down(array, idx)\n\n\ndef pop(heap: List):\n root = heap[0]\n if heap:\n heap[0] = heap[-1]\n heap.pop()\n sift_down(heap)\n return root\n\n\ndef make_answer(ops):\n heap = list()\n for op in ops:\n op = op.split()\n if len(op) > 1:\n insert(heap, int(op[1]))\n else:\n yield pop(heap)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef sift_up(heap: List, pos: int=None):\n if pos is None:\n pos = len(heap) - 1\n current, parent = pos, (pos - 1) // 2\n while current > 0:\n if heap[current] > heap[parent]:\n heap[current], heap[parent] = heap[parent], heap[current]\n else:\n break\n current, parent = parent, (parent - 1) // 2\n\n\ndef sift_down(heap: List, pos: int=0):\n while pos < len(heap):\n left = pos * 2 + 1\n right = pos * 2 + 2\n if right < len(heap):\n max_child = left if heap[left] > heap[right] else right\n elif left < len(heap):\n max_child = left\n else:\n return\n if heap[pos] < heap[max_child]:\n heap[pos], heap[max_child] = heap[max_child], heap[pos]\n pos = max_child\n\n\ndef insert(heap: List, number: int):\n heap.append(number)\n sift_up(heap, len(heap) - 1)\n\n\ndef heapify(array: List):\n for idx in range(len(array), -1, -1):\n sift_down(array, idx)\n\n\ndef pop(heap: List):\n root = heap[0]\n if heap:\n heap[0] = heap[-1]\n heap.pop()\n sift_down(heap)\n return root\n\n\ndef make_answer(ops):\n heap = list()\n for op in ops:\n op = op.split()\n if len(op) > 1:\n insert(heap, int(op[1]))\n else:\n yield pop(heap)\n\n\nif __name__ == '__main__':\n pass\n",
"step-4": "from typing import List\n\n\ndef sift_up(heap: List, pos: int=None):\n if pos is None:\n pos = len(heap) - 1\n current, parent = pos, (pos - 1) // 2\n while current > 0:\n if heap[current] > heap[parent]:\n heap[current], heap[parent] = heap[parent], heap[current]\n else:\n break\n current, parent = parent, (parent - 1) // 2\n\n\ndef sift_down(heap: List, pos: int=0):\n while pos < len(heap):\n left = pos * 2 + 1\n right = pos * 2 + 2\n if right < len(heap):\n max_child = left if heap[left] > heap[right] else right\n elif left < len(heap):\n max_child = left\n else:\n return\n if heap[pos] < heap[max_child]:\n heap[pos], heap[max_child] = heap[max_child], heap[pos]\n pos = max_child\n\n\ndef insert(heap: List, number: int):\n heap.append(number)\n sift_up(heap, len(heap) - 1)\n\n\ndef heapify(array: List):\n for idx in range(len(array), -1, -1):\n sift_down(array, idx)\n\n\ndef pop(heap: List):\n root = heap[0]\n if heap:\n heap[0] = heap[-1]\n heap.pop()\n sift_down(heap)\n return root\n\n\ndef make_answer(ops):\n heap = list()\n for op in ops:\n op = op.split()\n if len(op) > 1:\n insert(heap, int(op[1]))\n else:\n yield pop(heap)\n\n\nif __name__ == '__main__':\n pass\n",
"step-5": "from typing import List\n\n\ndef sift_up(heap: List, pos: int = None):\n if pos is None:\n pos = len(heap) - 1\n current, parent = pos, (pos - 1) // 2\n\n while current > 0:\n if heap[current] > heap[parent]:\n heap[current], heap[parent] = heap[parent], heap[current]\n else:\n break\n current, parent = parent, (parent - 1) // 2\n\n\ndef sift_down(heap: List, pos: int = 0):\n while pos < len(heap):\n left = pos * 2 + 1\n right = pos * 2 + 2\n if right < len(heap):\n max_child = left if heap[left] > heap[right] else right\n elif left < len(heap):\n max_child = left\n else:\n return\n\n if heap[pos] < heap[max_child]:\n heap[pos], heap[max_child] = heap[max_child], heap[pos]\n\n pos = max_child\n\n\ndef insert(heap: List, number: int):\n heap.append(number)\n sift_up(heap, len(heap) - 1)\n\n\ndef heapify(array: List):\n for idx in range(len(array), -1, -1):\n sift_down(array, idx)\n\n\ndef pop(heap: List):\n root = heap[0]\n if heap:\n heap[0] = heap[-1]\n heap.pop()\n sift_down(heap)\n return root\n\n\ndef make_answer(ops):\n heap = list()\n for op in ops:\n op = op.split()\n if len(op) > 1:\n insert(heap, int(op[1]))\n else:\n yield(pop(heap))\n\n\nif __name__ == \"__main__\":\n pass\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
import pandas as pd
import numpy as np
import random
import copy
class Node(object):
'''
Defines a Node Class for storing characteristics and CPT of each node
'''
def __init__(self,name):
self.parents = []
self.children = []
self.name = name
self.cpt=[]
self.limit = 3
def addParent(self,x):
self.parents.append(x)
def addChild(self,x):
self.children.append(x)
def createCPT(self,data):
cpt = computeProb(data,self.limit,self.parents,self.name)
self.cpt = cpt
def computeProb(data,limit,cols,target):
numCol = len(cols)
if numCol==0:
return(cpt_0(data,limit,cols,target))
elif numCol ==1:
return(cpt_1(data,limit,cols,target))
elif numCol ==2:
return(cpt_2(data,limit,cols,target))
elif numCol ==3:
return(cpt_3(data,limit,cols,target))
else:
return(cpt_4(data,limit,cols,target))
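# e.g. computeProb(df, 3, ['x1', 'Class'], 'x2') routes to cpt_2 and returns a
# table of P(x2 | x1, Class) with one row per (x1, Class, x2) value combination.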
#Functions for computing the Conditional Probability Tables (CPTs)
def cpt_2(data,limit,cols,target):
cpt = []
alpha = 0.001
for var1 in range(limit):
for var2 in range(limit):
totalN = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) ] )
for targetVar in range(limit):
count = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[target]==targetVar) ] )
if totalN ==0:
                    cpt.append([var1,var2,targetVar, alpha/float(totalN + 3*alpha)])
else:
cpt.append([var1,var2,targetVar, float(count)/float(totalN + 3*alpha)])
cpt = pd.DataFrame(cpt, columns=[cols[0],cols[1],target,'prob'])
return(cpt)
def cpt_1(data,limit,cols,target):
cpt = []
alpha = 0.001
for var1 in range(limit):
totalN = len( data[ (data[cols[0]]==var1)] )
for targetVar in range(limit):
count = len( data[ (data[cols[0]]==var1) & (data[target]==targetVar) ] )
if totalN ==0:
                cpt.append([var1,targetVar, alpha/float(totalN + 3*alpha)])
else:
cpt.append([var1,targetVar, float(count)/float(totalN + 3*alpha)])
cpt = pd.DataFrame(cpt, columns=[cols[0],target,'prob'])
return(cpt)
def cpt_0(data,limit,cols,target):
alpha = 0.001
cpt = []
totalN = len( data )
for targetVar in range(limit):
count = len( data[ (data[target]==targetVar) ] )
if totalN ==0:
cpt.append([targetVar, alpha/float(totalN + 3*alpha)])
else:
cpt.append([targetVar, float(count)/(totalN + 3*alpha)])
cpt = pd.DataFrame(cpt, columns=[target,'prob'])
return(cpt)
def cpt_3(data,limit,cols,target):
cpt = []
alpha = 0.001
for var1 in range(limit):
for var2 in range(limit):
for var3 in range(limit):
totalN = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[cols[2]]==var3) ] )
for targetVar in range(limit):
count = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[cols[2]]==var3) & (data[target]==targetVar) ] )
if totalN ==0:
cpt.append([var1,var2,var3,targetVar, alpha/float(totalN + 3*alpha)])
else:
cpt.append([var1,var2,var3,targetVar, float(count)/float(totalN + 3*alpha)])
cpt = pd.DataFrame(cpt, columns=[cols[0],cols[1],cols[2],target,'prob'])
return(cpt)
def cpt_4(data,limit,cols,target):
cpt = []
alpha = 0.001
for var1 in range(limit):
for var2 in range(limit):
for var3 in range(limit):
for var4 in range(limit):
totalN = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[cols[2]]==var3) & (data[cols[3]]==var4) ] )
for targetVar in range(limit):
count = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[cols[2]]==var3) & (data[cols[3]]==var4) & (data[target]==targetVar) ] )
if totalN ==0:
cpt.append([var1,var2,var3,var4,targetVar, alpha/float(totalN + 3*alpha)])
else:
cpt.append([var1,var2,var3,var4,targetVar, float(count)/float(totalN + 3*alpha)])
cpt = pd.DataFrame(cpt, columns=[cols[0],cols[1],cols[2],cols[3],target,'prob'])
return(cpt)
structMap = {0:[1,2],1:[1,3],2:[1,4],3:[2,3],4:[2,4],5:[3,4]} # Mapping of the structure position and the nodes that it connects
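# e.g. structure=[1,0,0,-1,0,0] adds the directed edges x1 -> x2 (entry 0 is 1,
# pair [1,2]) and x3 -> x2 (entry 3 is -1, which reverses the pair [2,3]).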
class BayesNet(object):
def __init__(self,numNodes,structure):
self.structure = structure # Array that defines the structure of the Bayes Net
self.numNodes = numNodes
self.varNodes={}
self.classNode=0
def initGraph(self):
'''
Initializes components of the Bayes Net Graph
'''
self.classNode = Node('Class')
for i in range(self.numNodes):
self.varNodes['x'+str(i+1)]=Node('x'+str(i+1))
self.varNodes['x'+str(i+1)].parents.append('Class')
for i in range(len(self.structure)):
edgeNodes = structMap[i]
firstNode = 'x'+str(edgeNodes[0])
secondNode = 'x'+str(edgeNodes[1])
if self.structure[i]==1:
self.varNodes[firstNode].children.append(secondNode)
self.varNodes[secondNode].parents.append(firstNode)
elif self.structure[i]==-1:
self.varNodes[secondNode].children.append(firstNode)
self.varNodes[firstNode].parents.append(secondNode)
def compCPT(self,data):
'''
Computes Conditional Probability Table for all the nodes
'''
self.classNode.createCPT(data)
for i in range(len(self.varNodes)):
self.varNodes['x'+str(i+1)].createCPT(data)
def predict(self,data):
'''
Predicts most likely class given a single data sample
'''
maxProb = 0
maxProbClass = 0
for classVal in range(3):
dt = data.copy()
dt["Class"] = classVal
prob = 1.0
for i in range(self.numNodes):
#print('Node is x'+str(i+1))
pt=self.varNodes['x'+str(i+1)].cpt
mergeList = self.varNodes['x'+str(i+1)].parents + ['x'+str(i+1)]
cpt_prob = pd.merge(left=pt,right=dt,on=mergeList,how='inner')['prob'][0]
#print("cpt_prob is ",str(cpt_prob))
prob = cpt_prob*prob
#print("Class :%d Prob : %f"%(classVal,prob))
if prob>maxProb:
maxProb = prob
maxProbClass = classVal
return(maxProbClass)
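# Minimal usage sketch (hypothetical DataFrame with columns x1..x4 taking
# values in {0, 1, 2} and a 'Class' column, split into train_df/test_df):
#   net = BayesNet(numNodes=4, structure=[1, 0, 0, -1, 0, 1])
#   net.initGraph()
#   net.compCPT(train_df)
#   label = net.predict(test_df.iloc[[0]].reset_index(drop=True))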
|
normal
|
{
"blob_id": "eb4bc008b7e68f8a6e80e837fa970d77a5ed3547",
"index": 8218,
"step-1": "<mask token>\n\n\nclass Node(object):\n \"\"\"\n Defines a Node Class for storing characteristics and CPT of each node\n \"\"\"\n\n def __init__(self, name):\n self.parents = []\n self.children = []\n self.name = name\n self.cpt = []\n self.limit = 3\n\n def addParent(self, x):\n self.parents.append(x)\n\n def addChild(self, x):\n self.children.append(x)\n\n def createCPT(self, data):\n cpt = computeProb(data, self.limit, self.parents, self.name)\n self.cpt = cpt\n\n\n<mask token>\n\n\ndef cpt_0(data, limit, cols, target):\n alpha = 0.001\n cpt = []\n totalN = len(data)\n for targetVar in range(limit):\n count = len(data[data[target] == targetVar])\n if totalN == 0:\n cpt.append([targetVar, alpha / float(totalN + 3 * alpha)])\n else:\n cpt.append([targetVar, float(count) / (totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[target, 'prob'])\n return cpt\n\n\n<mask token>\n\n\nclass BayesNet(object):\n\n def __init__(self, numNodes, structure):\n self.structure = structure\n self.numNodes = numNodes\n self.varNodes = {}\n self.classNode = 0\n\n def initGraph(self):\n \"\"\"\n Initializes components of the Bayes Net Graph\n \"\"\"\n self.classNode = Node('Class')\n for i in range(self.numNodes):\n self.varNodes['x' + str(i + 1)] = Node('x' + str(i + 1))\n self.varNodes['x' + str(i + 1)].parents.append('Class')\n for i in range(len(self.structure)):\n edgeNodes = structMap[i]\n firstNode = 'x' + str(edgeNodes[0])\n secondNode = 'x' + str(edgeNodes[1])\n if self.structure[i] == 1:\n self.varNodes[firstNode].children.append(secondNode)\n self.varNodes[secondNode].parents.append(firstNode)\n elif self.structure[i] == -1:\n self.varNodes[secondNode].children.append(firstNode)\n self.varNodes[firstNode].parents.append(secondNode)\n\n def compCPT(self, data):\n \"\"\"\n Computes Conditional Probability Table for all the nodes\n \"\"\"\n self.classNode.createCPT(data)\n for i in range(len(self.varNodes)):\n self.varNodes['x' + str(i + 1)].createCPT(data)\n\n def predict(self, data):\n \"\"\"\n Predicts most likely class given a single data sample\n \n \"\"\"\n maxProb = 0\n maxProbClass = 0\n for classVal in range(3):\n dt = data.copy()\n dt['Class'] = classVal\n prob = 1.0\n for i in range(self.numNodes):\n pt = self.varNodes['x' + str(i + 1)].cpt\n mergeList = self.varNodes['x' + str(i + 1)].parents + ['x' +\n str(i + 1)]\n cpt_prob = pd.merge(left=pt, right=dt, on=mergeList, how=\n 'inner')['prob'][0]\n prob = cpt_prob * prob\n if prob > maxProb:\n maxProb = prob\n maxProbClass = classVal\n return maxProbClass\n",
"step-2": "<mask token>\n\n\nclass Node(object):\n \"\"\"\n Defines a Node Class for storing characteristics and CPT of each node\n \"\"\"\n\n def __init__(self, name):\n self.parents = []\n self.children = []\n self.name = name\n self.cpt = []\n self.limit = 3\n\n def addParent(self, x):\n self.parents.append(x)\n\n def addChild(self, x):\n self.children.append(x)\n\n def createCPT(self, data):\n cpt = computeProb(data, self.limit, self.parents, self.name)\n self.cpt = cpt\n\n\n<mask token>\n\n\ndef cpt_1(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n totalN = len(data[data[cols[0]] == var1])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[target] ==\n targetVar)])\n if totalN == 0:\n cpt.append([var1, targetVar, float(totalN + 3 * alpha)])\n else:\n cpt.append([var1, targetVar, float(count) / float(totalN + \n 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], target, 'prob'])\n return cpt\n\n\ndef cpt_0(data, limit, cols, target):\n alpha = 0.001\n cpt = []\n totalN = len(data)\n for targetVar in range(limit):\n count = len(data[data[target] == targetVar])\n if totalN == 0:\n cpt.append([targetVar, alpha / float(totalN + 3 * alpha)])\n else:\n cpt.append([targetVar, float(count) / (totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[target, 'prob'])\n return cpt\n\n\n<mask token>\n\n\nclass BayesNet(object):\n\n def __init__(self, numNodes, structure):\n self.structure = structure\n self.numNodes = numNodes\n self.varNodes = {}\n self.classNode = 0\n\n def initGraph(self):\n \"\"\"\n Initializes components of the Bayes Net Graph\n \"\"\"\n self.classNode = Node('Class')\n for i in range(self.numNodes):\n self.varNodes['x' + str(i + 1)] = Node('x' + str(i + 1))\n self.varNodes['x' + str(i + 1)].parents.append('Class')\n for i in range(len(self.structure)):\n edgeNodes = structMap[i]\n firstNode = 'x' + str(edgeNodes[0])\n secondNode = 'x' + str(edgeNodes[1])\n if self.structure[i] == 1:\n self.varNodes[firstNode].children.append(secondNode)\n self.varNodes[secondNode].parents.append(firstNode)\n elif self.structure[i] == -1:\n self.varNodes[secondNode].children.append(firstNode)\n self.varNodes[firstNode].parents.append(secondNode)\n\n def compCPT(self, data):\n \"\"\"\n Computes Conditional Probability Table for all the nodes\n \"\"\"\n self.classNode.createCPT(data)\n for i in range(len(self.varNodes)):\n self.varNodes['x' + str(i + 1)].createCPT(data)\n\n def predict(self, data):\n \"\"\"\n Predicts most likely class given a single data sample\n \n \"\"\"\n maxProb = 0\n maxProbClass = 0\n for classVal in range(3):\n dt = data.copy()\n dt['Class'] = classVal\n prob = 1.0\n for i in range(self.numNodes):\n pt = self.varNodes['x' + str(i + 1)].cpt\n mergeList = self.varNodes['x' + str(i + 1)].parents + ['x' +\n str(i + 1)]\n cpt_prob = pd.merge(left=pt, right=dt, on=mergeList, how=\n 'inner')['prob'][0]\n prob = cpt_prob * prob\n if prob > maxProb:\n maxProb = prob\n maxProbClass = classVal\n return maxProbClass\n",
"step-3": "<mask token>\n\n\nclass Node(object):\n \"\"\"\n Defines a Node Class for storing characteristics and CPT of each node\n \"\"\"\n\n def __init__(self, name):\n self.parents = []\n self.children = []\n self.name = name\n self.cpt = []\n self.limit = 3\n\n def addParent(self, x):\n self.parents.append(x)\n\n def addChild(self, x):\n self.children.append(x)\n\n def createCPT(self, data):\n cpt = computeProb(data, self.limit, self.parents, self.name)\n self.cpt = cpt\n\n\n<mask token>\n\n\ndef cpt_2(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n for var2 in range(limit):\n totalN = len(data[(data[cols[0]] == var1) & (data[cols[1]] ==\n var2)])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[cols[1]] ==\n var2) & (data[target] == targetVar)])\n if totalN == 0:\n cpt.append([var1, var2, targetVar, float(totalN + 3 *\n alpha)])\n else:\n cpt.append([var1, var2, targetVar, float(count) / float\n (totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], cols[1], target, 'prob'])\n return cpt\n\n\ndef cpt_1(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n totalN = len(data[data[cols[0]] == var1])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[target] ==\n targetVar)])\n if totalN == 0:\n cpt.append([var1, targetVar, float(totalN + 3 * alpha)])\n else:\n cpt.append([var1, targetVar, float(count) / float(totalN + \n 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], target, 'prob'])\n return cpt\n\n\ndef cpt_0(data, limit, cols, target):\n alpha = 0.001\n cpt = []\n totalN = len(data)\n for targetVar in range(limit):\n count = len(data[data[target] == targetVar])\n if totalN == 0:\n cpt.append([targetVar, alpha / float(totalN + 3 * alpha)])\n else:\n cpt.append([targetVar, float(count) / (totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[target, 'prob'])\n return cpt\n\n\n<mask token>\n\n\ndef cpt_4(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n for var2 in range(limit):\n for var3 in range(limit):\n for var4 in range(limit):\n totalN = len(data[(data[cols[0]] == var1) & (data[cols[\n 1]] == var2) & (data[cols[2]] == var3) & (data[cols\n [3]] == var4)])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[\n cols[1]] == var2) & (data[cols[2]] == var3) & (\n data[cols[3]] == var4) & (data[target] ==\n targetVar)])\n if totalN == 0:\n cpt.append([var1, var2, var3, var4, targetVar, \n alpha / float(totalN + 3 * alpha)])\n else:\n cpt.append([var1, var2, var3, var4, targetVar, \n float(count) / float(totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], cols[1], cols[2], cols[3],\n target, 'prob'])\n return cpt\n\n\n<mask token>\n\n\nclass BayesNet(object):\n\n def __init__(self, numNodes, structure):\n self.structure = structure\n self.numNodes = numNodes\n self.varNodes = {}\n self.classNode = 0\n\n def initGraph(self):\n \"\"\"\n Initializes components of the Bayes Net Graph\n \"\"\"\n self.classNode = Node('Class')\n for i in range(self.numNodes):\n self.varNodes['x' + str(i + 1)] = Node('x' + str(i + 1))\n self.varNodes['x' + str(i + 1)].parents.append('Class')\n for i in range(len(self.structure)):\n edgeNodes = structMap[i]\n firstNode = 'x' + str(edgeNodes[0])\n secondNode = 'x' + str(edgeNodes[1])\n if self.structure[i] == 1:\n self.varNodes[firstNode].children.append(secondNode)\n self.varNodes[secondNode].parents.append(firstNode)\n elif 
self.structure[i] == -1:\n self.varNodes[secondNode].children.append(firstNode)\n self.varNodes[firstNode].parents.append(secondNode)\n\n def compCPT(self, data):\n \"\"\"\n Computes Conditional Probability Table for all the nodes\n \"\"\"\n self.classNode.createCPT(data)\n for i in range(len(self.varNodes)):\n self.varNodes['x' + str(i + 1)].createCPT(data)\n\n def predict(self, data):\n \"\"\"\n Predicts most likely class given a single data sample\n \n \"\"\"\n maxProb = 0\n maxProbClass = 0\n for classVal in range(3):\n dt = data.copy()\n dt['Class'] = classVal\n prob = 1.0\n for i in range(self.numNodes):\n pt = self.varNodes['x' + str(i + 1)].cpt\n mergeList = self.varNodes['x' + str(i + 1)].parents + ['x' +\n str(i + 1)]\n cpt_prob = pd.merge(left=pt, right=dt, on=mergeList, how=\n 'inner')['prob'][0]\n prob = cpt_prob * prob\n if prob > maxProb:\n maxProb = prob\n maxProbClass = classVal\n return maxProbClass\n",
"step-4": "<mask token>\n\n\nclass Node(object):\n \"\"\"\n Defines a Node Class for storing characteristics and CPT of each node\n \"\"\"\n\n def __init__(self, name):\n self.parents = []\n self.children = []\n self.name = name\n self.cpt = []\n self.limit = 3\n\n def addParent(self, x):\n self.parents.append(x)\n\n def addChild(self, x):\n self.children.append(x)\n\n def createCPT(self, data):\n cpt = computeProb(data, self.limit, self.parents, self.name)\n self.cpt = cpt\n\n\ndef computeProb(data, limit, cols, target):\n numCol = len(cols)\n if numCol == 0:\n return cpt_0(data, limit, cols, target)\n elif numCol == 1:\n return cpt_1(data, limit, cols, target)\n elif numCol == 2:\n return cpt_2(data, limit, cols, target)\n elif numCol == 3:\n return cpt_3(data, limit, cols, target)\n else:\n return cpt_4(data, limit, cols, target)\n\n\ndef cpt_2(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n for var2 in range(limit):\n totalN = len(data[(data[cols[0]] == var1) & (data[cols[1]] ==\n var2)])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[cols[1]] ==\n var2) & (data[target] == targetVar)])\n if totalN == 0:\n cpt.append([var1, var2, targetVar, float(totalN + 3 *\n alpha)])\n else:\n cpt.append([var1, var2, targetVar, float(count) / float\n (totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], cols[1], target, 'prob'])\n return cpt\n\n\ndef cpt_1(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n totalN = len(data[data[cols[0]] == var1])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[target] ==\n targetVar)])\n if totalN == 0:\n cpt.append([var1, targetVar, float(totalN + 3 * alpha)])\n else:\n cpt.append([var1, targetVar, float(count) / float(totalN + \n 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], target, 'prob'])\n return cpt\n\n\ndef cpt_0(data, limit, cols, target):\n alpha = 0.001\n cpt = []\n totalN = len(data)\n for targetVar in range(limit):\n count = len(data[data[target] == targetVar])\n if totalN == 0:\n cpt.append([targetVar, alpha / float(totalN + 3 * alpha)])\n else:\n cpt.append([targetVar, float(count) / (totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[target, 'prob'])\n return cpt\n\n\ndef cpt_3(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n for var2 in range(limit):\n for var3 in range(limit):\n totalN = len(data[(data[cols[0]] == var1) & (data[cols[1]] ==\n var2) & (data[cols[2]] == var3)])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[cols[1\n ]] == var2) & (data[cols[2]] == var3) & (data[\n target] == targetVar)])\n if totalN == 0:\n cpt.append([var1, var2, var3, targetVar, alpha /\n float(totalN + 3 * alpha)])\n else:\n cpt.append([var1, var2, var3, targetVar, float(\n count) / float(totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], cols[1], cols[2], target, 'prob']\n )\n return cpt\n\n\ndef cpt_4(data, limit, cols, target):\n cpt = []\n alpha = 0.001\n for var1 in range(limit):\n for var2 in range(limit):\n for var3 in range(limit):\n for var4 in range(limit):\n totalN = len(data[(data[cols[0]] == var1) & (data[cols[\n 1]] == var2) & (data[cols[2]] == var3) & (data[cols\n [3]] == var4)])\n for targetVar in range(limit):\n count = len(data[(data[cols[0]] == var1) & (data[\n cols[1]] == var2) & (data[cols[2]] == var3) & (\n data[cols[3]] == var4) & (data[target] ==\n targetVar)])\n if totalN == 0:\n 
cpt.append([var1, var2, var3, var4, targetVar, \n alpha / float(totalN + 3 * alpha)])\n else:\n cpt.append([var1, var2, var3, var4, targetVar, \n float(count) / float(totalN + 3 * alpha)])\n cpt = pd.DataFrame(cpt, columns=[cols[0], cols[1], cols[2], cols[3],\n target, 'prob'])\n return cpt\n\n\n<mask token>\n\n\nclass BayesNet(object):\n\n def __init__(self, numNodes, structure):\n self.structure = structure\n self.numNodes = numNodes\n self.varNodes = {}\n self.classNode = 0\n\n def initGraph(self):\n \"\"\"\n Initializes components of the Bayes Net Graph\n \"\"\"\n self.classNode = Node('Class')\n for i in range(self.numNodes):\n self.varNodes['x' + str(i + 1)] = Node('x' + str(i + 1))\n self.varNodes['x' + str(i + 1)].parents.append('Class')\n for i in range(len(self.structure)):\n edgeNodes = structMap[i]\n firstNode = 'x' + str(edgeNodes[0])\n secondNode = 'x' + str(edgeNodes[1])\n if self.structure[i] == 1:\n self.varNodes[firstNode].children.append(secondNode)\n self.varNodes[secondNode].parents.append(firstNode)\n elif self.structure[i] == -1:\n self.varNodes[secondNode].children.append(firstNode)\n self.varNodes[firstNode].parents.append(secondNode)\n\n def compCPT(self, data):\n \"\"\"\n Computes Conditional Probability Table for all the nodes\n \"\"\"\n self.classNode.createCPT(data)\n for i in range(len(self.varNodes)):\n self.varNodes['x' + str(i + 1)].createCPT(data)\n\n def predict(self, data):\n \"\"\"\n Predicts most likely class given a single data sample\n \n \"\"\"\n maxProb = 0\n maxProbClass = 0\n for classVal in range(3):\n dt = data.copy()\n dt['Class'] = classVal\n prob = 1.0\n for i in range(self.numNodes):\n pt = self.varNodes['x' + str(i + 1)].cpt\n mergeList = self.varNodes['x' + str(i + 1)].parents + ['x' +\n str(i + 1)]\n cpt_prob = pd.merge(left=pt, right=dt, on=mergeList, how=\n 'inner')['prob'][0]\n prob = cpt_prob * prob\n if prob > maxProb:\n maxProb = prob\n maxProbClass = classVal\n return maxProbClass\n",
"step-5": "import pandas as pd\nimport numpy as np\nimport random\nimport copy\n\nclass Node(object):\n '''\n Defines a Node Class for storing characteristics and CPT of each node\n '''\n \n def __init__(self,name):\n self.parents = []\n self.children = []\n self.name = name\n self.cpt=[]\n self.limit = 3\n \n def addParent(self,x):\n self.parents.append(x)\n \n def addChild(self,x):\n self.children.append(x)\n \n def createCPT(self,data):\n cpt = computeProb(data,self.limit,self.parents,self.name)\n self.cpt = cpt\n\n\ndef computeProb(data,limit,cols,target):\n \n numCol = len(cols)\n \n if numCol==0:\n return(cpt_0(data,limit,cols,target))\n elif numCol ==1:\n return(cpt_1(data,limit,cols,target))\n elif numCol ==2:\n return(cpt_2(data,limit,cols,target))\n elif numCol ==3:\n return(cpt_3(data,limit,cols,target))\n else:\n return(cpt_4(data,limit,cols,target))\n \n\n#Functions for computing the Conditional Probability Tables (CPTs)\n\ndef cpt_2(data,limit,cols,target):\n \n cpt = []\n alpha = 0.001\n \n for var1 in range(limit):\n for var2 in range(limit):\n \n totalN = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) ] )\n \n for targetVar in range(limit):\n \n count = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[target]==targetVar) ] )\n if totalN ==0:\n cpt.append([var1,var2,targetVar, float(totalN + 3*alpha)])\n else:\n cpt.append([var1,var2,targetVar, float(count)/float(totalN + 3*alpha)])\n \n cpt = pd.DataFrame(cpt, columns=[cols[0],cols[1],target,'prob'])\n \n return(cpt)\n\ndef cpt_1(data,limit,cols,target):\n \n cpt = []\n alpha = 0.001\n \n for var1 in range(limit):\n \n \n totalN = len( data[ (data[cols[0]]==var1)] )\n \n \n for targetVar in range(limit):\n \n count = len( data[ (data[cols[0]]==var1) & (data[target]==targetVar) ] )\n \n if totalN ==0:\n cpt.append([var1,targetVar, float(totalN + 3*alpha)])\n else:\n cpt.append([var1,targetVar, float(count)/float(totalN + 3*alpha)])\n \n cpt = pd.DataFrame(cpt, columns=[cols[0],target,'prob'])\n \n return(cpt)\n\ndef cpt_0(data,limit,cols,target):\n \n alpha = 0.001\n cpt = []\n \n \n totalN = len( data )\n \n \n for targetVar in range(limit):\n \n count = len( data[ (data[target]==targetVar) ] )\n if totalN ==0:\n cpt.append([targetVar, alpha/float(totalN + 3*alpha)])\n else:\n cpt.append([targetVar, float(count)/(totalN + 3*alpha)])\n \n cpt = pd.DataFrame(cpt, columns=[target,'prob'])\n \n return(cpt)\n\n\ndef cpt_3(data,limit,cols,target):\n \n cpt = []\n alpha = 0.001\n \n for var1 in range(limit):\n for var2 in range(limit):\n for var3 in range(limit):\n \n totalN = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[cols[2]]==var3) ] )\n\n for targetVar in range(limit):\n\n count = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[cols[2]]==var3) & (data[target]==targetVar) ] )\n if totalN ==0:\n cpt.append([var1,var2,var3,targetVar, alpha/float(totalN + 3*alpha)])\n else:\n cpt.append([var1,var2,var3,targetVar, float(count)/float(totalN + 3*alpha)])\n \n cpt = pd.DataFrame(cpt, columns=[cols[0],cols[1],cols[2],target,'prob'])\n \n return(cpt)\n\ndef cpt_4(data,limit,cols,target):\n \n cpt = []\n alpha = 0.001\n \n for var1 in range(limit):\n for var2 in range(limit):\n for var3 in range(limit):\n for var4 in range(limit):\n \n totalN = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & (data[cols[2]]==var3) & (data[cols[3]]==var4) ] )\n\n for targetVar in range(limit):\n\n count = len( data[ (data[cols[0]]==var1) & (data[cols[1]]==var2) & 
(data[cols[2]]==var3) & (data[cols[3]]==var4) & (data[target]==targetVar) ] )\n if totalN ==0:\n cpt.append([var1,var2,var3,var4,targetVar, alpha/float(totalN + 3*alpha)])\n else:\n cpt.append([var1,var2,var3,var4,targetVar, float(count)/float(totalN + 3*alpha)])\n\n cpt = pd.DataFrame(cpt, columns=[cols[0],cols[1],cols[2],cols[3],target,'prob'])\n \n return(cpt)\n\nstructMap = {0:[1,2],1:[1,3],2:[1,4],3:[2,3],4:[2,4],5:[3,4]} # Mapping of the structure position and the nodes that it connects\n\n\nclass BayesNet(object):\n \n def __init__(self,numNodes,structure):\n self.structure = structure # Array that defines the structure of the Bayes Net\n self.numNodes = numNodes\n self.varNodes={}\n self.classNode=0\n \n \n def initGraph(self):\n '''\n Initializes components of the Bayes Net Graph\n '''\n \n self.classNode = Node('Class')\n \n for i in range(self.numNodes):\n self.varNodes['x'+str(i+1)]=Node('x'+str(i+1))\n self.varNodes['x'+str(i+1)].parents.append('Class')\n \n for i in range(len(self.structure)):\n \n edgeNodes = structMap[i]\n firstNode = 'x'+str(edgeNodes[0])\n secondNode = 'x'+str(edgeNodes[1])\n \n if self.structure[i]==1:\n self.varNodes[firstNode].children.append(secondNode)\n self.varNodes[secondNode].parents.append(firstNode)\n elif self.structure[i]==-1:\n self.varNodes[secondNode].children.append(firstNode)\n self.varNodes[firstNode].parents.append(secondNode)\n \n def compCPT(self,data):\n '''\n Computes Conditional Probability Table for all the nodes\n '''\n \n self.classNode.createCPT(data)\n \n for i in range(len(self.varNodes)):\n self.varNodes['x'+str(i+1)].createCPT(data)\n \n \n def predict(self,data):\n '''\n Predicts most likely class given a single data sample\n \n '''\n maxProb = 0\n maxProbClass = 0\n\n for classVal in range(3):\n\n dt = data.copy()\n dt[\"Class\"] = classVal\n prob = 1.0\n\n for i in range(self.numNodes):\n #print('Node is x'+str(i+1))\n\n pt=self.varNodes['x'+str(i+1)].cpt\n\n mergeList = self.varNodes['x'+str(i+1)].parents + ['x'+str(i+1)]\n\n cpt_prob = pd.merge(left=pt,right=dt,on=mergeList,how='inner')['prob'][0]\n #print(\"cpt_prob is \",str(cpt_prob))\n\n prob = cpt_prob*prob\n\n #print(\"Class :%d Prob : %f\"%(classVal,prob))\n\n if prob>maxProb:\n maxProb = prob\n maxProbClass = classVal\n \n return(maxProbClass)\n\n\n ",
"step-ids": [
12,
13,
15,
17,
20
]
}
|
[
12,
13,
15,
17,
20
] |
# Percy's playground.
from __future__ import print_function
import sympy as sp
import numpy as np
import BorderBasis as BB
np.set_printoptions(precision=3)
from IPython.display import display, Markdown, Math
sp.init_printing()
R, x, y = sp.ring('x,y', sp.RR, order=sp.grevlex)
I = [ x**2 + y**2 - 1.0, x + y ]
R, x, y, z = sp.ring('x,y,z', sp.RR, order=sp.grevlex)
I = [ x**2 - 1, y**2 - 4, z**2 - 9]
# n = 4 takes a long time
n = 4
Rvs = sp.ring(' '.join('v'+str(i) for i in range(1, n + 1)), sp.RR, order=sp.grevlex)
R, vs = Rvs[0], Rvs[1:]
I = []
I.extend([v**2 - 1 for v in vs])
#I.extend([(v-1)**2 for v in vs])
#I.extend([v-1 for v in vs])
#I.extend([vs[i] - vs[i-1] for i in range(1, len(vs))]) # Makes it fast
print('Generating')
B = BB.BorderBasisFactory(1e-5).generate(R,I)
print('Done')
print("=== Generator Basis:")
for f in B.generator_basis:
display(f.as_expr())
print("=== Quotient Basis:")
for f in B.quotient_basis():
display(f.as_expr())
# v2 is always zero
print("=== Variety:")
for v in B.zeros():
print(zip(R.symbols, v))
|
normal
|
{
"blob_id": "88af8b4eeb40ecf19622ecde1a5dea9a078bb66c",
"index": 8817,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.set_printoptions(precision=3)\n<mask token>\nsp.init_printing()\n<mask token>\nI.extend([(v ** 2 - 1) for v in vs])\nprint('Generating')\n<mask token>\nprint('Done')\nprint('=== Generator Basis:')\nfor f in B.generator_basis:\n display(f.as_expr())\nprint('=== Quotient Basis:')\nfor f in B.quotient_basis():\n display(f.as_expr())\nprint('=== Variety:')\nfor v in B.zeros():\n print(zip(R.symbols, v))\n",
"step-3": "<mask token>\nnp.set_printoptions(precision=3)\n<mask token>\nsp.init_printing()\nR, x, y = sp.ring('x,y', sp.RR, order=sp.grevlex)\nI = [x ** 2 + y ** 2 - 1.0, x + y]\nR, x, y, z = sp.ring('x,y,z', sp.RR, order=sp.grevlex)\nI = [x ** 2 - 1, y ** 2 - 4, z ** 2 - 9]\nn = 4\nRvs = sp.ring(' '.join('v' + str(i) for i in range(1, n + 1)), sp.RR, order\n =sp.grevlex)\nR, vs = Rvs[0], Rvs[1:]\nI = []\nI.extend([(v ** 2 - 1) for v in vs])\nprint('Generating')\nB = BB.BorderBasisFactory(1e-05).generate(R, I)\nprint('Done')\nprint('=== Generator Basis:')\nfor f in B.generator_basis:\n display(f.as_expr())\nprint('=== Quotient Basis:')\nfor f in B.quotient_basis():\n display(f.as_expr())\nprint('=== Variety:')\nfor v in B.zeros():\n print(zip(R.symbols, v))\n",
"step-4": "from __future__ import print_function\nimport sympy as sp\nimport numpy as np\nimport BorderBasis as BB\nnp.set_printoptions(precision=3)\nfrom IPython.display import display, Markdown, Math\nsp.init_printing()\nR, x, y = sp.ring('x,y', sp.RR, order=sp.grevlex)\nI = [x ** 2 + y ** 2 - 1.0, x + y]\nR, x, y, z = sp.ring('x,y,z', sp.RR, order=sp.grevlex)\nI = [x ** 2 - 1, y ** 2 - 4, z ** 2 - 9]\nn = 4\nRvs = sp.ring(' '.join('v' + str(i) for i in range(1, n + 1)), sp.RR, order\n =sp.grevlex)\nR, vs = Rvs[0], Rvs[1:]\nI = []\nI.extend([(v ** 2 - 1) for v in vs])\nprint('Generating')\nB = BB.BorderBasisFactory(1e-05).generate(R, I)\nprint('Done')\nprint('=== Generator Basis:')\nfor f in B.generator_basis:\n display(f.as_expr())\nprint('=== Quotient Basis:')\nfor f in B.quotient_basis():\n display(f.as_expr())\nprint('=== Variety:')\nfor v in B.zeros():\n print(zip(R.symbols, v))\n",
"step-5": "# Percy's playground.\n\nfrom __future__ import print_function\nimport sympy as sp\nimport numpy as np\nimport BorderBasis as BB\nnp.set_printoptions(precision=3)\nfrom IPython.display import display, Markdown, Math\nsp.init_printing()\n\nR, x, y = sp.ring('x,y', sp.RR, order=sp.grevlex)\nI = [ x**2 + y**2 - 1.0, x + y ]\n\nR, x, y, z = sp.ring('x,y,z', sp.RR, order=sp.grevlex)\nI = [ x**2 - 1, y**2 - 4, z**2 - 9]\n\n# n = 4 takes a long time\nn = 4\nRvs = sp.ring(' '.join('v'+str(i) for i in range(1, n + 1)), sp.RR, order=sp.grevlex)\nR, vs = Rvs[0], Rvs[1:]\nI = []\nI.extend([v**2 - 1 for v in vs])\n#I.extend([(v-1)**2 for v in vs])\n#I.extend([v-1 for v in vs])\n#I.extend([vs[i] - vs[i-1] for i in range(1, len(vs))]) # Makes it fast\n\nprint('Generating')\nB = BB.BorderBasisFactory(1e-5).generate(R,I)\nprint('Done')\n\nprint(\"=== Generator Basis:\")\nfor f in B.generator_basis:\n display(f.as_expr())\n\nprint(\"=== Quotient Basis:\")\nfor f in B.quotient_basis():\n display(f.as_expr())\n\n# v2 is always zero\nprint(\"=== Variety:\")\nfor v in B.zeros():\n print(zip(R.symbols, v))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import ordenador
import pytest
import contatempo
class TestaOrdenador:
@pytest.fixture
def ordenad(self):
return ordenador.Ordenador()
@pytest.fixture
def list_quase_ord(self):
c = contatempo.ContaTempos()
return c.lista_quase_ordenada(100)
@pytest.fixture
def list_aleatoria(self):
c = contatempo.ContaTempos()
return c.lista_aleatoria(100)
def esta_ordenada(self, lista):
for i in range(len(lista)-1):
if lista[i] > lista[i+1]:
return False
return True
def test_selecao_bolha_melhorada_aleatoria(self, ordenad, list_aleatoria):
ordenad.selecao_bolha_melhorada(list_aleatoria)
assert self.esta_ordenada(list_aleatoria)
def test_selecao_direta_aleatoria(self, ordenad, list_aleatoria):
ordenad.selecao_direta(list_aleatoria)
assert self.esta_ordenada(list_aleatoria)
def test_selecao_bolha_melhorada__quase_ord(self, ordenad, list_quase_ord):
ordenad.selecao_bolha_melhorada(list_quase_ord)
assert self.esta_ordenada(list_quase_ord)
def test_selecao_direta_quase_ord(self, ordenad, list_quase_ord):
ordenad.selecao_direta(list_quase_ord)
assert self.esta_ordenada(list_quase_ord)
|
normal
|
{
"blob_id": "32bb6d5ad0a1398c9ab89190c087fe3916631878",
"index": 7750,
"step-1": "<mask token>\n\n\nclass TestaOrdenador:\n\n @pytest.fixture\n def ordenad(self):\n return ordenador.Ordenador()\n <mask token>\n <mask token>\n <mask token>\n\n def test_selecao_bolha_melhorada_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_bolha_melhorada(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_direta_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_direta(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_bolha_melhorada__quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_bolha_melhorada(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestaOrdenador:\n\n @pytest.fixture\n def ordenad(self):\n return ordenador.Ordenador()\n\n @pytest.fixture\n def list_quase_ord(self):\n c = contatempo.ContaTempos()\n return c.lista_quase_ordenada(100)\n <mask token>\n <mask token>\n\n def test_selecao_bolha_melhorada_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_bolha_melhorada(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_direta_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_direta(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_bolha_melhorada__quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_bolha_melhorada(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TestaOrdenador:\n\n @pytest.fixture\n def ordenad(self):\n return ordenador.Ordenador()\n\n @pytest.fixture\n def list_quase_ord(self):\n c = contatempo.ContaTempos()\n return c.lista_quase_ordenada(100)\n <mask token>\n\n def esta_ordenada(self, lista):\n for i in range(len(lista) - 1):\n if lista[i] > lista[i + 1]:\n return False\n return True\n\n def test_selecao_bolha_melhorada_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_bolha_melhorada(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_direta_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_direta(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_bolha_melhorada__quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_bolha_melhorada(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n\n def test_selecao_direta_quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_direta(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n",
"step-4": "<mask token>\n\n\nclass TestaOrdenador:\n\n @pytest.fixture\n def ordenad(self):\n return ordenador.Ordenador()\n\n @pytest.fixture\n def list_quase_ord(self):\n c = contatempo.ContaTempos()\n return c.lista_quase_ordenada(100)\n\n @pytest.fixture\n def list_aleatoria(self):\n c = contatempo.ContaTempos()\n return c.lista_aleatoria(100)\n\n def esta_ordenada(self, lista):\n for i in range(len(lista) - 1):\n if lista[i] > lista[i + 1]:\n return False\n return True\n\n def test_selecao_bolha_melhorada_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_bolha_melhorada(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_direta_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_direta(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_bolha_melhorada__quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_bolha_melhorada(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n\n def test_selecao_direta_quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_direta(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n",
"step-5": "import ordenador\nimport pytest\nimport contatempo\n\nclass TestaOrdenador:\n\n @pytest.fixture\n def ordenad(self):\n return ordenador.Ordenador()\n\n @pytest.fixture\n def list_quase_ord(self):\n c = contatempo.ContaTempos()\n return c.lista_quase_ordenada(100)\n\n @pytest.fixture\n def list_aleatoria(self):\n c = contatempo.ContaTempos()\n return c.lista_aleatoria(100)\n\n def esta_ordenada(self, lista):\n for i in range(len(lista)-1):\n if lista[i] > lista[i+1]:\n return False\n return True\n \n def test_selecao_bolha_melhorada_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_bolha_melhorada(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_direta_aleatoria(self, ordenad, list_aleatoria):\n ordenad.selecao_direta(list_aleatoria)\n assert self.esta_ordenada(list_aleatoria)\n\n def test_selecao_bolha_melhorada__quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_bolha_melhorada(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n\n def test_selecao_direta_quase_ord(self, ordenad, list_quase_ord):\n ordenad.selecao_direta(list_quase_ord)\n assert self.esta_ordenada(list_quase_ord)\n\n \n",
"step-ids": [
5,
6,
8,
9,
11
]
}
|
[
5,
6,
8,
9,
11
] |
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User, Group
# Create your models here.
def default_expiration():
return timezone.now() + timezone.timedelta(days=10)
class Category(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
def __str__(self):
return self.name
class Survey(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
category = models.ForeignKey(Category, blank=True, null=True, on_delete=models.CASCADE)
users = models.ManyToManyField(User, through='SurveyToUser')
groups = models.ManyToManyField(Group, through='SurveyToGroup')
pub_date = models.DateTimeField(default=timezone.now)
exp_date = models.DateTimeField(default=default_expiration)
period = models.DurationField(default=None, null=True, blank=True)
@property
def get_questions(self):
return self.question_set.all()
def __str__(self):
return f'{self.name}: {self.description}'
class Question(models.Model):
TEXT = 'text'
NUMBER = 'number'
SELECT = 'select'
SELECT_MULTIPLE = 'select-multiple'
RADIO = 'radio'
CHECKBOX = 'checkbox'
SURVEY_QUESTION_TYPES_CHOICES = [
(TEXT, 'text'),
(NUMBER, 'number'),
(SELECT, 'select'),
(SELECT_MULTIPLE, 'select-multiple'),
(RADIO, 'radio'),
(CHECKBOX, 'checkbox'),
]
text = models.TextField()
required = models.BooleanField(default=True)
survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=models.CASCADE)
question_type = models.CharField(
max_length=50,
choices=SURVEY_QUESTION_TYPES_CHOICES,
default=TEXT,
)
choices = models.TextField(blank=True, null=True)
other = models.TextField(blank=True, null=True)
def __str__(self):
return f'Question: {self.text} of survey {self.survey.name}'
class Answer(models.Model):
text = models.TextField(blank=True)
question = models.ForeignKey(Question, blank=False, null=False, on_delete=models.CASCADE)
user = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE)
def __str__(self):
return f'Answer: {self.text} to the Question {self.question.text} ' \
f'given by User: {self.user.username} for survey {self.question.survey.name}'
class SurveyToUser(models.Model):
user = models.ForeignKey(User, blank=True, null=True, on_delete=models.CASCADE)
survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=models.CASCADE, related_name='user_survey')
completed = models.BooleanField(default=False, null=False)
completion_date = models.DateTimeField(default=None, null=True, blank=True)
class SurveyToGroup(models.Model):
group = models.ForeignKey(Group, blank=True, null=True, on_delete=models.CASCADE)
survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=models.CASCADE)
completed = models.BooleanField(default=False, null=False)
completion_date = models.DateTimeField(default=None, null=True, blank=True)
class StarRating(models.Model):
text = models.TextField(blank=True, null=False, default=None)
user = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE)
completed = models.DateTimeField(blank=True, null=True, default=None)
|
normal
|
{
"blob_id": "33b6a4c76079ed698809b29772abb59a34831472",
"index": 5900,
"step-1": "<mask token>\n\n\nclass Survey(models.Model):\n name = models.CharField(max_length=200)\n description = models.TextField()\n category = models.ForeignKey(Category, blank=True, null=True, on_delete\n =models.CASCADE)\n users = models.ManyToManyField(User, through='SurveyToUser')\n groups = models.ManyToManyField(Group, through='SurveyToGroup')\n pub_date = models.DateTimeField(default=timezone.now)\n exp_date = models.DateTimeField(default=default_expiration)\n period = models.DurationField(default=None, null=True, blank=True)\n\n @property\n def get_questions(self):\n return self.question_set.all()\n\n def __str__(self):\n return f'{self.name}: {self.description}'\n\n\nclass Question(models.Model):\n TEXT = 'text'\n NUMBER = 'number'\n SELECT = 'select'\n SELECT_MULTIPLE = 'select-multiple'\n RADIO = 'radio'\n CHECKBOX = 'checkbox'\n SURVEY_QUESTION_TYPES_CHOICES = [(TEXT, 'text'), (NUMBER, 'number'), (\n SELECT, 'select'), (SELECT_MULTIPLE, 'select-multiple'), (RADIO,\n 'radio'), (CHECKBOX, 'checkbox')]\n text = models.TextField()\n required = models.BooleanField(default=True)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n question_type = models.CharField(max_length=50, choices=\n SURVEY_QUESTION_TYPES_CHOICES, default=TEXT)\n choices = models.TextField(blank=True, null=True)\n other = models.TextField(blank=True, null=True)\n\n def __str__(self):\n return f'Question: {self.text} of survey {self.survey.name}'\n\n\nclass Answer(models.Model):\n text = models.TextField(blank=True)\n question = models.ForeignKey(Question, blank=False, null=False,\n on_delete=models.CASCADE)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return (\n f'Answer: {self.text} to the Question {self.question.text} given by User: {self.user.username} for survey {self.question.survey.name}'\n )\n\n\nclass SurveyToUser(models.Model):\n user = models.ForeignKey(User, blank=True, null=True, on_delete=models.\n CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE, related_name='user_survey')\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass SurveyToGroup(models.Model):\n group = models.ForeignKey(Group, blank=True, null=True, on_delete=\n models.CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass StarRating(models.Model):\n text = models.TextField(blank=True, null=False, default=None)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.DateTimeField(blank=True, null=True, default=None)\n",
"step-2": "<mask token>\n\n\nclass Category(models.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Survey(models.Model):\n name = models.CharField(max_length=200)\n description = models.TextField()\n category = models.ForeignKey(Category, blank=True, null=True, on_delete\n =models.CASCADE)\n users = models.ManyToManyField(User, through='SurveyToUser')\n groups = models.ManyToManyField(Group, through='SurveyToGroup')\n pub_date = models.DateTimeField(default=timezone.now)\n exp_date = models.DateTimeField(default=default_expiration)\n period = models.DurationField(default=None, null=True, blank=True)\n\n @property\n def get_questions(self):\n return self.question_set.all()\n\n def __str__(self):\n return f'{self.name}: {self.description}'\n\n\nclass Question(models.Model):\n TEXT = 'text'\n NUMBER = 'number'\n SELECT = 'select'\n SELECT_MULTIPLE = 'select-multiple'\n RADIO = 'radio'\n CHECKBOX = 'checkbox'\n SURVEY_QUESTION_TYPES_CHOICES = [(TEXT, 'text'), (NUMBER, 'number'), (\n SELECT, 'select'), (SELECT_MULTIPLE, 'select-multiple'), (RADIO,\n 'radio'), (CHECKBOX, 'checkbox')]\n text = models.TextField()\n required = models.BooleanField(default=True)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n question_type = models.CharField(max_length=50, choices=\n SURVEY_QUESTION_TYPES_CHOICES, default=TEXT)\n choices = models.TextField(blank=True, null=True)\n other = models.TextField(blank=True, null=True)\n\n def __str__(self):\n return f'Question: {self.text} of survey {self.survey.name}'\n\n\nclass Answer(models.Model):\n text = models.TextField(blank=True)\n question = models.ForeignKey(Question, blank=False, null=False,\n on_delete=models.CASCADE)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return (\n f'Answer: {self.text} to the Question {self.question.text} given by User: {self.user.username} for survey {self.question.survey.name}'\n )\n\n\nclass SurveyToUser(models.Model):\n user = models.ForeignKey(User, blank=True, null=True, on_delete=models.\n CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE, related_name='user_survey')\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass SurveyToGroup(models.Model):\n group = models.ForeignKey(Group, blank=True, null=True, on_delete=\n models.CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass StarRating(models.Model):\n text = models.TextField(blank=True, null=False, default=None)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.DateTimeField(blank=True, null=True, default=None)\n",
"step-3": "<mask token>\n\n\nclass Category(models.Model):\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.name\n\n\nclass Survey(models.Model):\n name = models.CharField(max_length=200)\n description = models.TextField()\n category = models.ForeignKey(Category, blank=True, null=True, on_delete\n =models.CASCADE)\n users = models.ManyToManyField(User, through='SurveyToUser')\n groups = models.ManyToManyField(Group, through='SurveyToGroup')\n pub_date = models.DateTimeField(default=timezone.now)\n exp_date = models.DateTimeField(default=default_expiration)\n period = models.DurationField(default=None, null=True, blank=True)\n\n @property\n def get_questions(self):\n return self.question_set.all()\n\n def __str__(self):\n return f'{self.name}: {self.description}'\n\n\nclass Question(models.Model):\n TEXT = 'text'\n NUMBER = 'number'\n SELECT = 'select'\n SELECT_MULTIPLE = 'select-multiple'\n RADIO = 'radio'\n CHECKBOX = 'checkbox'\n SURVEY_QUESTION_TYPES_CHOICES = [(TEXT, 'text'), (NUMBER, 'number'), (\n SELECT, 'select'), (SELECT_MULTIPLE, 'select-multiple'), (RADIO,\n 'radio'), (CHECKBOX, 'checkbox')]\n text = models.TextField()\n required = models.BooleanField(default=True)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n question_type = models.CharField(max_length=50, choices=\n SURVEY_QUESTION_TYPES_CHOICES, default=TEXT)\n choices = models.TextField(blank=True, null=True)\n other = models.TextField(blank=True, null=True)\n\n def __str__(self):\n return f'Question: {self.text} of survey {self.survey.name}'\n\n\nclass Answer(models.Model):\n text = models.TextField(blank=True)\n question = models.ForeignKey(Question, blank=False, null=False,\n on_delete=models.CASCADE)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return (\n f'Answer: {self.text} to the Question {self.question.text} given by User: {self.user.username} for survey {self.question.survey.name}'\n )\n\n\nclass SurveyToUser(models.Model):\n user = models.ForeignKey(User, blank=True, null=True, on_delete=models.\n CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE, related_name='user_survey')\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass SurveyToGroup(models.Model):\n group = models.ForeignKey(Group, blank=True, null=True, on_delete=\n models.CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass StarRating(models.Model):\n text = models.TextField(blank=True, null=False, default=None)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.DateTimeField(blank=True, null=True, default=None)\n",
"step-4": "<mask token>\n\n\nclass Category(models.Model):\n name = models.CharField(max_length=200)\n description = models.TextField()\n\n def __str__(self):\n return self.name\n\n\nclass Survey(models.Model):\n name = models.CharField(max_length=200)\n description = models.TextField()\n category = models.ForeignKey(Category, blank=True, null=True, on_delete\n =models.CASCADE)\n users = models.ManyToManyField(User, through='SurveyToUser')\n groups = models.ManyToManyField(Group, through='SurveyToGroup')\n pub_date = models.DateTimeField(default=timezone.now)\n exp_date = models.DateTimeField(default=default_expiration)\n period = models.DurationField(default=None, null=True, blank=True)\n\n @property\n def get_questions(self):\n return self.question_set.all()\n\n def __str__(self):\n return f'{self.name}: {self.description}'\n\n\nclass Question(models.Model):\n TEXT = 'text'\n NUMBER = 'number'\n SELECT = 'select'\n SELECT_MULTIPLE = 'select-multiple'\n RADIO = 'radio'\n CHECKBOX = 'checkbox'\n SURVEY_QUESTION_TYPES_CHOICES = [(TEXT, 'text'), (NUMBER, 'number'), (\n SELECT, 'select'), (SELECT_MULTIPLE, 'select-multiple'), (RADIO,\n 'radio'), (CHECKBOX, 'checkbox')]\n text = models.TextField()\n required = models.BooleanField(default=True)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n question_type = models.CharField(max_length=50, choices=\n SURVEY_QUESTION_TYPES_CHOICES, default=TEXT)\n choices = models.TextField(blank=True, null=True)\n other = models.TextField(blank=True, null=True)\n\n def __str__(self):\n return f'Question: {self.text} of survey {self.survey.name}'\n\n\nclass Answer(models.Model):\n text = models.TextField(blank=True)\n question = models.ForeignKey(Question, blank=False, null=False,\n on_delete=models.CASCADE)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n\n def __str__(self):\n return (\n f'Answer: {self.text} to the Question {self.question.text} given by User: {self.user.username} for survey {self.question.survey.name}'\n )\n\n\nclass SurveyToUser(models.Model):\n user = models.ForeignKey(User, blank=True, null=True, on_delete=models.\n CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE, related_name='user_survey')\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass SurveyToGroup(models.Model):\n group = models.ForeignKey(Group, blank=True, null=True, on_delete=\n models.CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass StarRating(models.Model):\n text = models.TextField(blank=True, null=False, default=None)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=\n models.CASCADE)\n completed = models.DateTimeField(blank=True, null=True, default=None)\n",
"step-5": "from django.db import models\nfrom django.utils import timezone\nfrom django.contrib.auth.models import User, Group\n\n# Create your models here.\n\n\ndef default_expiration():\n return timezone.now() + timezone.timedelta(days=10)\n\n\nclass Category(models.Model):\n name = models.CharField(max_length=200)\n description = models.TextField()\n\n def __str__(self):\n return self.name\n\n\nclass Survey(models.Model):\n name = models.CharField(max_length=200)\n description = models.TextField()\n category = models.ForeignKey(Category, blank=True, null=True, on_delete=models.CASCADE)\n users = models.ManyToManyField(User, through='SurveyToUser')\n groups = models.ManyToManyField(Group, through='SurveyToGroup')\n pub_date = models.DateTimeField(default=timezone.now)\n exp_date = models.DateTimeField(default=default_expiration)\n period = models.DurationField(default=None, null=True, blank=True)\n\n @property\n def get_questions(self):\n return self.question_set.all()\n\n def __str__(self):\n return f'{self.name}: {self.description}'\n\n\nclass Question(models.Model):\n TEXT = 'text'\n NUMBER = 'number'\n SELECT = 'select'\n SELECT_MULTIPLE = 'select-multiple'\n RADIO = 'radio'\n CHECKBOX = 'checkbox'\n\n SURVEY_QUESTION_TYPES_CHOICES = [\n (TEXT, 'text'),\n (NUMBER, 'number'),\n (SELECT, 'select'),\n (SELECT_MULTIPLE, 'select-multiple'),\n (RADIO, 'radio'),\n (CHECKBOX, 'checkbox'),\n ]\n\n text = models.TextField()\n required = models.BooleanField(default=True)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=models.CASCADE)\n question_type = models.CharField(\n max_length=50,\n choices=SURVEY_QUESTION_TYPES_CHOICES,\n default=TEXT,\n )\n choices = models.TextField(blank=True, null=True)\n other = models.TextField(blank=True, null=True)\n\n def __str__(self):\n return f'Question: {self.text} of survey {self.survey.name}'\n\n\nclass Answer(models.Model):\n text = models.TextField(blank=True)\n question = models.ForeignKey(Question, blank=False, null=False, on_delete=models.CASCADE)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE)\n\n def __str__(self):\n return f'Answer: {self.text} to the Question {self.question.text} ' \\\n f'given by User: {self.user.username} for survey {self.question.survey.name}'\n\n\nclass SurveyToUser(models.Model):\n user = models.ForeignKey(User, blank=True, null=True, on_delete=models.CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=models.CASCADE, related_name='user_survey')\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass SurveyToGroup(models.Model):\n group = models.ForeignKey(Group, blank=True, null=True, on_delete=models.CASCADE)\n survey = models.ForeignKey(Survey, blank=False, null=False, on_delete=models.CASCADE)\n completed = models.BooleanField(default=False, null=False)\n completion_date = models.DateTimeField(default=None, null=True, blank=True)\n\n\nclass StarRating(models.Model):\n text = models.TextField(blank=True, null=False, default=None)\n user = models.ForeignKey(User, blank=False, null=False, on_delete=models.CASCADE)\n completed = models.DateTimeField(blank=True, null=True, default=None)\n\n",
"step-ids": [
16,
17,
18,
19,
22
]
}
|
[
16,
17,
18,
19,
22
] |